Bug Summary

File: build/gcc/tree-ssa-phiopt.cc
Warning: line 2703, column 15
Value stored to 'cond3_bb' during its initialization is never read

Annotated Source Code


clang -cc1 -cc1 -triple x86_64-suse-linux -analyze -disable-free -clear-ast-before-backend -disable-llvm-verifier -discard-value-names -main-file-name tree-ssa-phiopt.cc -analyzer-checker=core -analyzer-checker=apiModeling -analyzer-checker=unix -analyzer-checker=deadcode -analyzer-checker=cplusplus -analyzer-checker=security.insecureAPI.UncheckedReturn -analyzer-checker=security.insecureAPI.getpw -analyzer-checker=security.insecureAPI.gets -analyzer-checker=security.insecureAPI.mktemp -analyzer-checker=security.insecureAPI.mkstemp -analyzer-checker=security.insecureAPI.vfork -analyzer-checker=nullability.NullPassedToNonnull -analyzer-checker=nullability.NullReturnedFromNonnull -analyzer-output plist -w -setup-static-analyzer -analyzer-config-compatibility-mode=true -mrelocation-model static -mframe-pointer=none -fmath-errno -ffp-contract=on -fno-rounding-math -mconstructor-aliases -funwind-tables=2 -target-cpu x86-64 -tune-cpu generic -debugger-tuning=gdb -fcoverage-compilation-dir=/buildworker/marxinbox-gcc-clang-static-analyzer/objdir/gcc -resource-dir /usr/lib64/clang/15.0.7 -D IN_GCC -D HAVE_CONFIG_H -I . -I . -I /buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc -I /buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/. -I /buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../include -I /buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libcpp/include -I /buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libcody -I /buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libdecnumber -I /buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libdecnumber/bid -I ../libdecnumber -I /buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libbacktrace -internal-isystem /usr/bin/../lib64/gcc/x86_64-suse-linux/13/../../../../include/c++/13 -internal-isystem /usr/bin/../lib64/gcc/x86_64-suse-linux/13/../../../../include/c++/13/x86_64-suse-linux -internal-isystem /usr/bin/../lib64/gcc/x86_64-suse-linux/13/../../../../include/c++/13/backward -internal-isystem /usr/lib64/clang/15.0.7/include -internal-isystem /usr/local/include -internal-isystem /usr/bin/../lib64/gcc/x86_64-suse-linux/13/../../../../x86_64-suse-linux/include -internal-externc-isystem /include -internal-externc-isystem /usr/include -O2 -Wno-narrowing -Wwrite-strings -Wno-long-long -Wno-variadic-macros -Wno-overlength-strings -fdeprecated-macro -fdebug-compilation-dir=/buildworker/marxinbox-gcc-clang-static-analyzer/objdir/gcc -ferror-limit 19 -fno-rtti -fgnuc-version=4.2.1 -vectorize-loops -vectorize-slp -analyzer-output=plist-html -analyzer-config silence-checkers=core.NullDereference -faddrsig -D__GCC_HAVE_DWARF2_CFI_ASM=1 -o /buildworker/marxinbox-gcc-clang-static-analyzer/objdir/clang-static-analyzer/2023-03-27-141847-20772-1/report-7W5mLi.plist -x c++ /buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-phiopt.cc
1/* Optimization of PHI nodes by converting them into straightline code.
2 Copyright (C) 2004-2023 Free Software Foundation, Inc.
3
4This file is part of GCC.
5
6GCC is free software; you can redistribute it and/or modify it
7under the terms of the GNU General Public License as published by the
8Free Software Foundation; either version 3, or (at your option) any
9later version.
10
11GCC is distributed in the hope that it will be useful, but WITHOUT
12ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
13FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14for more details.
15
16You should have received a copy of the GNU General Public License
17along with GCC; see the file COPYING3. If not see
18<http://www.gnu.org/licenses/>. */
19
20#include "config.h"
21#include "system.h"
22#include "coretypes.h"
23#include "backend.h"
24#include "insn-codes.h"
25#include "rtl.h"
26#include "tree.h"
27#include "gimple.h"
28#include "cfghooks.h"
29#include "tree-pass.h"
30#include "ssa.h"
31#include "tree-ssa.h"
32#include "optabs-tree.h"
33#include "insn-config.h"
34#include "gimple-pretty-print.h"
35#include "fold-const.h"
36#include "stor-layout.h"
37#include "cfganal.h"
38#include "gimplify.h"
39#include "gimple-iterator.h"
40#include "gimplify-me.h"
41#include "tree-cfg.h"
42#include "tree-dfa.h"
43#include "domwalk.h"
44#include "cfgloop.h"
45#include "tree-data-ref.h"
46#include "tree-scalar-evolution.h"
47#include "tree-inline.h"
48#include "case-cfn-macros.h"
49#include "tree-eh.h"
50#include "gimple-fold.h"
51#include "internal-fn.h"
52#include "gimple-range.h"
53#include "gimple-match.h"
54#include "dbgcnt.h"
55#include "tree-ssa-propagate.h"
56#include "tree-ssa-dce.h"
57
58static unsigned int tree_ssa_phiopt_worker (bool, bool, bool);
59static bool two_value_replacement (basic_block, basic_block, edge, gphi *,
60 tree, tree);
61static bool match_simplify_replacement (basic_block, basic_block,
62 edge, edge, gphi *, tree, tree, bool);
63static gphi *factor_out_conditional_conversion (edge, edge, gphi *, tree, tree,
64 gimple *);
65static int value_replacement (basic_block, basic_block,
66 edge, edge, gphi *, tree, tree);
67static bool minmax_replacement (basic_block, basic_block, basic_block,
68 edge, edge, gphi *, tree, tree, bool);
69static bool spaceship_replacement (basic_block, basic_block,
70 edge, edge, gphi *, tree, tree);
71static bool cond_removal_in_builtin_zero_pattern (basic_block, basic_block,
72 edge, edge, gphi *,
73 tree, tree);
74static bool cond_store_replacement (basic_block, basic_block, edge, edge,
75 hash_set<tree> *);
76static bool cond_if_else_store_replacement (basic_block, basic_block, basic_block);
77static hash_set<tree> * get_non_trapping ();
78static void hoist_adjacent_loads (basic_block, basic_block,
79 basic_block, basic_block);
80static bool gate_hoist_loads (void);
81
82/* This pass tries to transform conditional stores into unconditional
83 ones, enabling further simplifications with the simpler then and else
84 blocks. In particular it replaces this:
85
86 bb0:
87 if (cond) goto bb2; else goto bb1;
88 bb1:
89 *p = RHS;
90 bb2:
91
92 with
93
94 bb0:
95 if (cond) goto bb1; else goto bb2;
96 bb1:
97 condtmp' = *p;
98 bb2:
99 condtmp = PHI <RHS, condtmp'>
100 *p = condtmp;
101
102 This transformation can only be done under several constraints,
103 documented below. It also replaces:
104
105 bb0:
106 if (cond) goto bb2; else goto bb1;
107 bb1:
108 *p = RHS1;
109 goto bb3;
110 bb2:
111 *p = RHS2;
112 bb3:
113
114 with
115
116 bb0:
117 if (cond) goto bb3; else goto bb1;
118 bb1:
119 bb3:
120 condtmp = PHI <RHS1, RHS2>
121 *p = condtmp; */
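
[Editorial illustration, not part of the analyzed file.] A hypothetical C-level sketch of the first shape above; function and variable names are invented, and the pass itself works on GIMPLE and only fires when *p is known not to trap:

  /* Before: the store happens on only one path.  */
  void store_cond (int *p, int cond, int rhs)
  {
    if (!cond)
      *p = rhs;
  }

  /* Conceptually after cond-store replacement: load and store become
     unconditional and a PHI selects the stored value.  */
  void store_cond_after (int *p, int cond, int rhs)
  {
    int condtmp_ = *p;                    /* unconditional load  */
    int condtmp = cond ? condtmp_ : rhs;  /* the join-block PHI  */
    *p = condtmp;                         /* unconditional store */
  }
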
122
123static unsigned int
124tree_ssa_cs_elim (void)
125{
126 unsigned todo;
127 /* ??? We are not interested in loop related info, but the following
128 will create it, ICEing as we didn't init loops with pre-headers.
129 An interfacing issue of find_data_references_in_bb. */
130 loop_optimizer_init (LOOPS_NORMAL);
131 scev_initialize ();
132 todo = tree_ssa_phiopt_worker (true, false, false);
133 scev_finalize ();
134 loop_optimizer_finalize ();
135 return todo;
136}
137
138/* Return the singleton PHI in the SEQ of PHIs for edges E0 and E1. */
139
140static gphi *
141single_non_singleton_phi_for_edges (gimple_seq seq, edge e0, edge e1)
142{
143 gimple_stmt_iterator i;
144 gphi *phi = NULL;
145 if (gimple_seq_singleton_p (seq))
146 return as_a <gphi *> (gsi_stmt (gsi_start (seq)));
147 for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
148 {
149 gphi *p = as_a <gphi *> (gsi_stmt (i));
150 /* If the PHI arguments are equal then we can skip this PHI. */
151 if (operand_equal_for_phi_arg_p (gimple_phi_arg_def (p, e0->dest_idx),
152 gimple_phi_arg_def (p, e1->dest_idx)))
153 continue;
154
155 /* If we already have a PHI whose two edge arguments differ, then
156 there is no singleton for these edges; return NULL. */
157 if (phi)
158 return NULL;
159
160 phi = p;
161 }
162 return phi;
163}
164
165/* The core routine of conditional store replacement and normal
166 phi optimizations. Both share much of the infrastructure in how
167 to match applicable basic block patterns. DO_STORE_ELIM is true
168 when we want to do conditional store replacement, false otherwise.
169 DO_HOIST_LOADS is true when we want to hoist adjacent loads out
170 of diamond control flow patterns, false otherwise. */
171static unsigned int
172tree_ssa_phiopt_worker (bool do_store_elim, bool do_hoist_loads, bool early_p)
173{
174 basic_block bb;
175 basic_block *bb_order;
176 unsigned n, i;
177 bool cfgchanged = false;
178 hash_set<tree> *nontrap = 0;
179
180 calculate_dominance_info (CDI_DOMINATORS);
181
182 if (do_store_elim)
183 /* Calculate the set of non-trapping memory accesses. */
184 nontrap = get_non_trapping ();
185
186 /* Search every basic block for COND_EXPR we may be able to optimize.
187
188 We walk the blocks in order that guarantees that a block with
189 a single predecessor is processed before the predecessor.
190 This ensures that we collapse inner ifs before visiting the
191 outer ones, and also that we do not try to visit a removed
192 block. */
193 bb_order = single_pred_before_succ_order ();
194 n = n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS;
195
196 for (i = 0; i < n; i++)
197 {
198 gimple *cond_stmt;
199 gphi *phi;
200 basic_block bb1, bb2;
201 edge e1, e2;
202 tree arg0, arg1;
203 bool diamond_p = false;
204
205 bb = bb_order[i];
206
207 cond_stmt = last_stmt (bb);
208 /* Check to see if the last statement is a GIMPLE_COND. */
209 if (!cond_stmt
210 || gimple_code (cond_stmt) != GIMPLE_COND)
211 continue;
212
213 e1 = EDGE_SUCC (bb, 0);
214 bb1 = e1->dest;
215 e2 = EDGE_SUCC (bb, 1);
216 bb2 = e2->dest;
217
218 /* We cannot do the optimization on abnormal edges. */
219 if ((e1->flags & EDGE_ABNORMAL) != 0
220 || (e2->flags & EDGE_ABNORMAL) != 0)
221 continue;
222
223 /* If either bb1 or bb2 has no successors, give up. */
224 if (EDGE_COUNT (bb1->succs) == 0
225 || EDGE_COUNT (bb2->succs) == 0)
226 continue;
227
228 /* Find the bb which is the fall through to the other. */
229 if (EDGE_SUCC (bb1, 0)->dest == bb2)
230 ;
231 else if (EDGE_SUCC (bb2, 0)->dest == bb1)
232 {
233 std::swap (bb1, bb2);
234 std::swap (e1, e2);
235 }
236 else if (do_store_elim
237 && EDGE_SUCC (bb1, 0)->dest == EDGE_SUCC (bb2, 0)->dest)
238 {
239 basic_block bb3 = EDGE_SUCC (bb1, 0)->dest;
240
241 if (!single_succ_p (bb1)
242 || (EDGE_SUCC (bb1, 0)->flags & EDGE_FALLTHRU) == 0
243 || !single_succ_p (bb2)
244 || (EDGE_SUCC (bb2, 0)->flags & EDGE_FALLTHRU) == 0
245 || EDGE_COUNT (bb3->preds) != 2)
246 continue;
247 if (cond_if_else_store_replacement (bb1, bb2, bb3))
248 cfgchanged = true;
249 continue;
250 }
251 else if (do_hoist_loads
252 && EDGE_SUCC (bb1, 0)->dest == EDGE_SUCC (bb2, 0)->dest)
253 {
254 basic_block bb3 = EDGE_SUCC (bb1, 0)->dest;
255
256 if (!FLOAT_TYPE_P (TREE_TYPE (gimple_cond_lhs (cond_stmt)))
257 && single_succ_p (bb1)
258 && single_succ_p (bb2)
259 && single_pred_p (bb1)
260 && single_pred_p (bb2)
261 && EDGE_COUNT (bb->succs) == 2
262 && EDGE_COUNT (bb3->preds) == 2
263 /* If one edge or the other is dominant, a conditional move
264 is likely to perform worse than the well-predicted branch. */
265 && !predictable_edge_p (EDGE_SUCC (bb, 0))
266 && !predictable_edge_p (EDGE_SUCC (bb, 1)))
267 hoist_adjacent_loads (bb, bb1, bb2, bb3);
268 continue;
269 }
270 else if (EDGE_SUCC (bb1, 0)->dest == EDGE_SUCC (bb2, 0)->dest
271 && !empty_block_p (bb1))
272 {
273 diamond_p = true;
274 e2 = EDGE_SUCC (bb2, 0);
275 }
276 else
277 continue;
278
279 e1 = EDGE_SUCC (bb1, 0);
280
281 /* Make sure that bb1 is just a fall through. */
282 if (!single_succ_p (bb1)
283 || (e1->flags & EDGE_FALLTHRU) == 0)
284 continue;
285
286 if (do_store_elim && !diamond_p)
287 {
288 /* Also make sure that bb1 only has one predecessor and that it
289 is bb. */
290 if (!single_pred_p (bb1)
291 || single_pred (bb1) != bb)
292 continue;
293
294 /* bb1 is the middle block, bb2 the join block, bb the split block,
295 e1 the fallthrough edge from bb1 to bb2. We can't do the
296 optimization if the join block has more than two predecessors. */
297 if (EDGE_COUNT (bb2->preds) > 2)
298 continue;
299 if (cond_store_replacement (bb1, bb2, e1, e2, nontrap))
300 cfgchanged = true;
301 }
302 else
303 {
304 gimple_stmt_iterator gsi;
305 bool candorest = true;
306
307 /* Check that we're looking for nested phis. */
308 basic_block merge = diamond_p ? EDGE_SUCC (bb2, 0)->dest : bb2;
309 gimple_seq phis = phi_nodes (merge);
310
311 /* Value replacement can work with more than one PHI
312 so try that first. */
313 if (!early_p && !diamond_p)
314 for (gsi = gsi_start (phis); !gsi_end_p (gsi); gsi_next (&gsi))
315 {
316 phi = as_a <gphi *> (gsi_stmt (gsi));
317 arg0 = gimple_phi_arg_def (phi, e1->dest_idx);
318 arg1 = gimple_phi_arg_def (phi, e2->dest_idx);
319 if (value_replacement (bb, bb1, e1, e2, phi, arg0, arg1) == 2)
320 {
321 candorest = false;
322 cfgchanged = true;
323 break;
324 }
325 }
326
327 if (!candorest)
328 continue;
329
330 phi = single_non_singleton_phi_for_edges (phis, e1, e2);
331 if (!phi)
332 continue;
333
334 arg0 = gimple_phi_arg_def (phi, e1->dest_idx);
335 arg1 = gimple_phi_arg_def (phi, e2->dest_idx);
336
337 /* Something is wrong if we cannot find the arguments in the PHI
338 node. */
339 gcc_assert (arg0 != NULL_TREE && arg1 != NULL_TREE);
340
341 gphi *newphi;
342 if (single_pred_p (bb1)
343 && !diamond_p
344 && (newphi = factor_out_conditional_conversion (e1, e2, phi,
345 arg0, arg1,
346 cond_stmt)))
347 {
348 phi = newphi;
349 /* factor_out_conditional_conversion may create a new PHI in
350 BB2 and eliminate an existing PHI in BB2. Recompute values
351 that may be affected by that change. */
352 arg0 = gimple_phi_arg_def (phi, e1->dest_idx);
353 arg1 = gimple_phi_arg_def (phi, e2->dest_idx);
354 gcc_assert (arg0 != NULL_TREE && arg1 != NULL_TREE);
355 }
356
357 /* Do the replacement of conditional if it can be done. */
358 if (!early_p
359 && !diamond_p
360 && two_value_replacement (bb, bb1, e2, phi, arg0, arg1))
361 cfgchanged = true;
362 else if (!diamond_p
363 && match_simplify_replacement (bb, bb1, e1, e2, phi,
364 arg0, arg1, early_p))
365 cfgchanged = true;
366 else if (!early_p
367 && !diamond_p
368 && single_pred_p (bb1)
369 && cond_removal_in_builtin_zero_pattern (bb, bb1, e1, e2,
370 phi, arg0, arg1))
371 cfgchanged = true;
372 else if (minmax_replacement (bb, bb1, bb2, e1, e2, phi, arg0, arg1,
373 diamond_p))
374 cfgchanged = true;
375 else if (single_pred_p (bb1)
376 && !diamond_p
377 && spaceship_replacement (bb, bb1, e1, e2, phi, arg0, arg1))
378 cfgchanged = true;
379 }
380 }
381
382 free (bb_order);
383
384 if (do_store_elim)
385 delete nontrap;
386 /* If the CFG has changed, we should cleanup the CFG. */
387 if (cfgchanged && do_store_elim)
388 {
389 /* In cond-store replacement we have added some loads on edges
390 and new VOPS (as we moved the store, and created a load). */
391 gsi_commit_edge_inserts ();
392 return TODO_cleanup_cfg | TODO_update_ssa_only_virtuals;
393 }
394 else if (cfgchanged)
395 return TODO_cleanup_cfg;
396 return 0;
397}
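
[Editorial illustration, not part of the analyzed file.] Two hypothetical C functions whose CFGs have the half-diamond and full-diamond shapes the worker above matches; in both cases the interesting PHI sits in the join block:

  int half_diamond (int a)
  {
    int r = a;
    if (a < 0)        /* bb: ends in a GIMPLE_COND              */
      r = -a;         /* bb1: single-statement middle block      */
    return r;         /* bb2: r = PHI <a, -a>, an ABS candidate  */
  }

  int full_diamond (int a, int b)
  {
    int r;
    if (a < b)        /* bb  */
      r = a;          /* bb1 */
    else
      r = b;          /* bb2 */
    return r;         /* bb3: r = PHI <a, b>, a MIN candidate    */
  }
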
398
399/* Replace PHI node element whose edge is E in block BB with variable NEW.
400 Remove the edge from COND_BLOCK which does not lead to BB (COND_BLOCK
401 is known to have two edges, one of which must reach BB). */
402
403static void
404replace_phi_edge_with_variable (basic_block cond_block,
405 edge e, gphi *phi, tree new_tree,
406 bitmap dce_ssa_names = auto_bitmap())
407{
408 basic_block bb = gimple_bb (phi);
409 gimple_stmt_iterator gsi;
410 tree phi_result = PHI_RESULT (phi);
411
412 /* Duplicate range info if they are the only things setting the target PHI.
413 This is needed as later on, the new_tree will be replacing
414 the assignment of the PHI.
415 For an example:
416 bb1:
417 _4 = min<a_1, 255>
418 goto bb2
419
420 # RANGE [-INF, 255]
421 a_3 = PHI<_4(1)>
422 bb3:
423
424 use(a_3)
425 And _4 gets propagated into the use of a_3, losing the range info.
426 This can't be done for more than 2 incoming edges as the propagation
427 won't happen.
428 The new_tree needs to be defined in the same basic block as the conditional. */
429 if (TREE_CODE (new_tree) == SSA_NAME
430 && EDGE_COUNT (gimple_bb (phi)->preds) == 2
431 && INTEGRAL_TYPE_P (TREE_TYPE (phi_result))
432 && !SSA_NAME_RANGE_INFO (new_tree)
433 && SSA_NAME_RANGE_INFO (phi_result)
434 && gimple_bb (SSA_NAME_DEF_STMT (new_tree)) == cond_block
435 && dbg_cnt (phiopt_edge_range))
436 duplicate_ssa_name_range_info (new_tree, phi_result);
437
438 /* Change the PHI argument to new. */
439 SET_USE (PHI_ARG_DEF_PTR (phi, e->dest_idx), new_tree);
440
441 /* Remove the empty basic block. */
442 edge edge_to_remove = NULL, keep_edge = NULL;
443 if (EDGE_SUCC (cond_block, 0)->dest == bb)
444 {
445 edge_to_remove = EDGE_SUCC (cond_block, 1);
446 keep_edge = EDGE_SUCC (cond_block, 0);
447 }
448 else if (EDGE_SUCC (cond_block, 1)->dest == bb)
449 {
450 edge_to_remove = EDGE_SUCC (cond_block, 0);
451 keep_edge = EDGE_SUCC (cond_block, 1);
452 }
453 else if ((keep_edge = find_edge (cond_block, e->src)))
454 ;
455 else
456 gcc_unreachable ();
457
458 if (edge_to_remove && EDGE_COUNT (edge_to_remove->dest->preds) == 1)
459 {
460 e->flags |= EDGE_FALLTHRU;
461 e->flags &= ~(EDGE_TRUE_VALUE | EDGE_FALSE_VALUE);
462 e->probability = profile_probability::always ();
463 delete_basic_block (edge_to_remove->dest);
464
465 /* Eliminate the COND_EXPR at the end of COND_BLOCK. */
466 gsi = gsi_last_bb (cond_block);
467 gsi_remove (&gsi, true);
468 }
469 else
470 {
471 /* If there are other edges into the middle block make
472 CFG cleanup deal with the edge removal to avoid
473 updating dominators here in a non-trivial way. */
474 gcond *cond = as_a <gcond *> (last_stmt (cond_block));
475 if (keep_edge->flags & EDGE_FALSE_VALUE)
476 gimple_cond_make_false (cond);
477 else if (keep_edge->flags & EDGE_TRUE_VALUE)
478 gimple_cond_make_true (cond);
479 }
480
481 simple_dce_from_worklist (dce_ssa_names);
482
483 statistics_counter_event (cfun, "Replace PHI with variable", 1);
484
485 if (dump_file && (dump_flags & TDF_DETAILS))
486 fprintf (dump_file,
487 "COND_EXPR in block %d and PHI in block %d converted to straightline code.\n",
488 cond_block->index,
489 bb->index);
490}
491
492/* PR66726: Factor conversion out of COND_EXPR. If the arguments of the PHI
493 stmt are CONVERT_STMT, factor out the conversion and perform the conversion
494 to the result of PHI stmt. COND_STMT is the controlling predicate.
495 Return the newly-created PHI, if any. */
496
497static gphi *
498factor_out_conditional_conversion (edge e0, edge e1, gphi *phi,
499 tree arg0, tree arg1, gimple *cond_stmt)
500{
501 gimple *arg0_def_stmt = NULL, *arg1_def_stmt = NULL, *new_stmt;
502 tree new_arg0 = NULL_TREE, new_arg1 = NULL_TREE;
503 tree temp, result;
504 gphi *newphi;
505 gimple_stmt_iterator gsi, gsi_for_def;
506 location_t locus = gimple_location (phi);
507 enum tree_code convert_code;
508
509 /* Handle only PHI statements with two arguments. TODO: If all
510 other arguments to PHI are INTEGER_CST or if their defining
511 statement have the same unary operation, we can handle more
512 than two arguments too. */
513 if (gimple_phi_num_args (phi) != 2)
514 return NULL;
515
516 /* First canonicalize to simplify tests. */
517 if (TREE_CODE (arg0) != SSA_NAME)
518 {
519 std::swap (arg0, arg1);
520 std::swap (e0, e1);
521 }
522
523 if (TREE_CODE (arg0) != SSA_NAME
524 || (TREE_CODE (arg1) != SSA_NAME
525 && TREE_CODE (arg1) != INTEGER_CST))
526 return NULL;
527
528 /* Check if arg0 is an SSA_NAME and the stmt which defines arg0 is
529 a conversion. */
530 arg0_def_stmt = SSA_NAME_DEF_STMT (arg0);
531 if (!gimple_assign_cast_p (arg0_def_stmt))
532 return NULL;
533
534 /* Use the RHS as new_arg0. */
535 convert_code = gimple_assign_rhs_code (arg0_def_stmt);
536 new_arg0 = gimple_assign_rhs1 (arg0_def_stmt);
537 if (convert_code == VIEW_CONVERT_EXPR)
538 {
539 new_arg0 = TREE_OPERAND (new_arg0, 0);
540 if (!is_gimple_reg_type (TREE_TYPE (new_arg0)))
541 return NULL;
542 }
543 if (TREE_CODE (new_arg0) == SSA_NAME
544 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_arg0))
545 return NULL;
546
547 if (TREE_CODE (arg1) == SSA_NAME)
548 {
549 /* Check if arg1 is an SSA_NAME and the stmt which defines arg1
550 is a conversion. */
551 arg1_def_stmt = SSA_NAME_DEF_STMT (arg1);
552 if (!is_gimple_assign (arg1_def_stmt)
553 || gimple_assign_rhs_code (arg1_def_stmt) != convert_code)
554 return NULL;
555
556 /* Either arg1_def_stmt or arg0_def_stmt should be conditional. */
557 if (dominated_by_p (CDI_DOMINATORS, gimple_bb (phi), gimple_bb (arg0_def_stmt))
558 && dominated_by_p (CDI_DOMINATORS,
559 gimple_bb (phi), gimple_bb (arg1_def_stmt)))
560 return NULL;
561
562 /* Use the RHS as new_arg1. */
563 new_arg1 = gimple_assign_rhs1 (arg1_def_stmt);
564 if (convert_code == VIEW_CONVERT_EXPR)
565 new_arg1 = TREE_OPERAND (new_arg1, 0);
566 if (TREE_CODE (new_arg1) == SSA_NAME
567 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_arg1))
568 return NULL;
569 }
570 else
571 {
572 /* arg0_def_stmt should be conditional. */
573 if (dominated_by_p (CDI_DOMINATORS, gimple_bb (phi), gimple_bb (arg0_def_stmt)))
574 return NULL;
575 /* If arg1 is an INTEGER_CST, fold it to new type. */
576 if (INTEGRAL_TYPE_P (TREE_TYPE (new_arg0))
577 && int_fits_type_p (arg1, TREE_TYPE (new_arg0)))
578 {
579 if (gimple_assign_cast_p (arg0_def_stmt))
580 {
581 /* For the INTEGER_CST case, we are just moving the
582 conversion from one place to another, which can often
583 hurt as the conversion moves further away from the
584 statement that computes the value. So, perform this
585 only if new_arg0 is an operand of COND_STMT, or
586 if arg0_def_stmt is the only non-debug stmt in
587 its basic block, because then it is possible this
588 could enable further optimizations (minmax replacement
589 etc.). See PR71016. */
590 if (new_arg0 != gimple_cond_lhs (cond_stmt)
591 && new_arg0 != gimple_cond_rhs (cond_stmt)
592 && gimple_bb (arg0_def_stmt) == e0->src)
593 {
594 gsi = gsi_for_stmt (arg0_def_stmt);
595 gsi_prev_nondebug (&gsi);
596 if (!gsi_end_p (gsi))
597 {
598 if (gassign *assign
599 = dyn_cast <gassign *> (gsi_stmt (gsi)))
600 {
601 tree lhs = gimple_assign_lhs (assign);
602 enum tree_code ass_code
603 = gimple_assign_rhs_code (assign);
604 if (ass_code != MAX_EXPR && ass_code != MIN_EXPR)
605 return NULL;
606 if (lhs != gimple_assign_rhs1 (arg0_def_stmt))
607 return NULL;
608 gsi_prev_nondebug (&gsi);
609 if (!gsi_end_p (gsi))
610 return NULL;
611 }
612 else
613 return NULL;
614 }
615 gsi = gsi_for_stmt (arg0_def_stmt);
616 gsi_next_nondebug (&gsi);
617 if (!gsi_end_p (gsi))
618 return NULL;
619 }
620 new_arg1 = fold_convert (TREE_TYPE (new_arg0), arg1);
621 }
622 else
623 return NULL;
624 }
625 else
626 return NULL;
627 }
628
629 /* If arg0/arg1 have > 1 use, then this transformation actually increases
630 the number of expressions evaluated at runtime. */
631 if (!has_single_use (arg0)
632 || (arg1_def_stmt && !has_single_use (arg1)))
633 return NULL;
634
635 /* If types of new_arg0 and new_arg1 are different bailout. */
636 if (!types_compatible_p (TREE_TYPE (new_arg0), TREE_TYPE (new_arg1)))
637 return NULL;
638
639 /* Create a new PHI stmt. */
640 result = PHI_RESULT (phi);
641 temp = make_ssa_name (TREE_TYPE (new_arg0), NULL);
642 newphi = create_phi_node (temp, gimple_bb (phi));
643
644 if (dump_file && (dump_flags & TDF_DETAILS))
645 {
646 fprintf (dump_file, "PHI ");
647 print_generic_expr (dump_file, gimple_phi_result (phi));
648 fprintf (dump_file,
649 " changed to factor conversion out from COND_EXPR.\n");
650 fprintf (dump_file, "New stmt with CAST that defines ");
651 print_generic_expr (dump_file, result);
652 fprintf (dump_file, ".\n");
653 }
654
655 /* Remove the old cast(s) that has single use. */
656 gsi_for_def = gsi_for_stmt (arg0_def_stmt);
657 gsi_remove (&gsi_for_def, true);
658 release_defs (arg0_def_stmt);
659
660 if (arg1_def_stmt)
661 {
662 gsi_for_def = gsi_for_stmt (arg1_def_stmt);
663 gsi_remove (&gsi_for_def, true);
664 release_defs (arg1_def_stmt);
665 }
666
667 add_phi_arg (newphi, new_arg0, e0, locus);
668 add_phi_arg (newphi, new_arg1, e1, locus);
669
670 /* Create the conversion stmt and insert it. */
671 if (convert_code == VIEW_CONVERT_EXPR)
672 {
673 temp = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (result), temp);
674 new_stmt = gimple_build_assign (result, temp);
675 }
676 else
677 new_stmt = gimple_build_assign (result, convert_code, temp);
678 gsi = gsi_after_labels (gimple_bb (phi));
679 gsi_insert_before (&gsi, new_stmt, GSI_SAME_STMT);
680
681 /* Remove the original PHI stmt. */
682 gsi = gsi_for_stmt (phi);
683 gsi_remove (&gsi, true);
684
685 statistics_counter_event (cfun, "factored out cast", 1);
686
687 return newphi;
688}
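
[Editorial illustration, not part of the analyzed file.] A hypothetical source-level example of the factoring this function performs (PR66726): both PHI arguments are fed by the same kind of conversion, so the conversion is pulled below the PHI.

  /* Before (conceptually):
       tmp = c ? (long) x : (long) y;   // each PHI arm has its own cast
     After factoring:
       t   = c ? x : y;                 // new, narrower PHI
       tmp = (long) t;                  // single cast of the PHI result */
  long factor_example (int c, int x, int y)
  {
    return c ? (long) x : (long) y;
  }
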
689
690/* Optimize
691 # x_5 in range [cst1, cst2] where cst2 = cst1 + 1
692 if (x_5 op cstN) # where op is == or != and N is 1 or 2
693 goto bb3;
694 else
695 goto bb4;
696 bb3:
697 bb4:
698 # r_6 = PHI<cst3(2), cst4(3)> # where cst3 == cst4 + 1 or cst4 == cst3 + 1
699
700 to r_6 = x_5 + (min (cst3, cst4) - cst1) or
701 r_6 = (min (cst3, cst4) + cst1) - x_5 depending on op, N and which
702 of cst3 and cst4 is smaller. */
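
[Editorial illustration, not part of the analyzed file.] Plugging concrete numbers into the comment above: with x_5 known to be in [0, 1], cst1 = 0 and the PHI selecting 3 or 4, the select collapses to an addition, since min (cst3, cst4) - cst1 == 3.

  int two_value_example (int x)   /* assume x is known to be 0 or 1 */
  {
    return x == 0 ? 3 : 4;        /* becomes: return x + 3; */
  }
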
703
704static bool
705two_value_replacement (basic_block cond_bb, basic_block middle_bb,
706 edge e1, gphi *phi, tree arg0, tree arg1)
707{
708 /* Only look for adjacent integer constants. */
709 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0))
710 || !INTEGRAL_TYPE_P (TREE_TYPE (arg1))
711 || TREE_CODE (arg0) != INTEGER_CST
712 || TREE_CODE (arg1) != INTEGER_CST
713 || (tree_int_cst_lt (arg0, arg1)
714 ? wi::to_widest (arg0) + 1 != wi::to_widest (arg1)
715 : wi::to_widest (arg1) + 1 != wi::to_widest (arg0)))
716 return false;
717
718 if (!empty_block_p (middle_bb))
719 return false;
720
721 gimple *stmt = last_stmt (cond_bb);
722 tree lhs = gimple_cond_lhs (stmt);
723 tree rhs = gimple_cond_rhs (stmt);
724
725 if (TREE_CODE (lhs) != SSA_NAME
726 || !INTEGRAL_TYPE_P (TREE_TYPE (lhs))
727 || TREE_CODE (rhs) != INTEGER_CST)
728 return false;
729
730 switch (gimple_cond_code (stmt))
731 {
732 case EQ_EXPR:
733 case NE_EXPR:
734 break;
735 default:
736 return false;
737 }
738
739 /* Defer boolean x ? 0 : {1,-1} or x ? {1,-1} : 0 to
740 match_simplify_replacement. */
741 if (TREE_CODE (TREE_TYPE (lhs)) == BOOLEAN_TYPE
742 && (integer_zerop (arg0)
743 || integer_zerop (arg1)
744 || TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE
745 || (TYPE_PRECISION (TREE_TYPE (arg0))
746 <= TYPE_PRECISION (TREE_TYPE (lhs)))))
747 return false;
748
749 wide_int min, max;
750 value_range r;
751 get_range_query (cfun)->range_of_expr (r, lhs);
752
753 if (r.kind () == VR_RANGE)
754 {
755 min = r.lower_bound ();
756 max = r.upper_bound ();
757 }
758 else
759 {
760 int prec = TYPE_PRECISION (TREE_TYPE (lhs));
761 signop sgn = TYPE_SIGN (TREE_TYPE (lhs));
762 min = wi::min_value (prec, sgn);
763 max = wi::max_value (prec, sgn);
764 }
765 if (min + 1 != max
766 || (wi::to_wide (rhs) != min
767 && wi::to_wide (rhs) != max))
768 return false;
769
770 /* We need to know which is the true edge and which is the false
771 edge so that we know when to invert the condition below. */
772 edge true_edge, false_edge;
773 extract_true_false_edges_from_block (cond_bb, &true_edge, &false_edge);
774 if ((gimple_cond_code (stmt) == EQ_EXPR)
775 ^ (wi::to_wide (rhs) == max)
776 ^ (e1 == false_edge))
777 std::swap (arg0, arg1);
778
779 tree type;
780 if (TYPE_PRECISION (TREE_TYPE (lhs)) == TYPE_PRECISION (TREE_TYPE (arg0)))
781 {
782 /* Avoid performing the arithmetics in bool type which has different
783 semantics, otherwise prefer unsigned types from the two with
784 the same precision. */
785 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE
786 || !TYPE_UNSIGNED (TREE_TYPE (arg0)))
787 type = TREE_TYPE (lhs);
788 else
789 type = TREE_TYPE (arg0);
790 }
791 else if (TYPE_PRECISION (TREE_TYPE (lhs)) > TYPE_PRECISION (TREE_TYPE (arg0)))
792 type = TREE_TYPE (lhs);
793 else
794 type = TREE_TYPE (arg0);
795
796 min = wide_int::from (min, TYPE_PRECISION (type),
797 TYPE_SIGN (TREE_TYPE (lhs)));
798 wide_int a = wide_int::from (wi::to_wide (arg0), TYPE_PRECISION (type),
799 TYPE_SIGN (TREE_TYPE (arg0)));
800 enum tree_code code;
801 wi::overflow_type ovf;
802 if (tree_int_cst_lt (arg0, arg1))
803 {
804 code = PLUS_EXPR;
805 a -= min;
806 if (!TYPE_UNSIGNED (type))
807 {
808 /* lhs is known to be in range [min, min+1] and we want to add a
809 to it. Check if that operation can overflow for those 2 values
810 and if yes, force unsigned type. */
811 wi::add (min + (wi::neg_p (a) ? 0 : 1), a, SIGNED, &ovf);
812 if (ovf)
813 type = unsigned_type_for (type);
814 }
815 }
816 else
817 {
818 code = MINUS_EXPR;
819 a += min;
820 if (!TYPE_UNSIGNED (type))
821 {
822 /* lhs is known to be in range [min, min+1] and we want to subtract
823 it from a. Check if that operation can overflow for those 2
824 values and if yes, force unsigned type. */
825 wi::sub (a, min + (wi::neg_p (min) ? 0 : 1), SIGNED, &ovf);
826 if (ovf)
827 type = unsigned_type_for (type);
828 }
829 }
830
831 tree arg = wide_int_to_tree (type, a);
832 gimple_seq stmts = NULL;
833 lhs = gimple_convert (&stmts, type, lhs);
834 tree new_rhs;
835 if (code == PLUS_EXPR)
836 new_rhs = gimple_build (&stmts, PLUS_EXPR, type, lhs, arg);
837 else
838 new_rhs = gimple_build (&stmts, MINUS_EXPR, type, arg, lhs);
839 new_rhs = gimple_convert (&stmts, TREE_TYPE (arg0), new_rhs);
840 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
841 gsi_insert_seq_before (&gsi, stmts, GSI_SAME_STMT);
842
843 replace_phi_edge_with_variable (cond_bb, e1, phi, new_rhs);
844
845 /* Note that we optimized this PHI. */
846 return true;
847}
848
849/* Return TRUE if SEQ/OP pair should be allowed during early phiopt.
850 Currently this is to allow MIN/MAX and ABS/NEGATE and constants. */
851static bool
852phiopt_early_allow (gimple_seq &seq, gimple_match_op &op)
853{
854 /* Don't allow functions. */
855 if (!op.code.is_tree_code ())
856 return false;
857 tree_code code = (tree_code)op.code;
858
859 /* For non-empty sequence, only allow one statement. */
860 if (!gimple_seq_empty_p (seq))
861 {
862 /* Check to make sure op was already a SSA_NAME. */
863 if (code != SSA_NAME)
864 return false;
865 if (!gimple_seq_singleton_p (seq))
866 return false;
867 gimple *stmt = gimple_seq_first_stmt (seq);
868 /* Only allow assignments. */
869 if (!is_gimple_assign (stmt))
870 return false;
871 if (gimple_assign_lhs (stmt) != op.ops[0])
872 return false;
873 code = gimple_assign_rhs_code (stmt);
874 }
875
876 switch (code)
877 {
878 case MIN_EXPR:
879 case MAX_EXPR:
880 case ABS_EXPR:
881 case ABSU_EXPR:
882 case NEGATE_EXPR:
883 case SSA_NAME:
884 return true;
885 case INTEGER_CST:
886 case REAL_CST:
887 case VECTOR_CST:
888 case FIXED_CST:
889 return true;
890 default:
891 return false;
892 }
893}
894
895/* gimple_simplify_phiopt is like gimple_simplify but designed for PHIOPT.
896 Return NULL if nothing can be simplified or the resulting simplified value
897 with parts pushed if EARLY_P was true. Also rejects non allowed tree code
898 if EARLY_P is set.
899 Takes the comparison from COMP_STMT and two args, ARG0 and ARG1 and tries
900 to simplify CMP ? ARG0 : ARG1.
901 Also try to simplify (!CMP) ? ARG1 : ARG0 if the non-inverse failed. */
902static tree
903gimple_simplify_phiopt (bool early_p, tree type, gimple *comp_stmt,
904 tree arg0, tree arg1,
905 gimple_seq *seq)
906{
907 tree result;
908 gimple_seq seq1 = NULL;
909 enum tree_code comp_code = gimple_cond_code (comp_stmt);
910 location_t loc = gimple_location (comp_stmt);
911 tree cmp0 = gimple_cond_lhs (comp_stmt);
912 tree cmp1 = gimple_cond_rhs (comp_stmt);
913 /* To handle special cases like floating point comparison, it is easier and
914 less error-prone to build a tree and gimplify it on the fly though it is
915 less efficient.
916 Don't use fold_build2 here as that might create (bool)a instead of just
917 "a != 0". */
918 tree cond = build2_loc (loc, comp_code, boolean_type_node,
919 cmp0, cmp1);
920 gimple_match_op op (gimple_match_cond::UNCOND,
921 COND_EXPR, type, cond, arg0, arg1);
922
923 if (op.resimplify (&seq1, follow_all_ssa_edges))
924 {
925 /* Early we want only to allow some generated tree codes. */
926 if (!early_p
927 || phiopt_early_allow (seq1, op))
928 {
929 result = maybe_push_res_to_seq (&op, &seq1);
930 if (result)
931 {
932 if (loc != UNKNOWN_LOCATION)
933 annotate_all_with_location (seq1, loc);
934 gimple_seq_add_seq_without_update (seq, seq1);
935 return result;
936 }
937 }
938 }
939 gimple_seq_discard (seq1);
940 seq1 = NULL;
941
942 /* Try the inverted comparison, that is !COMP ? ARG1 : ARG0. */
943 comp_code = invert_tree_comparison (comp_code, HONOR_NANS (cmp0));
944
945 if (comp_code == ERROR_MARK)
946 return NULL;
947
948 cond = build2_loc (loc,
949 comp_code, boolean_type_node,
950 cmp0, cmp1);
951 gimple_match_op op1 (gimple_match_cond::UNCOND,
952 COND_EXPR, type, cond, arg1, arg0);
953
954 if (op1.resimplify (&seq1, follow_all_ssa_edges))
955 {
956 /* Early we want only to allow some generated tree codes. */
957 if (!early_p
958 || phiopt_early_allow (seq1, op1))
959 {
960 result = maybe_push_res_to_seq (&op1, &seq1);
961 if (result)
962 {
963 if (loc != UNKNOWN_LOCATION)
964 annotate_all_with_location (seq1, loc);
965 gimple_seq_add_seq_without_update (seq, seq1);
966 return result;
967 }
968 }
969 }
970 gimple_seq_discard (seq1);
971
972 return NULL;
973}
974
975/* The function match_simplify_replacement does the main work of doing the
976 replacement using match and simplify. Return true if the replacement is done.
977 Otherwise return false.
978 BB is the basic block where the replacement is going to be done on. ARG0
979 is argument 0 from PHI. Likewise for ARG1. */
980
981static bool
982match_simplify_replacement (basic_block cond_bb, basic_block middle_bb,
983 edge e0, edge e1, gphi *phi,
984 tree arg0, tree arg1, bool early_p)
985{
986 gimple *stmt;
987 gimple_stmt_iterator gsi;
988 edge true_edge, false_edge;
989 gimple_seq seq = NULL;
990 tree result;
991 gimple *stmt_to_move = NULL;
992 auto_bitmap inserted_exprs;
993
994 /* Special case A ? B : B as this will always simplify to B. */
995 if (operand_equal_for_phi_arg_p (arg0, arg1))
996 return false;
997
998 /* If the basic block only has a cheap preparation statement,
999 allow it and move it once the transformation is done. */
1000 if (!empty_block_p (middle_bb))
1001 {
1002 if (!single_pred_p (middle_bb))
1003 return false;
1004
1005 /* The middle bb cannot have phi nodes as we don't
1006 move those assignments yet. */
1007 if (!gimple_seq_empty_p (phi_nodes (middle_bb)))
1008 return false;
1009
1010 stmt_to_move = last_and_only_stmt (middle_bb);
1011 if (!stmt_to_move)
1012 return false;
1013
1014 if (gimple_vuse (stmt_to_move))
1015 return false;
1016
1017 if (gimple_could_trap_p (stmt_to_move)
1018 || gimple_has_side_effects (stmt_to_move))
1019 return false;
1020
1021 if (gimple_uses_undefined_value_p (stmt_to_move))
1022 return false;
1023
1024 /* Allow assignments but not calls.
1025 As const calls don't match any of the above, yet they could
1026 still have some side-effects - they could contain
1027 gimple_could_trap_p statements, like floating point
1028 exceptions or integer division by zero. See PR70586.
1029 FIXME: perhaps gimple_has_side_effects or gimple_could_trap_p
1030 should handle this. */
1031 if (!is_gimple_assign (stmt_to_move))
1032 return false;
1033
1034 tree lhs = gimple_assign_lhs (stmt_to_move);
1035 gimple *use_stmt;
1036 use_operand_p use_p;
1037
1038 /* Allow only a statement which feeds into the phi. */
1039 if (!lhs || TREE_CODE (lhs) != SSA_NAME
1040 || !single_imm_use (lhs, &use_p, &use_stmt)
1041 || use_stmt != phi)
1042 return false;
1043 }
1044
1045 /* At this point we know we have a GIMPLE_COND with two successors.
1046 One successor is BB, the other successor is an empty block which
1047 falls through into BB.
1048
1049 There is a single PHI node at the join point (BB).
1050
1051 So, given the condition COND, and the two PHI arguments, match and simplify
1052 can happen on (COND) ? arg0 : arg1. */
1053
1054 stmt = last_stmt (cond_bb);
1055
1056 /* We need to know which is the true edge and which is the false
1057 edge so that we know when to invert the condition below. */
1058 extract_true_false_edges_from_block (cond_bb, &true_edge, &false_edge);
1059 if (e1 == true_edge || e0 == false_edge)
1060 std::swap (arg0, arg1);
1061
1062 tree type = TREE_TYPE (gimple_phi_result (phi));
1063 result = gimple_simplify_phiopt (early_p, type, stmt,
1064 arg0, arg1,
1065 &seq);
1066 if (!result)
1067 return false;
1068
1069 gsi = gsi_last_bb (cond_bb);
1070 /* Insert the sequence generated from gimple_simplify_phiopt. */
1071 if (seq)
1072 {
1073 // Mark the lhs of the new statements maybe for dce
1074 gimple_stmt_iterator gsi1 = gsi_start (seq);
1075 for (; !gsi_end_p (gsi1); gsi_next (&gsi1))
1076 {
1077 gimple *stmt = gsi_stmt (gsi1);
1078 tree name = gimple_get_lhs (stmt);
1079 if (name && TREE_CODE (name) == SSA_NAME)
1080 bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (name));
1081 }
1082 gsi_insert_seq_before (&gsi, seq, GSI_CONTINUE_LINKING);
1083 }
1084
1085 /* If there was a statement to move, move it to right before
1086 the original conditional. */
1087 if (stmt_to_move)
1088 {
1089 if (dump_file && (dump_flags & TDF_DETAILS))
1090 {
1091 fprintf (dump_file, "statement un-sinked:\n");
1092 print_gimple_stmt (dump_file, stmt_to_move, 0,
1093 TDF_VOPS|TDF_MEMSYMS);
1094 }
1095
1096 tree name = gimple_get_lhs (stmt_to_move);
1097 // Mark the name to be renamed if there is one.
1098 if (name && TREE_CODE (name) == SSA_NAME)
1099 bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (name));
1100 gimple_stmt_iterator gsi1 = gsi_for_stmt (stmt_to_move);
1101 gsi_move_before (&gsi1, &gsi);
1102 reset_flow_sensitive_info (gimple_assign_lhs (stmt_to_move));
1103 }
1104
1105 replace_phi_edge_with_variable (cond_bb, e1, phi, result, inserted_exprs);
1106
1107 /* Add Statistic here even though replace_phi_edge_with_variable already
1108 does it as we want to be able to count when match-simplify happens vs
1109 the others. */
1110 statistics_counter_event (cfun, "match-simplify PHI replacement", 1);
1111
1112 /* Note that we optimized this PHI. */
1113 return true;
1114}
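
[Editorial illustration, not part of the analyzed file.] A hypothetical C example of a PHI that this match-and-simplify path can turn into a single statement: the COND_EXPR built from the condition and the two PHI arguments resimplifies (via match.pd rules) to a MAX_EXPR, so the branch disappears.

  int match_simplify_example (int a, int b)
  {
    int r = b;
    if (a > b)        /* cond_bb ends in a GIMPLE_COND          */
      r = a;          /* empty middle block                      */
    return r;         /* r = PHI <a, b>  ->  r = MAX_EXPR <a, b> */
  }
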
1115
1116/* Update *ARG which is defined in STMT so that it contains the
1117 computed value if that seems profitable. Return true if the
1118 statement is made dead by that rewriting. */
1119
1120static bool
1121jump_function_from_stmt (tree *arg, gimple *stmt)
1122{
1123 enum tree_code code = gimple_assign_rhs_code (stmt);
1124 if (code == ADDR_EXPR)
1125 {
1126 /* For arg = &p->i transform it to p, if possible. */
1127 tree rhs1 = gimple_assign_rhs1 (stmt);
1128 poly_int64 offset;
1129 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs1, 0),
1130 &offset);
1131 if (tem
1132 && TREE_CODE (tem) == MEM_REF
1133 && known_eq (mem_ref_offset (tem) + offset, 0))
1134 {
1135 *arg = TREE_OPERAND (tem, 0);
1136 return true;
1137 }
1138 }
1139 /* TODO: Much like IPA-CP jump-functions we want to handle constant
1140 additions symbolically here, and we'd need to update the comparison
1141 code that compares the arg + cst tuples in our caller. For now the
1142 code above exactly handles the VEC_BASE pattern from vec.h. */
1143 return false;
1144}
1145
1146/* RHS is a source argument in a BIT_AND_EXPR which feeds a conditional
1147 of the form SSA_NAME NE 0.
1148
1149 If RHS is fed by a simple EQ_EXPR comparison of two values, see if
1150 the two input values of the EQ_EXPR match arg0 and arg1.
1151
1152 If so update *code and return TRUE. Otherwise return FALSE. */
1153
1154static bool
1155rhs_is_fed_for_value_replacement (const_tree arg0, const_tree arg1,
1156 enum tree_code *code, const_tree rhs)
1157{
1158 /* Obviously if RHS is not an SSA_NAME, we can't look at the defining
1159 statement. */
1160 if (TREE_CODE (rhs) == SSA_NAME)
1161 {
1162 gimple *def1 = SSA_NAME_DEF_STMT (rhs);
1163
1164 /* Verify the defining statement has an EQ_EXPR on the RHS. */
1165 if (is_gimple_assign (def1) && gimple_assign_rhs_code (def1) == EQ_EXPR)
1166 {
1167 /* Finally verify the source operands of the EQ_EXPR are equal
1168 to arg0 and arg1. */
1169 tree op0 = gimple_assign_rhs1 (def1);
1170 tree op1 = gimple_assign_rhs2 (def1);
1171 if ((operand_equal_for_phi_arg_p (arg0, op0)
1172 && operand_equal_for_phi_arg_p (arg1, op1))
1173 || (operand_equal_for_phi_arg_p (arg0, op1)
1174 && operand_equal_for_phi_arg_p (arg1, op0)))
1175 {
1176 /* We will perform the optimization. */
1177 *code = gimple_assign_rhs_code (def1);
1178 return true;
1179 }
1180 }
1181 }
1182 return false;
1183}
1184
1185/* Return TRUE if arg0/arg1 are equal to the rhs/lhs or lhs/rhs of COND.
1186
1187   Also return TRUE if arg0/arg1 are equal to the source arguments of
1188   an EQ comparison feeding a BIT_AND_EXPR which feeds COND.
1189
1190 Return FALSE otherwise. */
1191
1192static bool
1193operand_equal_for_value_replacement (const_tree arg0, const_tree arg1,
1194 enum tree_code *code, gimple *cond)
1195{
1196 gimple *def;
1197 tree lhs = gimple_cond_lhs (cond);
1198 tree rhs = gimple_cond_rhs (cond);
1199
1200 if ((operand_equal_for_phi_arg_p (arg0, lhs)
1201 && operand_equal_for_phi_arg_p (arg1, rhs))
1202 || (operand_equal_for_phi_arg_p (arg1, lhs)
1203 && operand_equal_for_phi_arg_p (arg0, rhs)))
1204 return true;
1205
1206 /* Now handle more complex case where we have an EQ comparison
1207 which feeds a BIT_AND_EXPR which feeds COND.
1208
1209 First verify that COND is of the form SSA_NAME NE 0. */
1210 if (*code != NE_EXPR || !integer_zerop (rhs)
1211      || TREE_CODE (lhs) != SSA_NAME)
1212 return false;
1213
1214 /* Now ensure that SSA_NAME is set by a BIT_AND_EXPR. */
1215  def = SSA_NAME_DEF_STMT (lhs);
1216 if (!is_gimple_assign (def) || gimple_assign_rhs_code (def) != BIT_AND_EXPR)
1217 return false;
1218
1219 /* Now verify arg0/arg1 correspond to the source arguments of an
1220 EQ comparison feeding the BIT_AND_EXPR. */
1221
1222 tree tmp = gimple_assign_rhs1 (def);
1223 if (rhs_is_fed_for_value_replacement (arg0, arg1, code, tmp))
1224 return true;
1225
1226 tmp = gimple_assign_rhs2 (def);
1227 if (rhs_is_fed_for_value_replacement (arg0, arg1, code, tmp))
1228 return true;
1229
1230 return false;
1231}
1232
1233/* Returns true if ARG is a neutral element for operation CODE
1234 on the RIGHT side. */
1235
1236static bool
1237neutral_element_p (tree_code code, tree arg, bool right)
1238{
1239 switch (code)
1240 {
1241 case PLUS_EXPR:
1242 case BIT_IOR_EXPR:
1243 case BIT_XOR_EXPR:
1244 return integer_zerop (arg);
1245
1246 case LROTATE_EXPR:
1247 case RROTATE_EXPR:
1248 case LSHIFT_EXPR:
1249 case RSHIFT_EXPR:
1250 case MINUS_EXPR:
1251 case POINTER_PLUS_EXPR:
1252 return right && integer_zerop (arg);
1253
1254 case MULT_EXPR:
1255 return integer_onep (arg);
1256
1257 case TRUNC_DIV_EXPR:
1258 case CEIL_DIV_EXPR:
1259 case FLOOR_DIV_EXPR:
1260 case ROUND_DIV_EXPR:
1261 case EXACT_DIV_EXPR:
1262 return right && integer_onep (arg);
1263
1264 case BIT_AND_EXPR:
1265 return integer_all_onesp (arg);
1266
1267 default:
1268 return false;
1269 }
1270}
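
/* [Editorial sketch, not part of the GCC sources.]  What neutral_element_p
   buys value_replacement below, written at the C level:

     r = (x != 0) ? x + y : y;     /* 0 is neutral for +, so r = x + y      */
     r = (y != 0) ? x << y : x;    /* 0 is right-neutral for <<, r = x << y */
     r = (x != 1) ? x * y : y;     /* 1 is neutral for *, so r = x * y      */

   In each case the branch only special-cases the neutral value, so the PHI
   can be replaced by the unconditional operation.  */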
1271
1272/* Returns true if ARG is an absorbing element for operation CODE. */
1273
1274static bool
1275absorbing_element_p (tree_code code, tree arg, bool right, tree rval)
1276{
1277 switch (code)
1278 {
1279 case BIT_IOR_EXPR:
1280 return integer_all_onesp (arg);
1281
1282 case MULT_EXPR:
1283 case BIT_AND_EXPR:
1284 return integer_zerop (arg);
1285
1286 case LSHIFT_EXPR:
1287 case RSHIFT_EXPR:
1288 case LROTATE_EXPR:
1289 case RROTATE_EXPR:
1290 return !right && integer_zerop (arg);
1291
1292 case TRUNC_DIV_EXPR:
1293 case CEIL_DIV_EXPR:
1294 case FLOOR_DIV_EXPR:
1295 case ROUND_DIV_EXPR:
1296 case EXACT_DIV_EXPR:
1297 case TRUNC_MOD_EXPR:
1298 case CEIL_MOD_EXPR:
1299 case FLOOR_MOD_EXPR:
1300 case ROUND_MOD_EXPR:
1301 return (!right
1302 && integer_zerop (arg)
1303	      && tree_single_nonzero_warnv_p (rval, NULL));
1304
1305 default:
1306 return false;
1307 }
1308}
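
/* [Editorial sketch, not part of the GCC sources.]  The corresponding
   picture for absorbing elements:

     r = (x != 0) ? x * y : 0;     /* 0 absorbs *, so r = x * y             */
     r = (x != 0) ? x & y : 0;     /* 0 absorbs &, so r = x & y             */
     r = (x != 0) ? x % y : 0;     /* 0 on the left absorbs %, but only
                                      when y is provably nonzero            */

   Again the branch only special-cases the absorbing value; the RVAL /
   tree_single_nonzero_warnv_p check above guards the division and modulo
   cases against introducing a division by zero.  */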
1309
1310/* The function value_replacement does the main work of doing the value
1311 replacement. Return non-zero if the replacement is done. Otherwise return
1312 0. If we remove the middle basic block, return 2.
1313   BB is the basic block where the replacement is going to be done.  ARG0
1314 is argument 0 from the PHI. Likewise for ARG1. */
1315
1316static int
1317value_replacement (basic_block cond_bb, basic_block middle_bb,
1318 edge e0, edge e1, gphi *phi, tree arg0, tree arg1)
1319{
1320 gimple_stmt_iterator gsi;
1321 gimple *cond;
1322 edge true_edge, false_edge;
1323 enum tree_code code;
1324 bool empty_or_with_defined_p = true;
1325
1326 /* If the type says honor signed zeros we cannot do this
1327 optimization. */
1328 if (HONOR_SIGNED_ZEROS (arg1))
1329 return 0;
1330
1331 /* If there is a statement in MIDDLE_BB that defines one of the PHI
1332 arguments, then adjust arg0 or arg1. */
1333 gsi = gsi_start_nondebug_after_labels_bb (middle_bb);
1334 while (!gsi_end_p (gsi))
1335 {
1336 gimple *stmt = gsi_stmt (gsi);
1337 tree lhs;
1338 gsi_next_nondebug (&gsi);
1339 if (!is_gimple_assign (stmt))
1340 {
1341 if (gimple_code (stmt) != GIMPLE_PREDICT
1342 && gimple_code (stmt) != GIMPLE_NOP)
1343 empty_or_with_defined_p = false;
1344 continue;
1345 }
1346 /* Now try to adjust arg0 or arg1 according to the computation
1347 in the statement. */
1348 lhs = gimple_assign_lhs (stmt);
1349 if (!(lhs == arg0
1350 && jump_function_from_stmt (&arg0, stmt))
1351 || (lhs == arg1
1352 && jump_function_from_stmt (&arg1, stmt)))
1353 empty_or_with_defined_p = false;
1354 }
1355
1356 cond = last_stmt (cond_bb);
1357 code = gimple_cond_code (cond);
1358
1359 /* This transformation is only valid for equality comparisons. */
1360 if (code != NE_EXPR && code != EQ_EXPR)
1361 return 0;
1362
1363 /* We need to know which is the true edge and which is the false
1364     edge so that we know if we have abs or negative abs. */
1365 extract_true_false_edges_from_block (cond_bb, &true_edge, &false_edge);
1366
1367 /* At this point we know we have a COND_EXPR with two successors.
1368 One successor is BB, the other successor is an empty block which
1369 falls through into BB.
1370
1371 The condition for the COND_EXPR is known to be NE_EXPR or EQ_EXPR.
1372
1373 There is a single PHI node at the join point (BB) with two arguments.
1374
1375 We now need to verify that the two arguments in the PHI node match
1376 the two arguments to the equality comparison. */
1377
1378 bool equal_p = operand_equal_for_value_replacement (arg0, arg1, &code, cond);
1379 bool maybe_equal_p = false;
1380 if (!equal_p
1381 && empty_or_with_defined_p
1382      && TREE_CODE (gimple_cond_rhs (cond)) == INTEGER_CST
1383      && (operand_equal_for_phi_arg_p (gimple_cond_lhs (cond), arg0)
1384	  ? TREE_CODE (arg1) == INTEGER_CST
1385	  : (operand_equal_for_phi_arg_p (gimple_cond_lhs (cond), arg1)
1386	     && TREE_CODE (arg0) == INTEGER_CST)))
1387 maybe_equal_p = true;
1388 if (equal_p || maybe_equal_p)
1389 {
1390 edge e;
1391 tree arg;
1392
1393 /* For NE_EXPR, we want to build an assignment result = arg where
1394 arg is the PHI argument associated with the true edge. For
1395 EQ_EXPR we want the PHI argument associated with the false edge. */
1396 e = (code == NE_EXPR ? true_edge : false_edge);
1397
1398 /* Unfortunately, E may not reach BB (it may instead have gone to
1399 OTHER_BLOCK). If that is the case, then we want the single outgoing
1400 edge from OTHER_BLOCK which reaches BB and represents the desired
1401 path from COND_BLOCK. */
1402 if (e->dest == middle_bb)
1403 e = single_succ_edge (e->dest);
1404
1405 /* Now we know the incoming edge to BB that has the argument for the
1406 RHS of our new assignment statement. */
1407 if (e0 == e)
1408 arg = arg0;
1409 else
1410 arg = arg1;
1411
1412 /* If the middle basic block was empty or is defining the
1413 PHI arguments and this is a single phi where the args are different
1414 for the edges e0 and e1 then we can remove the middle basic block. */
1415 if (empty_or_with_defined_p
1416 && single_non_singleton_phi_for_edges (phi_nodes (gimple_bb (phi)),
1417 e0, e1) == phi)
1418 {
1419 use_operand_p use_p;
1420 gimple *use_stmt;
1421
1422 /* Even if arg0/arg1 isn't equal to second operand of cond, we
1423 can optimize away the bb if we can prove it doesn't care whether
1424 phi result is arg0/arg1 or second operand of cond. Consider:
1425 <bb 2> [local count: 118111600]:
1426 if (i_2(D) == 4)
1427 goto <bb 4>; [97.00%]
1428 else
1429 goto <bb 3>; [3.00%]
1430
1431 <bb 3> [local count: 3540129]:
1432
1433 <bb 4> [local count: 118111600]:
1434 # i_6 = PHI <i_2(D)(3), 6(2)>
1435 _3 = i_6 != 0;
1436 Here, carg is 4, oarg is 6, crhs is 0, and because
1437 (4 != 0) == (6 != 0), we don't care if i_6 is 4 or 6, both
1438	     have the same outcome.  So, we can optimize this to:
1439 _3 = i_2(D) != 0;
1440	     If the single imm use of phi result is >, >=, < or <=, similarly
1441 we can check if both carg and oarg compare the same against
1442 crhs using ccode. */
1443 if (maybe_equal_p
1444	      && TREE_CODE (arg) != INTEGER_CST
1445 && single_imm_use (gimple_phi_result (phi), &use_p, &use_stmt))
1446 {
1447 enum tree_code ccode = ERROR_MARK;
1448	      tree clhs = NULL_TREE, crhs = NULL_TREE;
1449 tree carg = gimple_cond_rhs (cond);
1450 tree oarg = e0 == e ? arg1 : arg0;
1451 if (is_gimple_assign (use_stmt)
1452		  && (TREE_CODE_CLASS (gimple_assign_rhs_code (use_stmt))
1453 == tcc_comparison))
1454 {
1455 ccode = gimple_assign_rhs_code (use_stmt);
1456 clhs = gimple_assign_rhs1 (use_stmt);
1457 crhs = gimple_assign_rhs2 (use_stmt);
1458 }
1459 else if (gimple_code (use_stmt) == GIMPLE_COND)
1460 {
1461 ccode = gimple_cond_code (use_stmt);
1462 clhs = gimple_cond_lhs (use_stmt);
1463 crhs = gimple_cond_rhs (use_stmt);
1464 }
1465 if (ccode != ERROR_MARK
1466 && clhs == gimple_phi_result (phi)
1467		  && TREE_CODE (crhs) == INTEGER_CST)
1468 switch (ccode)
1469 {
1470 case EQ_EXPR:
1471 case NE_EXPR:
1472 if (!tree_int_cst_equal (crhs, carg)
1473 && !tree_int_cst_equal (crhs, oarg))
1474 equal_p = true;
1475 break;
1476 case GT_EXPR:
1477 if (tree_int_cst_lt (crhs, carg)
1478 == tree_int_cst_lt (crhs, oarg))
1479 equal_p = true;
1480 break;
1481 case GE_EXPR:
1482 if (tree_int_cst_le (crhs, carg)
1483 == tree_int_cst_le (crhs, oarg))
1484 equal_p = true;
1485 break;
1486 case LT_EXPR:
1487 if (tree_int_cst_lt (carg, crhs)
1488 == tree_int_cst_lt (oarg, crhs))
1489 equal_p = true;
1490 break;
1491 case LE_EXPR:
1492 if (tree_int_cst_le (carg, crhs)
1493 == tree_int_cst_le (oarg, crhs))
1494 equal_p = true;
1495 break;
1496 default:
1497 break;
1498 }
1499 if (equal_p)
1500 {
1501 tree phires = gimple_phi_result (phi);
1502	      if (SSA_NAME_RANGE_INFO (phires))
1503 {
1504 /* After the optimization PHI result can have value
1505 which it couldn't have previously. */
1506 int_range_max r;
1507 if (get_global_range_query ()->range_of_expr (r, phires,
1508 phi))
1509 {
1510 int_range<2> tmp (carg, carg);
1511 r.union_ (tmp);
1512 reset_flow_sensitive_info (phires);
1513 set_range_info (phires, r);
1514 }
1515 else
1516 reset_flow_sensitive_info (phires);
1517 }
1518 }
1519	  if (equal_p && MAY_HAVE_DEBUG_BIND_STMTS)
1520 {
1521 imm_use_iterator imm_iter;
1522 tree phires = gimple_phi_result (phi);
1523	      tree temp = NULL_TREE;
1524 bool reset_p = false;
1525
1526 /* Add # DEBUG D#1 => arg != carg ? arg : oarg. */
1527	      FOR_EACH_IMM_USE_STMT (use_stmt, imm_iter, phires)
1528 {
1529 if (!is_gimple_debug (use_stmt))
1530 continue;
1531		  if (temp == NULL_TREE)
1532 {
1533 if (!single_pred_p (middle_bb)
1534			  || EDGE_COUNT (gimple_bb (phi)->preds) != 2)
1535 {
1536 /* But only if middle_bb has a single
1537 predecessor and phi bb has two, otherwise
1538 we could use a SSA_NAME not usable in that
1539 place or wrong-debug. */
1540 reset_p = true;
1541 break;
1542 }
1543 gimple_stmt_iterator gsi
1544 = gsi_after_labels (gimple_bb (phi));
1545		      tree type = TREE_TYPE (phires);
1546 temp = build_debug_expr_decl (type);
1547		      tree t = build2 (NE_EXPR, boolean_type_node,
1548 arg, carg);
1549 t = build3 (COND_EXPR, type, t, arg, oarg);
1550 gimple *g = gimple_build_debug_bind (temp, t, phi);
1551 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
1552 }
1553		  FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
1554 replace_exp (use_p, temp);
1555 update_stmt (use_stmt);
1556 }
1557 if (reset_p)
1558 reset_debug_uses (phi);
1559 }
1560 }
1561 if (equal_p)
1562 {
1563 replace_phi_edge_with_variable (cond_bb, e1, phi, arg);
1564 /* Note that we optimized this PHI. */
1565 return 2;
1566 }
1567 }
1568 else if (equal_p)
1569 {
1570 if (!single_pred_p (middle_bb))
1571 return 0;
1572      statistics_counter_event (cfun, "Replace PHI with "
1573 "variable/value_replacement", 1);
1574
1575 /* Replace the PHI arguments with arg. */
1576      SET_PHI_ARG_DEF (phi, e0->dest_idx, arg);
1577      SET_PHI_ARG_DEF (phi, e1->dest_idx, arg);
1578 if (dump_file && (dump_flags & TDF_DETAILS))
1579 {
1580 fprintf (dump_file, "PHI ");
1581 print_generic_expr (dump_file, gimple_phi_result (phi));
1582 fprintf (dump_file, " reduced for COND_EXPR in block %d to ",
1583 cond_bb->index);
1584 print_generic_expr (dump_file, arg);
1585 fprintf (dump_file, ".\n");
1586 }
1587 return 1;
1588 }
1589 }
1590
1591 if (!single_pred_p (middle_bb))
1592 return 0;
1593
1594 /* Now optimize (x != 0) ? x + y : y to just x + y. */
1595 gsi = gsi_last_nondebug_bb (middle_bb);
1596 if (gsi_end_p (gsi))
1597 return 0;
1598
1599 gimple *assign = gsi_stmt (gsi);
1600 if (!is_gimple_assign (assign)
1601      || (!INTEGRAL_TYPE_P (TREE_TYPE (arg0))
1602	  && !POINTER_TYPE_P (TREE_TYPE (arg0))))
1603 return 0;
1604
1605 if (gimple_assign_rhs_class (assign) != GIMPLE_BINARY_RHS)
1606 {
1607 /* If last stmt of the middle_bb is a conversion, handle it like
1608 a preparation statement through constant evaluation with
1609 checking for UB. */
1610 enum tree_code sc = gimple_assign_rhs_code (assign);
1611      if (CONVERT_EXPR_CODE_P (sc))
1612	assign = NULL;
1613 else
1614 return 0;
1615 }
1616
1617 /* Punt if there are (degenerate) PHIs in middle_bb, there should not be. */
1618 if (!gimple_seq_empty_p (phi_nodes (middle_bb)))
1619 return 0;
1620
1621 /* Allow up to 2 cheap preparation statements that prepare argument
1622 for assign, e.g.:
1623 if (y_4 != 0)
1624 goto <bb 3>;
1625 else
1626 goto <bb 4>;
1627 <bb 3>:
1628 _1 = (int) y_4;
1629 iftmp.0_6 = x_5(D) r<< _1;
1630 <bb 4>:
1631 # iftmp.0_2 = PHI <iftmp.0_6(3), x_5(D)(2)>
1632 or:
1633 if (y_3(D) == 0)
1634 goto <bb 4>;
1635 else
1636 goto <bb 3>;
1637 <bb 3>:
1638 y_4 = y_3(D) & 31;
1639 _1 = (int) y_4;
1640 _6 = x_5(D) r<< _1;
1641 <bb 4>:
1642 # _2 = PHI <x_5(D)(2), _6(3)> */
1643  gimple *prep_stmt[2] = { NULL, NULL };
1644 int prep_cnt;
1645 for (prep_cnt = 0; ; prep_cnt++)
1646 {
1647 if (prep_cnt || assign)
1648 gsi_prev_nondebug (&gsi);
1649 if (gsi_end_p (gsi))
1650 break;
1651
1652 gimple *g = gsi_stmt (gsi);
1653 if (gimple_code (g) == GIMPLE_LABEL)
1654 break;
1655
1656 if (prep_cnt == 2 || !is_gimple_assign (g))
1657 return 0;
1658
1659 tree lhs = gimple_assign_lhs (g);
1660 tree rhs1 = gimple_assign_rhs1 (g);
1661 use_operand_p use_p;
1662 gimple *use_stmt;
1663      if (TREE_CODE (lhs) != SSA_NAME
1664	  || TREE_CODE (rhs1) != SSA_NAME
1665	  || !INTEGRAL_TYPE_P (TREE_TYPE (lhs))
1666	  || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
1667 || !single_imm_use (lhs, &use_p, &use_stmt)
1668 || ((prep_cnt || assign)
1669 && use_stmt != (prep_cnt ? prep_stmt[prep_cnt - 1] : assign)))
1670 return 0;
1671 switch (gimple_assign_rhs_code (g))
1672 {
1673	CASE_CONVERT:
1674 break;
1675 case PLUS_EXPR:
1676 case BIT_AND_EXPR:
1677 case BIT_IOR_EXPR:
1678 case BIT_XOR_EXPR:
1679	  if (TREE_CODE (gimple_assign_rhs2 (g)) != INTEGER_CST)
1680 return 0;
1681 break;
1682 default:
1683 return 0;
1684 }
1685 prep_stmt[prep_cnt] = g;
1686 }
1687
1688 /* Only transform if it removes the condition. */
1689 if (!single_non_singleton_phi_for_edges (phi_nodes (gimple_bb (phi)), e0, e1))
1690 return 0;
1691
1692 /* Size-wise, this is always profitable. */
1693 if (optimize_bb_for_speed_p (cond_bb)
1694 /* The special case is useless if it has a low probability. */
1695      && profile_status_for_fn (cfun) != PROFILE_ABSENT
1696      && EDGE_PRED (middle_bb, 0)->probability < profile_probability::even ()
1697 /* If assign is cheap, there is no point avoiding it. */
1698 && estimate_num_insns_seq (bb_seq (middle_bb), &eni_time_weights)
1699 >= 3 * estimate_num_insns (cond, &eni_time_weights))
1700 return 0;
1701
1702 tree cond_lhs = gimple_cond_lhs (cond);
1703 tree cond_rhs = gimple_cond_rhs (cond);
1704
1705 /* Propagate the cond_rhs constant through preparation stmts,
1706 make sure UB isn't invoked while doing that. */
1707 for (int i = prep_cnt - 1; i >= 0; --i)
1708 {
1709 gimple *g = prep_stmt[i];
1710 tree grhs1 = gimple_assign_rhs1 (g);
1711 if (!operand_equal_for_phi_arg_p (cond_lhs, grhs1))
1712 return 0;
1713 cond_lhs = gimple_assign_lhs (g);
1714      cond_rhs = fold_convert (TREE_TYPE (grhs1), cond_rhs);
1715      if (TREE_CODE (cond_rhs) != INTEGER_CST
1716	  || TREE_OVERFLOW (cond_rhs))
1717	return 0;
1718      if (gimple_assign_rhs_class (g) == GIMPLE_BINARY_RHS)
1719	{
1720	  cond_rhs = int_const_binop (gimple_assign_rhs_code (g), cond_rhs,
1721				      gimple_assign_rhs2 (g));
1722	  if (TREE_OVERFLOW (cond_rhs))
1723	    return 0;
1724	}
1725      cond_rhs = fold_convert (TREE_TYPE (cond_lhs), cond_rhs);
1726      if (TREE_CODE (cond_rhs) != INTEGER_CST
1727	  || TREE_OVERFLOW (cond_rhs))
1728 return 0;
1729 }
1730
1731 tree lhs, rhs1, rhs2;
1732 enum tree_code code_def;
1733 if (assign)
1734 {
1735 lhs = gimple_assign_lhs (assign);
1736 rhs1 = gimple_assign_rhs1 (assign);
1737 rhs2 = gimple_assign_rhs2 (assign);
1738 code_def = gimple_assign_rhs_code (assign);
1739 }
1740 else
1741 {
1742      gcc_assert (prep_cnt > 0);
1743 lhs = cond_lhs;
1744      rhs1 = NULL_TREE;
1745      rhs2 = NULL_TREE;
1746 code_def = ERROR_MARK;
1747 }
1748
1749 if (((code == NE_EXPR && e1 == false_edge)
1750 || (code == EQ_EXPR && e1 == true_edge))
1751 && arg0 == lhs
1752      && ((assign == NULL
1753 && operand_equal_for_phi_arg_p (arg1, cond_rhs))
1754 || (assign
1755 && arg1 == rhs1
1756 && operand_equal_for_phi_arg_p (rhs2, cond_lhs)
1757 && neutral_element_p (code_def, cond_rhs, true))
1758 || (assign
1759 && arg1 == rhs2
1760 && operand_equal_for_phi_arg_p (rhs1, cond_lhs)
1761 && neutral_element_p (code_def, cond_rhs, false))
1762 || (assign
1763 && operand_equal_for_phi_arg_p (arg1, cond_rhs)
1764 && ((operand_equal_for_phi_arg_p (rhs2, cond_lhs)
1765 && absorbing_element_p (code_def, cond_rhs, true, rhs2))
1766 || (operand_equal_for_phi_arg_p (rhs1, cond_lhs)
1767 && absorbing_element_p (code_def,
1768 cond_rhs, false, rhs2))))))
1769 {
1770 gsi = gsi_for_stmt (cond);
1771 /* Moving ASSIGN might change VR of lhs, e.g. when moving u_6
1772 def-stmt in:
1773 if (n_5 != 0)
1774 goto <bb 3>;
1775 else
1776 goto <bb 4>;
1777
1778 <bb 3>:
1779 # RANGE [0, 4294967294]
1780 u_6 = n_5 + 4294967295;
1781
1782 <bb 4>:
1783 # u_3 = PHI <u_6(3), 4294967295(2)> */
1784 reset_flow_sensitive_info (lhs);
1785 gimple_stmt_iterator gsi_from;
1786 for (int i = prep_cnt - 1; i >= 0; --i)
1787 {
1788 tree plhs = gimple_assign_lhs (prep_stmt[i]);
1789 reset_flow_sensitive_info (plhs);
1790 gsi_from = gsi_for_stmt (prep_stmt[i]);
1791 gsi_move_before (&gsi_from, &gsi);
1792 }
1793 if (assign)
1794 {
1795 gsi_from = gsi_for_stmt (assign);
1796 gsi_move_before (&gsi_from, &gsi);
1797 }
1798 replace_phi_edge_with_variable (cond_bb, e1, phi, lhs);
1799 return 2;
1800 }
1801
1802 return 0;
1803}
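
/* [Editorial sketch, not part of the GCC sources.]  Folded back to C, the
   preparation-statement case handled above looks roughly like

     unsigned rot (unsigned x, unsigned y)
     {
       return y ? (x << (y & 31)) | (x >> (-y & 31)) : x;
     }

   Rotating by zero is the identity, so the y != 0 test is redundant: the
   cheap statements in the middle block (mask, cast, rotate) are moved in
   front of the condition and the PHI is replaced with their result, as in
   the GIMPLE examples in the comment before prep_stmt.  */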
1804
1805/* If VAR is an SSA_NAME that points to a BIT_NOT_EXPR then return the TREE for
1806 the value being inverted. */
1807
1808static tree
1809strip_bit_not (tree var)
1810{
1811  if (TREE_CODE (var) != SSA_NAME)
1812    return NULL_TREE;
1813
1814  gimple *assign = SSA_NAME_DEF_STMT (var);
1815  if (gimple_code (assign) != GIMPLE_ASSIGN)
1816    return NULL_TREE;
1817
1818  if (gimple_assign_rhs_code (assign) != BIT_NOT_EXPR)
1819    return NULL_TREE;
1820
1821 return gimple_assign_rhs1 (assign);
1822}
1823
1824/* Invert a MIN to a MAX or a MAX to a MIN expression CODE. */
1825
1826enum tree_code
1827invert_minmax_code (enum tree_code code)
1828{
1829 switch (code) {
1830 case MIN_EXPR:
1831 return MAX_EXPR;
1832 case MAX_EXPR:
1833 return MIN_EXPR;
1834 default:
1835    gcc_unreachable ();
1836 }
1837}
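
/* [Editorial note, not part of the GCC sources.]  strip_bit_not and
   invert_minmax_code work together: for any integers a and b,

     ~MIN (a, b) == MAX (~a, ~b)   and   ~MAX (a, b) == MIN (~a, ~b)

   so when both middle blocks of the diamond compute a MIN/MAX of
   BIT_NOT_EXPR operands, minmax_replacement below strips the negations,
   flips MIN <-> MAX, and applies a single BIT_NOT_EXPR to the result.  */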
1838
1839/* The function minmax_replacement does the main work of doing the minmax
1840 replacement. Return true if the replacement is done. Otherwise return
1841 false.
1842   BB is the basic block where the replacement is going to be done.  ARG0
1843 is argument 0 from the PHI. Likewise for ARG1.
1844
1845 If THREEWAY_P then expect the BB to be laid out in diamond shape with each
1846 BB containing only a MIN or MAX expression. */
1847
1848static bool
1849minmax_replacement (basic_block cond_bb, basic_block middle_bb, basic_block alt_middle_bb,
1850 edge e0, edge e1, gphi *phi, tree arg0, tree arg1, bool threeway_p)
1851{
1852 tree result;
1853 edge true_edge, false_edge;
1854 enum tree_code minmax, ass_code;
1855 tree smaller, larger, arg_true, arg_false;
1856 gimple_stmt_iterator gsi, gsi_from;
1857
1858  tree type = TREE_TYPE (PHI_RESULT (phi));
1859
1860 /* The optimization may be unsafe due to NaNs. */
1861 if (HONOR_NANS (type) || HONOR_SIGNED_ZEROS (type))
1862 return false;
1863
1864 gcond *cond = as_a <gcond *> (last_stmt (cond_bb));
1865 enum tree_code cmp = gimple_cond_code (cond);
1866 tree rhs = gimple_cond_rhs (cond);
1867
1868 /* Turn EQ/NE of extreme values to order comparisons. */
1869 if ((cmp == NE_EXPR || cmp == EQ_EXPR)
1870      && TREE_CODE (rhs) == INTEGER_CST
1871      && INTEGRAL_TYPE_P (TREE_TYPE (rhs)))
1872 {
1873      if (wi::eq_p (wi::to_wide (rhs), wi::min_value (TREE_TYPE (rhs))))
1874	{
1875	  cmp = (cmp == EQ_EXPR) ? LT_EXPR : GE_EXPR;
1876	  rhs = wide_int_to_tree (TREE_TYPE (rhs),
1877				  wi::min_value (TREE_TYPE (rhs)) + 1);
1878	}
1879      else if (wi::eq_p (wi::to_wide (rhs), wi::max_value (TREE_TYPE (rhs))))
1880	{
1881	  cmp = (cmp == EQ_EXPR) ? GT_EXPR : LE_EXPR;
1882	  rhs = wide_int_to_tree (TREE_TYPE (rhs),
1883				  wi::max_value (TREE_TYPE (rhs)) - 1);
1884 }
1885 }
1886
1887 /* This transformation is only valid for order comparisons. Record which
1888 operand is smaller/larger if the result of the comparison is true. */
1889  tree alt_smaller = NULL_TREE;
1890  tree alt_larger = NULL_TREE;
1891 if (cmp == LT_EXPR || cmp == LE_EXPR)
1892 {
1893 smaller = gimple_cond_lhs (cond);
1894 larger = rhs;
1895 /* If we have smaller < CST it is equivalent to smaller <= CST-1.
1896 Likewise smaller <= CST is equivalent to smaller < CST+1. */
1897      if (TREE_CODE (larger) == INTEGER_CST
1898	  && INTEGRAL_TYPE_P (TREE_TYPE (larger)))
1899 {
1900 if (cmp == LT_EXPR)
1901 {
1902 wi::overflow_type overflow;
1903 wide_int alt = wi::sub (wi::to_wide (larger), 1,
1904				      TYPE_SIGN (TREE_TYPE (larger)),
1905				      &overflow);
1906	      if (! overflow)
1907		alt_larger = wide_int_to_tree (TREE_TYPE (larger), alt);
1908 }
1909 else
1910 {
1911 wi::overflow_type overflow;
1912 wide_int alt = wi::add (wi::to_wide (larger), 1,
1913				      TYPE_SIGN (TREE_TYPE (larger)),
1914				      &overflow);
1915	      if (! overflow)
1916		alt_larger = wide_int_to_tree (TREE_TYPE (larger), alt);
1917 }
1918 }
1919 }
1920 else if (cmp == GT_EXPR || cmp == GE_EXPR)
1921 {
1922 smaller = rhs;
1923 larger = gimple_cond_lhs (cond);
1924 /* If we have larger > CST it is equivalent to larger >= CST+1.
1925 Likewise larger >= CST is equivalent to larger > CST-1. */
1926      if (TREE_CODE (smaller) == INTEGER_CST
1927	  && INTEGRAL_TYPE_P (TREE_TYPE (smaller)))
1928 {
1929 wi::overflow_type overflow;
1930 if (cmp == GT_EXPR)
1931 {
1932 wide_int alt = wi::add (wi::to_wide (smaller), 1,
1933				      TYPE_SIGN (TREE_TYPE (smaller)),
1934				      &overflow);
1935	      if (! overflow)
1936		alt_smaller = wide_int_to_tree (TREE_TYPE (smaller), alt);
1937 }
1938 else
1939 {
1940 wide_int alt = wi::sub (wi::to_wide (smaller), 1,
1941				      TYPE_SIGN (TREE_TYPE (smaller)),
1942				      &overflow);
1943	      if (! overflow)
1944		alt_smaller = wide_int_to_tree (TREE_TYPE (smaller), alt);
1945 }
1946 }
1947 }
1948 else
1949 return false;
1950
1951 /* Handle the special case of (signed_type)x < 0 being equivalent
1952 to x > MAX_VAL(signed_type) and (signed_type)x >= 0 equivalent
1953 to x <= MAX_VAL(signed_type). */
1954 if ((cmp == GE_EXPR || cmp == LT_EXPR)
1955      && INTEGRAL_TYPE_P (type)
1956      && TYPE_UNSIGNED (type)
1957 && integer_zerop (rhs))
1958 {
1959 tree op = gimple_cond_lhs (cond);
1960      if (TREE_CODE (op) == SSA_NAME
1961	  && INTEGRAL_TYPE_P (TREE_TYPE (op))
1962	  && !TYPE_UNSIGNED (TREE_TYPE (op)))
1963	{
1964	  gimple *def_stmt = SSA_NAME_DEF_STMT (op);
1965 if (gimple_assign_cast_p (def_stmt))
1966 {
1967 tree op1 = gimple_assign_rhs1 (def_stmt);
1968	      if (INTEGRAL_TYPE_P (TREE_TYPE (op1))
1969		  && TYPE_UNSIGNED (TREE_TYPE (op1))
1970		  && (TYPE_PRECISION (TREE_TYPE (op))
1971		      == TYPE_PRECISION (TREE_TYPE (op1)))
1972		  && useless_type_conversion_p (type, TREE_TYPE (op1)))
1973 {
1974		  wide_int w1 = wi::max_value (TREE_TYPE (op));
1975 wide_int w2 = wi::add (w1, 1);
1976 if (cmp == LT_EXPR)
1977 {
1978 larger = op1;
1979		      smaller = wide_int_to_tree (TREE_TYPE (op1), w1);
1980		      alt_smaller = wide_int_to_tree (TREE_TYPE (op1), w2);
1981		      alt_larger = NULL_TREE;
1982 }
1983 else
1984 {
1985 smaller = op1;
1986		      larger = wide_int_to_tree (TREE_TYPE (op1), w1);
1987		      alt_larger = wide_int_to_tree (TREE_TYPE (op1), w2);
1988		      alt_smaller = NULL_TREE;
1989 }
1990 }
1991 }
1992 }
1993 }
1994
1995 /* We need to know which is the true edge and which is the false
1996     edge so that we know if we have abs or negative abs. */
1997 extract_true_false_edges_from_block (cond_bb, &true_edge, &false_edge);
1998
1999 /* Forward the edges over the middle basic block. */
2000 if (true_edge->dest == middle_bb)
2001    true_edge = EDGE_SUCC (true_edge->dest, 0);
2002 if (false_edge->dest == middle_bb)
2003    false_edge = EDGE_SUCC (false_edge->dest, 0);
2004
2005 /* When THREEWAY_P then e1 will point to the edge of the final transition
2006 from middle-bb to end. */
2007 if (true_edge == e0)
2008 {
2009 if (!threeway_p)
2010	gcc_assert (false_edge == e1);
2011      arg_true = arg0;
2012      arg_false = arg1;
2013    }
2014  else
2015    {
2016      gcc_assert (false_edge == e0);
2017      if (!threeway_p)
2018	gcc_assert (true_edge == e1);
2019 arg_true = arg1;
2020 arg_false = arg0;
2021 }
2022
2023 if (empty_block_p (middle_bb))
2024 {
2025 if ((operand_equal_for_phi_arg_p (arg_true, smaller)
2026 || (alt_smaller
2027 && operand_equal_for_phi_arg_p (arg_true, alt_smaller)))
2028 && (operand_equal_for_phi_arg_p (arg_false, larger)
2029 || (alt_larger
2030 && operand_equal_for_phi_arg_p (arg_true, alt_larger))))
2031 {
2032 /* Case
2033
2034 if (smaller < larger)
2035 rslt = smaller;
2036 else
2037 rslt = larger; */
2038 minmax = MIN_EXPR;
2039 }
2040 else if ((operand_equal_for_phi_arg_p (arg_false, smaller)
2041 || (alt_smaller
2042 && operand_equal_for_phi_arg_p (arg_false, alt_smaller)))
2043 && (operand_equal_for_phi_arg_p (arg_true, larger)
2044 || (alt_larger
2045 && operand_equal_for_phi_arg_p (arg_true, alt_larger))))
2046 minmax = MAX_EXPR;
2047 else
2048 return false;
2049 }
2050 else if (middle_bb != alt_middle_bb && threeway_p)
2051 {
2052 /* Recognize the following case:
2053
2054 if (smaller < larger)
2055 a = MIN (smaller, c);
2056 else
2057 b = MIN (larger, c);
2058 x = PHI <a, b>
2059
2060 This is equivalent to
2061
2062 a = MIN (smaller, c);
2063 x = MIN (larger, a); */
2064
2065 gimple *assign = last_and_only_stmt (middle_bb);
2066 tree lhs, op0, op1, bound;
2067 tree alt_lhs, alt_op0, alt_op1;
2068 bool invert = false;
2069
2070 if (!single_pred_p (middle_bb)
2071 || !single_pred_p (alt_middle_bb)
2072 || !single_succ_p (middle_bb)
2073 || !single_succ_p (alt_middle_bb))
2074 return false;
2075
2076 /* When THREEWAY_P then e1 will point to the edge of the final transition
2077 from middle-bb to end. */
2078 if (true_edge == e0)
2079	gcc_assert (false_edge == EDGE_PRED (e1->src, 0));
2080      else
2081	gcc_assert (true_edge == EDGE_PRED (e1->src, 0));
2082
2083 bool valid_minmax_p = false;
2084 gimple_stmt_iterator it1
2085 = gsi_start_nondebug_after_labels_bb (middle_bb);
2086 gimple_stmt_iterator it2
2087 = gsi_start_nondebug_after_labels_bb (alt_middle_bb);
2088 if (gsi_one_nondebug_before_end_p (it1)
2089 && gsi_one_nondebug_before_end_p (it2))
2090 {
2091 gimple *stmt1 = gsi_stmt (it1);
2092 gimple *stmt2 = gsi_stmt (it2);
2093 if (is_gimple_assign (stmt1) && is_gimple_assign (stmt2))
2094 {
2095 enum tree_code code1 = gimple_assign_rhs_code (stmt1);
2096 enum tree_code code2 = gimple_assign_rhs_code (stmt2);
2097 valid_minmax_p = (code1 == MIN_EXPR || code1 == MAX_EXPR)
2098 && (code2 == MIN_EXPR || code2 == MAX_EXPR);
2099 }
2100 }
2101
2102 if (!valid_minmax_p)
2103 return false;
2104
2105 if (!assign
2106 || gimple_code (assign) != GIMPLE_ASSIGN)
2107 return false;
2108
2109 lhs = gimple_assign_lhs (assign);
2110 ass_code = gimple_assign_rhs_code (assign);
2111 if (ass_code != MAX_EXPR && ass_code != MIN_EXPR)
2112 return false;
2113
2114 op0 = gimple_assign_rhs1 (assign);
2115 op1 = gimple_assign_rhs2 (assign);
2116
2117 assign = last_and_only_stmt (alt_middle_bb);
2118 if (!assign
2119 || gimple_code (assign) != GIMPLE_ASSIGN)
2120 return false;
2121
2122 alt_lhs = gimple_assign_lhs (assign);
2123 if (ass_code != gimple_assign_rhs_code (assign))
2124 return false;
2125
2126 if (!operand_equal_for_phi_arg_p (lhs, arg_true)
2127 || !operand_equal_for_phi_arg_p (alt_lhs, arg_false))
2128 return false;
2129
2130 alt_op0 = gimple_assign_rhs1 (assign);
2131 alt_op1 = gimple_assign_rhs2 (assign);
2132
2133 if ((operand_equal_for_phi_arg_p (op0, smaller)
2134 || (alt_smaller
2135 && operand_equal_for_phi_arg_p (op0, alt_smaller)))
2136 && (operand_equal_for_phi_arg_p (alt_op0, larger)
2137 || (alt_larger
2138 && operand_equal_for_phi_arg_p (alt_op0, alt_larger))))
2139 {
2140 /* We got here if the condition is true, i.e., SMALLER < LARGER. */
2141 if (!operand_equal_for_phi_arg_p (op1, alt_op1))
2142 return false;
2143
2144	  if ((arg0 = strip_bit_not (op0)) != NULL
2145	      && (arg1 = strip_bit_not (alt_op0)) != NULL
2146	      && (bound = strip_bit_not (op1)) != NULL)
2147 {
2148 minmax = MAX_EXPR;
2149 ass_code = invert_minmax_code (ass_code);
2150 invert = true;
2151 }
2152 else
2153 {
2154 bound = op1;
2155 minmax = MIN_EXPR;
2156 arg0 = op0;
2157 arg1 = alt_op0;
2158 }
2159 }
2160 else if ((operand_equal_for_phi_arg_p (op0, larger)
2161 || (alt_larger
2162 && operand_equal_for_phi_arg_p (op0, alt_larger)))
2163 && (operand_equal_for_phi_arg_p (alt_op0, smaller)
2164 || (alt_smaller
2165 && operand_equal_for_phi_arg_p (alt_op0, alt_smaller))))
2166 {
2167 /* We got here if the condition is true, i.e., SMALLER > LARGER. */
2168 if (!operand_equal_for_phi_arg_p (op1, alt_op1))
2169 return false;
2170
2171	  if ((arg0 = strip_bit_not (op0)) != NULL
2172	      && (arg1 = strip_bit_not (alt_op0)) != NULL
2173	      && (bound = strip_bit_not (op1)) != NULL)
2174 {
2175 minmax = MIN_EXPR;
2176 ass_code = invert_minmax_code (ass_code);
2177 invert = true;
2178 }
2179 else
2180 {
2181 bound = op1;
2182 minmax = MAX_EXPR;
2183 arg0 = op0;
2184 arg1 = alt_op0;
2185 }
2186 }
2187 else
2188 return false;
2189
2190 /* Emit the statement to compute min/max. */
2191 location_t locus = gimple_location (last_stmt (cond_bb));
2192      gimple_seq stmts = NULL;
2193      tree phi_result = PHI_RESULT (phi);
2194      result = gimple_build (&stmts, locus, minmax, TREE_TYPE (phi_result),
2195			     arg0, arg1);
2196      result = gimple_build (&stmts, locus, ass_code, TREE_TYPE (phi_result),
2197			     result, bound);
2198      if (invert)
2199	result = gimple_build (&stmts, locus, BIT_NOT_EXPR, TREE_TYPE (phi_result),
2200			       result);
2201
2202 gsi = gsi_last_bb (cond_bb);
2203 gsi_insert_seq_before (&gsi, stmts, GSI_NEW_STMT);
2204
2205 replace_phi_edge_with_variable (cond_bb, e1, phi, result);
2206
2207 return true;
2208 }
2209 else
2210 {
2211 /* Recognize the following case, assuming d <= u:
2212
2213 if (a <= u)
2214 b = MAX (a, d);
2215 x = PHI <b, u>
2216
2217 This is equivalent to
2218
2219 b = MAX (a, d);
2220 x = MIN (b, u); */
2221
2222 gimple *assign = last_and_only_stmt (middle_bb);
2223 tree lhs, op0, op1, bound;
2224
2225 if (!single_pred_p (middle_bb))
2226 return false;
2227
2228 if (!assign
2229 || gimple_code (assign) != GIMPLE_ASSIGN)
2230 return false;
2231
2232 lhs = gimple_assign_lhs (assign);
2233 ass_code = gimple_assign_rhs_code (assign);
2234 if (ass_code != MAX_EXPR && ass_code != MIN_EXPR)
2235 return false;
2236 op0 = gimple_assign_rhs1 (assign);
2237 op1 = gimple_assign_rhs2 (assign);
2238
2239 if (true_edge->src == middle_bb)
2240 {
2241 /* We got here if the condition is true, i.e., SMALLER < LARGER. */
2242 if (!operand_equal_for_phi_arg_p (lhs, arg_true))
2243 return false;
2244
2245 if (operand_equal_for_phi_arg_p (arg_false, larger)
2246 || (alt_larger
2247 && operand_equal_for_phi_arg_p (arg_false, alt_larger)))
2248 {
2249 /* Case
2250
2251 if (smaller < larger)
2252 {
2253 r' = MAX_EXPR (smaller, bound)
2254 }
2255 r = PHI <r', larger> --> to be turned to MIN_EXPR. */
2256 if (ass_code != MAX_EXPR)
2257 return false;
2258
2259 minmax = MIN_EXPR;
2260 if (operand_equal_for_phi_arg_p (op0, smaller)
2261 || (alt_smaller
2262 && operand_equal_for_phi_arg_p (op0, alt_smaller)))
2263 bound = op1;
2264 else if (operand_equal_for_phi_arg_p (op1, smaller)
2265 || (alt_smaller
2266 && operand_equal_for_phi_arg_p (op1, alt_smaller)))
2267 bound = op0;
2268 else
2269 return false;
2270
2271 /* We need BOUND <= LARGER. */
2272	      if (!integer_nonzerop (fold_build2 (LE_EXPR, boolean_type_node,
2273						  bound, larger)))
2274 return false;
2275 }
2276 else if (operand_equal_for_phi_arg_p (arg_false, smaller)
2277 || (alt_smaller
2278 && operand_equal_for_phi_arg_p (arg_false, alt_smaller)))
2279 {
2280 /* Case
2281
2282 if (smaller < larger)
2283 {
2284 r' = MIN_EXPR (larger, bound)
2285 }
2286 r = PHI <r', smaller> --> to be turned to MAX_EXPR. */
2287 if (ass_code != MIN_EXPR)
2288 return false;
2289
2290 minmax = MAX_EXPR;
2291 if (operand_equal_for_phi_arg_p (op0, larger)
2292 || (alt_larger
2293 && operand_equal_for_phi_arg_p (op0, alt_larger)))
2294 bound = op1;
2295 else if (operand_equal_for_phi_arg_p (op1, larger)
2296 || (alt_larger
2297 && operand_equal_for_phi_arg_p (op1, alt_larger)))
2298 bound = op0;
2299 else
2300 return false;
2301
2302 /* We need BOUND >= SMALLER. */
2303	      if (!integer_nonzerop (fold_build2 (GE_EXPR, boolean_type_node,
2304						  bound, smaller)))
2305 return false;
2306 }
2307 else
2308 return false;
2309 }
2310 else
2311 {
2312 /* We got here if the condition is false, i.e., SMALLER > LARGER. */
2313 if (!operand_equal_for_phi_arg_p (lhs, arg_false))
2314 return false;
2315
2316 if (operand_equal_for_phi_arg_p (arg_true, larger)
2317 || (alt_larger
2318 && operand_equal_for_phi_arg_p (arg_true, alt_larger)))
2319 {
2320 /* Case
2321
2322 if (smaller > larger)
2323 {
2324 r' = MIN_EXPR (smaller, bound)
2325 }
2326 r = PHI <r', larger> --> to be turned to MAX_EXPR. */
2327 if (ass_code != MIN_EXPR)
2328 return false;
2329
2330 minmax = MAX_EXPR;
2331 if (operand_equal_for_phi_arg_p (op0, smaller)
2332 || (alt_smaller
2333 && operand_equal_for_phi_arg_p (op0, alt_smaller)))
2334 bound = op1;
2335 else if (operand_equal_for_phi_arg_p (op1, smaller)
2336 || (alt_smaller
2337 && operand_equal_for_phi_arg_p (op1, alt_smaller)))
2338 bound = op0;
2339 else
2340 return false;
2341
2342 /* We need BOUND >= LARGER. */
2343	      if (!integer_nonzerop (fold_build2 (GE_EXPR, boolean_type_node,
2344						  bound, larger)))
2345 return false;
2346 }
2347 else if (operand_equal_for_phi_arg_p (arg_true, smaller)
2348 || (alt_smaller
2349 && operand_equal_for_phi_arg_p (arg_true, alt_smaller)))
2350 {
2351 /* Case
2352
2353 if (smaller > larger)
2354 {
2355 r' = MAX_EXPR (larger, bound)
2356 }
2357 r = PHI <r', smaller> --> to be turned to MIN_EXPR. */
2358 if (ass_code != MAX_EXPR)
2359 return false;
2360
2361 minmax = MIN_EXPR;
2362 if (operand_equal_for_phi_arg_p (op0, larger))
2363 bound = op1;
2364 else if (operand_equal_for_phi_arg_p (op1, larger))
2365 bound = op0;
2366 else
2367 return false;
2368
2369 /* We need BOUND <= SMALLER. */
2370	      if (!integer_nonzerop (fold_build2 (LE_EXPR, boolean_type_node,
2371						  bound, smaller)))
2372 return false;
2373 }
2374 else
2375 return false;
2376 }
2377
2378 /* Move the statement from the middle block. */
2379 gsi = gsi_last_bb (cond_bb);
2380 gsi_from = gsi_last_nondebug_bb (middle_bb);
2381      reset_flow_sensitive_info (SINGLE_SSA_TREE_OPERAND (gsi_stmt (gsi_from),
2382							  SSA_OP_DEF));
2383 gsi_move_before (&gsi_from, &gsi);
2384 }
2385
2386 /* Emit the statement to compute min/max. */
2387  gimple_seq stmts = NULL;
2388  tree phi_result = PHI_RESULT (phi);
2389  result = gimple_build (&stmts, minmax, TREE_TYPE (phi_result), arg0, arg1);
2390
2391 gsi = gsi_last_bb (cond_bb);
2392 gsi_insert_seq_before (&gsi, stmts, GSI_NEW_STMT);
2393
2394 replace_phi_edge_with_variable (cond_bb, e1, phi, result);
2395
2396 return true;
2397}
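
/* [Editorial sketch, not part of the GCC sources.]  The two shapes
   recognized by minmax_replacement above, at the C level:

     r = (a < b) ? a : b;             /* empty middle block -> MIN (a, b)   */

     r = (a <= u) ? MAX (a, d) : u;   /* bounded middle block, with d <= u
                                         -> MIN (MAX (a, d), u)             */

   The second rewrite is only done after the fold_build2 bound checks above
   prove the required ordering between BOUND and SMALLER/LARGER.  */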
2398
2399/* Attempt to optimize (x <=> y) cmp 0 and similar comparisons.
2400 For strong ordering <=> try to match something like:
2401 <bb 2> : // cond3_bb (== cond2_bb)
2402 if (x_4(D) != y_5(D))
2403 goto <bb 3>; [INV]
2404 else
2405 goto <bb 6>; [INV]
2406
2407 <bb 3> : // cond_bb
2408 if (x_4(D) < y_5(D))
2409 goto <bb 6>; [INV]
2410 else
2411 goto <bb 4>; [INV]
2412
2413 <bb 4> : // middle_bb
2414
2415 <bb 6> : // phi_bb
2416 # iftmp.0_2 = PHI <1(4), 0(2), -1(3)>
2417 _1 = iftmp.0_2 == 0;
2418
2419 and for partial ordering <=> something like:
2420
2421 <bb 2> : // cond3_bb
2422 if (a_3(D) == b_5(D))
2423 goto <bb 6>; [50.00%]
2424 else
2425 goto <bb 3>; [50.00%]
2426
2427 <bb 3> [local count: 536870913]: // cond2_bb
2428 if (a_3(D) < b_5(D))
2429 goto <bb 6>; [50.00%]
2430 else
2431 goto <bb 4>; [50.00%]
2432
2433 <bb 4> [local count: 268435456]: // cond_bb
2434 if (a_3(D) > b_5(D))
2435 goto <bb 6>; [50.00%]
2436 else
2437 goto <bb 5>; [50.00%]
2438
2439 <bb 5> [local count: 134217728]: // middle_bb
2440
2441 <bb 6> [local count: 1073741824]: // phi_bb
2442 # SR.27_4 = PHI <0(2), -1(3), 1(4), 2(5)>
2443 _2 = SR.27_4 > 0; */
2444
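/* [Editorial note, not part of the GCC sources.]  The CFG shapes in the
   comment above are what the C++ front end typically produces for code like

     auto c = x <=> y;    // strong_ordering for integer x, y
     if (c == 0) ...      // the "(x <=> y) cmp 0" being matched

   with the PHI result holding the ordering encoded as -1/0/1 (plus 2 for
   partial_ordering::unordered, hence the -1..2 range check below), and
   spaceship_replacement tries to fold the whole diamond back into a single
   comparison of x and y.  */
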
2445static bool
2446spaceship_replacement (basic_block cond_bb, basic_block middle_bb,
2447 edge e0, edge e1, gphi *phi,
2448 tree arg0, tree arg1)
2449{
2450  tree phires = PHI_RESULT (phi);
2451  if (!INTEGRAL_TYPE_P (TREE_TYPE (phires))
2452      || TYPE_UNSIGNED (TREE_TYPE (phires))
2453      || !tree_fits_shwi_p (arg0)
2454      || !tree_fits_shwi_p (arg1)
2455      || !IN_RANGE (tree_to_shwi (arg0), -1, 2)
2456      || !IN_RANGE (tree_to_shwi (arg1), -1, 2))
2457 return false;
2458
2459 basic_block phi_bb = gimple_bb (phi);
2460 gcc_assert (phi_bb == e0->dest && phi_bb == e1->dest);
2461 if (!IN_RANGE (EDGE_COUNT (phi_bb->preds), 3, 4))
2462 return false;
2463
2464 use_operand_p use_p;
2465 gimple *use_stmt;
2466 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (phires))
2467 return false;
2468 if (!single_imm_use (phires, &use_p, &use_stmt))
2469 return false;
2470 enum tree_code cmp;
2471 tree lhs, rhs;
2472 gimple *orig_use_stmt = use_stmt;
2473 tree orig_use_lhs = NULL_TREE;
2474 int prec = TYPE_PRECISION (TREE_TYPE (phires));
2475 bool is_cast = false;
2476
2477 /* Deal with the case when match.pd has rewritten the (res & ~1) == 0
2478 into res <= 1 and has left a type-cast for signed types. */
2479 if (gimple_assign_cast_p (use_stmt))
2480 {
2481 orig_use_lhs = gimple_assign_lhs (use_stmt);
2482 /* match.pd would have only done this for a signed type,
2483 so the conversion must be to an unsigned one. */
2484 tree ty1 = TREE_TYPE (gimple_assign_rhs1 (use_stmt));
2485 tree ty2 = TREE_TYPE (orig_use_lhs);
2486
2487 if (!TYPE_UNSIGNED (ty2) || !INTEGRAL_TYPE_P (ty2))
2488 return false;
2489 if (TYPE_PRECISION (ty1) > TYPE_PRECISION (ty2))
2490 return false;
2491 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (orig_use_lhs))
2492 return false;
2493 if (!single_imm_use (orig_use_lhs, &use_p, &use_stmt))
2494 return false;
2495
2496 is_cast = true;
2497 }
2498 else if (is_gimple_assign (use_stmt)
2499 && gimple_assign_rhs_code (use_stmt) == BIT_AND_EXPR
2500 && TREE_CODE (gimple_assign_rhs2 (use_stmt)) == INTEGER_CST
2501 && (wi::to_wide (gimple_assign_rhs2 (use_stmt))
2502 == wi::shifted_mask (1, prec - 1, false, prec)))
2503 {
2504 /* For partial_ordering result operator>= with unspec as second
2505 argument is (res & 1) == res, folded by match.pd into
2506 (res & ~1) == 0. */
2507 orig_use_lhs = gimple_assign_lhs (use_stmt);
2508 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (orig_use_lhs))
2509 return false;
2510 if (!single_imm_use (orig_use_lhs, &use_p, &use_stmt))
2511 return false;
2512 }
2513 if (gimple_code (use_stmt) == GIMPLE_COND)
2514 {
2515 cmp = gimple_cond_code (use_stmt);
2516 lhs = gimple_cond_lhs (use_stmt);
2517 rhs = gimple_cond_rhs (use_stmt);
2518 }
2519 else if (is_gimple_assign (use_stmt))
2520 {
2521 if (gimple_assign_rhs_class (use_stmt) == GIMPLE_BINARY_RHS)
2522 {
2523 cmp = gimple_assign_rhs_code (use_stmt);
2524 lhs = gimple_assign_rhs1 (use_stmt);
2525 rhs = gimple_assign_rhs2 (use_stmt);
2526 }
2527 else if (gimple_assign_rhs_code (use_stmt) == COND_EXPR)
2528 {
2529 tree cond = gimple_assign_rhs1 (use_stmt);
2530 if (!COMPARISON_CLASS_P (cond))
2531 return false;
2532 cmp = TREE_CODE (cond);
2533 lhs = TREE_OPERAND (cond, 0);
2534 rhs = TREE_OPERAND (cond, 1);
2535 }
2536 else
2537 return false;
2538 }
2539 else
2540 return false;
2541 switch (cmp)
2542 {
2543 case EQ_EXPR:
2544 case NE_EXPR:
2545 case LT_EXPR:
2546 case GT_EXPR:
2547 case LE_EXPR:
2548 case GE_EXPR:
2549 break;
2550 default:
2551 return false;
2552 }
2553 if (lhs != (orig_use_lhs ? orig_use_lhs : phires)
2554 || !tree_fits_shwi_p (rhs)
2555 || !IN_RANGE (tree_to_shwi (rhs), -1, 1))
2556 return false;
2557
2558 if (is_cast)
2559 {
2560 if (TREE_CODE (rhs) != INTEGER_CST)
2561 return false;
2562 /* As for -ffast-math we assume the 2 return to be
2563 impossible, canonicalize (unsigned) res <= 1U or
2564 (unsigned) res < 2U into res >= 0 and (unsigned) res > 1U
2565 or (unsigned) res >= 2U as res < 0. */
2566 switch (cmp)
2567 {
2568 case LE_EXPR:
2569 if (!integer_onep (rhs))
2570 return false;
2571 cmp = GE_EXPR;
2572 break;
2573 case LT_EXPR:
2574 if (wi::ne_p (wi::to_widest (rhs), 2))
2575 return false;
2576 cmp = GE_EXPR;
2577 break;
2578 case GT_EXPR:
2579 if (!integer_onep (rhs))
2580 return false;
2581 cmp = LT_EXPR;
2582 break;
2583 case GE_EXPR:
2584 if (wi::ne_p (wi::to_widest (rhs), 2))
2585 return false;
2586 cmp = LT_EXPR;
2587 break;
2588 default:
2589 return false;
2590 }
2591 rhs = build_zero_cst (TREE_TYPE (phires));
2592 }
2593 else if (orig_use_lhs)
2594 {
2595 if ((cmp != EQ_EXPR && cmp != NE_EXPR) || !integer_zerop (rhs))
2596 return false;
2597 /* As for -ffast-math we assume the 2 return to be
2598 impossible, canonicalize (res & ~1) == 0 into
2599 res >= 0 and (res & ~1) != 0 as res < 0. */
2600 cmp = cmp == EQ_EXPR ? GE_EXPR : LT_EXPR;
2601 }
2602
2603 if (!empty_block_p (middle_bb))
2604 return false;
2605
2606 gcond *cond1 = as_a <gcond *> (last_stmt (cond_bb));
2607 enum tree_code cmp1 = gimple_cond_code (cond1);
2608 switch (cmp1)
2609 {
2610 case LT_EXPR:
2611 case LE_EXPR:
2612 case GT_EXPR:
2613 case GE_EXPR:
2614 break;
2615 default:
2616 return false;
2617 }
2618 tree lhs1 = gimple_cond_lhs (cond1);
2619 tree rhs1 = gimple_cond_rhs (cond1);
2620 /* The optimization may be unsafe due to NaNs. */
2621 if (HONOR_NANS (TREE_TYPE (lhs1)))
2622 return false;
2623 if (TREE_CODE (lhs1) == SSA_NAME && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs1))
2624 return false;
2625 if (TREE_CODE (rhs1) == SSA_NAME && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1))
2626 return false;
2627
2628 if (!single_pred_p (cond_bb) || !cond_only_block_p (cond_bb))
2629 return false;
2630
2631 basic_block cond2_bb = single_pred (cond_bb);
2632 if (EDGE_COUNT (cond2_bb->succs) != 2)
2633 return false;
2634 edge cond2_phi_edge;
2635 if (EDGE_SUCC (cond2_bb, 0)->dest == cond_bb)
2636 {
2637 if (EDGE_SUCC (cond2_bb, 1)->dest != phi_bb)
2638 return false;
2639 cond2_phi_edge = EDGE_SUCC (cond2_bb, 1);
2640 }
2641 else if (EDGE_SUCC (cond2_bb, 0)->dest != phi_bb)
2642 return false;
2643 else
2644 cond2_phi_edge = EDGE_SUCC (cond2_bb, 0);
2645 tree arg2 = gimple_phi_arg_def (phi, cond2_phi_edge->dest_idx);
2646 if (!tree_fits_shwi_p (arg2))
2647 return false;
2648 gimple *cond2 = last_stmt (cond2_bb);
2649 if (cond2 == NULL || gimple_code (cond2) != GIMPLE_COND)
2650 return false;
2651 enum tree_code cmp2 = gimple_cond_code (cond2);
2652 tree lhs2 = gimple_cond_lhs (cond2);
2653 tree rhs2 = gimple_cond_rhs (cond2);
2654 if (lhs2 == lhs1)
2655 {
2656 if (!operand_equal_p (rhs2, rhs1, 0))
2657 {
2658 if ((cmp2 == EQ_EXPR || cmp2 == NE_EXPR)
2659 && TREE_CODE (rhs1) == INTEGER_CST
2660 && TREE_CODE (rhs2) == INTEGER_CST)
2661 {
2662 /* For integers, we can have cond2 x == 5
2663 and cond1 x < 5, x <= 4, x <= 5, x < 6,
2664 x > 5, x >= 6, x >= 5 or x > 4. */
2665 if (tree_int_cst_lt (rhs1, rhs2))
2666 {
2667 if (wi::ne_p (wi::to_wide (rhs1) + 1, wi::to_wide (rhs2)))
2668 return false;
2669 if (cmp1 == LE_EXPR)
2670 cmp1 = LT_EXPR;
2671 else if (cmp1 == GT_EXPR)
2672 cmp1 = GE_EXPR;
2673 else
2674 return false;
2675 }
2676 else
2677 {
2678 gcc_checking_assert (tree_int_cst_lt (rhs2, rhs1));
2679 if (wi::ne_p (wi::to_wide (rhs2) + 1, wi::to_wide (rhs1)))
2680 return false;
2681 if (cmp1 == LT_EXPR)
2682 cmp1 = LE_EXPR;
2683 else if (cmp1 == GE_EXPR)
2684 cmp1 = GT_EXPR;
2685 else
2686 return false;
2687 }
2688 rhs1 = rhs2;
2689 }
2690 else
2691 return false;
2692 }
2693 }
2694 else if (lhs2 == rhs1)
2695 {
2696 if (rhs2 != lhs1)
2697 return false;
2698 }
2699 else
2700 return false;
2701
2702 tree arg3 = arg2;
2703 basic_block cond3_bb = cond2_bb;
Value stored to 'cond3_bb' during its initialization is never read
2704 edge cond3_phi_edge = cond2_phi_edge;
2705 gimple *cond3 = cond2;
2706 enum tree_code cmp3 = cmp2;
2707 tree lhs3 = lhs2;
2708 tree rhs3 = rhs2;
2709 if (EDGE_COUNT (phi_bb->preds) == 4)
2710 {
2711 if (absu_hwi (tree_to_shwi (arg2)) != 1)
2712 return false;
2713 if (e1->flags & EDGE_TRUE_VALUE)
2714 {
2715 if (tree_to_shwi (arg0) != 2
2716 || absu_hwi (tree_to_shwi (arg1)) != 1
2717 || wi::to_widest (arg1) == wi::to_widest (arg2))
2718 return false;
2719 }
2720 else if (tree_to_shwi (arg1) != 2
2721 || absu_hwi (tree_to_shwi (arg0)) != 1
2722 || wi::to_widest (arg0) == wi::to_widest (arg1))
2723 return false;
2724 switch (cmp2)
2725 {
2726 case LT_EXPR:
2727 case LE_EXPR:
2728 case GT_EXPR:
2729 case GE_EXPR:
2730 break;
2731 default:
2732 return false;
2733 }
2734 /* if (x < y) goto phi_bb; else fallthru;
2735 if (x > y) goto phi_bb; else fallthru;
2736 bbx:;
2737 phi_bb:;
2738 is ok, but if x and y are swapped in one of the comparisons,
2739 or the comparisons are the same and operands not swapped,
2740 or the true and false edges are swapped, it is not. */
2741 if ((lhs2 == lhs1)
2742 ^ (((cond2_phi_edge->flags
2743 & ((cmp2 == LT_EXPR || cmp2 == LE_EXPR)
2744 ? EDGE_TRUE_VALUE : EDGE_FALSE_VALUE)) != 0)
2745 != ((e1->flags
2746 & ((cmp1 == LT_EXPR || cmp1 == LE_EXPR)
2747 ? EDGE_TRUE_VALUE : EDGE_FALSE_VALUE)) != 0)))
2748 return false;
2749 if (!single_pred_p (cond2_bb) || !cond_only_block_p (cond2_bb))
2750 return false;
2751 cond3_bb = single_pred (cond2_bb);
2752 if (EDGE_COUNT (cond2_bb->succs) != 2)
2753 return false;
2754 if (EDGE_SUCC (cond3_bb, 0)->dest == cond2_bb)
2755 {
2756 if (EDGE_SUCC (cond3_bb, 1)->dest != phi_bb)
2757 return false;
2758 cond3_phi_edge = EDGE_SUCC (cond3_bb, 1);
2759 }
2760 else if (EDGE_SUCC (cond3_bb, 0)->dest != phi_bb)
2761 return false;
2762 else
2763 cond3_phi_edge = EDGE_SUCC (cond3_bb, 0);
2764 arg3 = gimple_phi_arg_def (phi, cond3_phi_edge->dest_idx);
2765 cond3 = last_stmt (cond3_bb);
2766 if (cond3 == NULL || gimple_code (cond3) != GIMPLE_COND)
2767 return false;
2768 cmp3 = gimple_cond_code (cond3);
2769 lhs3 = gimple_cond_lhs (cond3);
2770 rhs3 = gimple_cond_rhs (cond3);
2771 if (lhs3 == lhs1)
2772 {
2773 if (!operand_equal_p (rhs3, rhs1, 0))
2774 return false;
2775 }
2776 else if (lhs3 == rhs1)
2777 {
2778 if (rhs3 != lhs1)
2779 return false;
2780 }
2781 else
2782 return false;
2783 }
2784 else if (absu_hwi (tree_to_shwi (arg0)) != 1
2785 || absu_hwi (tree_to_shwi (arg1)) != 1
2786 || wi::to_widest (arg0) == wi::to_widest (arg1))
2787 return false;
2788
2789 if (!integer_zerop (arg3) || (cmp3 != EQ_EXPR && cmp3 != NE_EXPR))
2790 return false;
2791 if ((cond3_phi_edge->flags & (cmp3 == EQ_EXPR
2792 ? EDGE_TRUE_VALUE : EDGE_FALSE_VALUE)) == 0)
2793 return false;
2794
2795 /* lhs1 one_cmp rhs1 results in phires of 1. */
2796 enum tree_code one_cmp;
2797 if ((cmp1 == LT_EXPR || cmp1 == LE_EXPR)
2798 ^ (!integer_onep ((e1->flags & EDGE_TRUE_VALUE) ? arg1 : arg0)))
2799 one_cmp = LT_EXPR;
2800 else
2801 one_cmp = GT_EXPR;
2802
2803 enum tree_code res_cmp;
2804 switch (cmp)
2805 {
2806 case EQ_EXPR:
2807 if (integer_zerop (rhs))
2808 res_cmp = EQ_EXPR;
2809 else if (integer_minus_onep (rhs))
2810 res_cmp = one_cmp == LT_EXPR ? GT_EXPR : LT_EXPR;
2811 else if (integer_onep (rhs))
2812 res_cmp = one_cmp;
2813 else
2814 return false;
2815 break;
2816 case NE_EXPR:
2817 if (integer_zerop (rhs))
2818 res_cmp = NE_EXPR;
2819 else if (integer_minus_onep (rhs))
2820 res_cmp = one_cmp == LT_EXPR ? LE_EXPR : GE_EXPR;
2821 else if (integer_onep (rhs))
2822 res_cmp = one_cmp == LT_EXPR ? GE_EXPR : LE_EXPR;
2823 else
2824 return false;
2825 break;
2826 case LT_EXPR:
2827 if (integer_onep (rhs))
2828 res_cmp = one_cmp == LT_EXPR ? GE_EXPR : LE_EXPR;
2829 else if (integer_zerop (rhs))
2830 res_cmp = one_cmp == LT_EXPR ? GT_EXPR : LT_EXPR;
2831 else
2832 return false;
2833 break;
2834 case LE_EXPR:
2835 if (integer_zerop (rhs))
2836 res_cmp = one_cmp == LT_EXPR ? GE_EXPR : LE_EXPR;
2837 else if (integer_minus_onep (rhs))
2838 res_cmp = one_cmp == LT_EXPR ? GT_EXPR : LT_EXPR;
2839 else
2840 return false;
2841 break;
2842 case GT_EXPR:
2843 if (integer_minus_onep (rhs))
2844 res_cmp = one_cmp == LT_EXPR ? LE_EXPR : GE_EXPR;
2845 else if (integer_zerop (rhs))
2846 res_cmp = one_cmp;
2847 else
2848 return false;
2849 break;
2850 case GE_EXPR:
2851 if (integer_zerop (rhs))
2852 res_cmp = one_cmp == LT_EXPR ? LE_EXPR : GE_EXPR;
2853 else if (integer_onep (rhs))
2854 res_cmp = one_cmp;
2855 else
2856 return false;
2857 break;
2858 default:
2859 gcc_unreachable ();
2860 }
2861
2862 if (gimple_code (use_stmt) == GIMPLE_COND)
2863 {
2864 gcond *use_cond = as_a <gcond *> (use_stmt);
2865 gimple_cond_set_code (use_cond, res_cmp);
2866 gimple_cond_set_lhs (use_cond, lhs1);
2867 gimple_cond_set_rhs (use_cond, rhs1);
2868 }
2869 else if (gimple_assign_rhs_class (use_stmt) == GIMPLE_BINARY_RHS)
2870 {
2871 gimple_assign_set_rhs_code (use_stmt, res_cmp);
2872 gimple_assign_set_rhs1 (use_stmt, lhs1);
2873 gimple_assign_set_rhs2 (use_stmt, rhs1);
2874 }
2875 else
2876 {
2877 tree cond = build2 (res_cmp, TREE_TYPE (gimple_assign_rhs1 (use_stmt)),
2878 lhs1, rhs1);
2879 gimple_assign_set_rhs1 (use_stmt, cond);
2880 }
2881 update_stmt (use_stmt);
2882
2883 if (MAY_HAVE_DEBUG_BIND_STMTS)
2884 {
2885 use_operand_p use_p;
2886 imm_use_iterator iter;
2887 bool has_debug_uses = false;
2888 bool has_cast_debug_uses = false;
2889 FOR_EACH_IMM_USE_FAST (use_p, iter, phires)
2890 {
2891 gimple *use_stmt = USE_STMT (use_p);
2892 if (orig_use_lhs && use_stmt == orig_use_stmt)
2893 continue;
2894 gcc_assert (is_gimple_debug (use_stmt));
2895 has_debug_uses = true;
2896 break;
2897 }
2898 if (orig_use_lhs)
2899 {
2900 if (!has_debug_uses || is_cast)
2901 FOR_EACH_IMM_USE_FAST (use_p, iter, orig_use_lhs)
2902 {
2903 gimple *use_stmt = USE_STMT (use_p);
2904 gcc_assert (is_gimple_debug (use_stmt));
2905 has_debug_uses = true;
2906 if (is_cast)
2907 has_cast_debug_uses = true;
2908 }
2909 gimple_stmt_iterator gsi = gsi_for_stmt (orig_use_stmt);
2910 tree zero = build_zero_cst (TREE_TYPE (orig_use_lhs));
2911 gimple_assign_set_rhs_with_ops (&gsi, INTEGER_CST, zero);
2912 update_stmt (orig_use_stmt);
2913 }
2914
2915 if (has_debug_uses)
2916 {
2917 /* If there are debug uses, emit something like:
2918 # DEBUG D#1 => i_2(D) > j_3(D) ? 1 : -1
2919 # DEBUG D#2 => i_2(D) == j_3(D) ? 0 : D#1
2920 where > stands for the comparison that yielded 1
2921 and replace debug uses of phi result with that D#2.
2922 Ignore the value of 2, because if NaNs aren't expected,
2923 all floating point numbers should be comparable. */
2924 gimple_stmt_iterator gsi = gsi_after_labels (gimple_bb (phi));
2925 tree type = TREE_TYPE (phires);
2926 tree temp1 = build_debug_expr_decl (type);
2927 tree t = build2 (one_cmp, boolean_type_node, lhs1, rhs2);
2928 t = build3 (COND_EXPR, type, t, build_one_cst (type),
2929 build_int_cst (type, -1));
2930 gimple *g = gimple_build_debug_bind (temp1, t, phi);
2931 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
2932 tree temp2 = build_debug_expr_decl (type);
2933 t = build2 (EQ_EXPR, boolean_type_node, lhs1, rhs2);
2934 t = build3 (COND_EXPR, type, t, build_zero_cst (type), temp1);
2935 g = gimple_build_debug_bind (temp2, t, phi);
2936 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
2937 replace_uses_by (phires, temp2);
2938 if (orig_use_lhs)
2939 {
2940 if (has_cast_debug_uses)
2941 {
2942 tree temp3 = make_node (DEBUG_EXPR_DECL);
2943 DECL_ARTIFICIAL (temp3) = 1;
2944 TREE_TYPE (temp3) = TREE_TYPE (orig_use_lhs);
2945 SET_DECL_MODE (temp3, TYPE_MODE (type));
2946 t = fold_convert (TREE_TYPE (temp3), temp2);
2947 g = gimple_build_debug_bind (temp3, t, phi);
2948 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
2949 replace_uses_by (orig_use_lhs, temp3);
2950 }
2951 else
2952 replace_uses_by (orig_use_lhs, temp2);
2953 }
2954 }
2955 }
2956
2957 if (orig_use_lhs)
2958 {
2959 gimple_stmt_iterator gsi = gsi_for_stmt (orig_use_stmt);
2960 gsi_remove (&gsi, true);
2961 }
2962
2963 gimple_stmt_iterator psi = gsi_for_stmt (phi);
2964 remove_phi_node (&psi, true);
2965 statistics_counter_event (cfun, "spaceship replacement", 1);
2966
2967 return true;
2968}
2969
2970/* Optimize x ? __builtin_fun (x) : C, where C is __builtin_fun (0).
2971 Convert
2972
2973 <bb 2>
2974 if (b_4(D) != 0)
2975 goto <bb 3>
2976 else
2977 goto <bb 4>
2978
2979 <bb 3>
2980 _2 = (unsigned long) b_4(D);
2981 _9 = __builtin_popcountl (_2);
2982 OR
2983 _9 = __builtin_popcountl (b_4(D));
2984
2985 <bb 4>
2986 c_12 = PHI <0(2), _9(3)>
2987
2988 Into
2989 <bb 2>
2990 _2 = (unsigned long) b_4(D);
2991 _9 = __builtin_popcountl (_2);
2992 OR
2993 _9 = __builtin_popcountl (b_4(D));
2994
2995 <bb 4>
2996 c_12 = PHI <_9(2)>
2997
2998 Similarly for __builtin_clz or __builtin_ctz if
2999 C?Z_DEFINED_VALUE_AT_ZERO is 2, optab is present and
3000 instead of 0 above it uses the value from that macro. */
3001
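A minimal C++ sketch (hypothetical name, illustrative only) of source that produces the PHI pattern described above; once the call is hoisted before the condition, the b != 0 test and the PHI disappear:

   unsigned count_bits (unsigned long b)
   {
     return b != 0 ? __builtin_popcountl (b) : 0;   /* popcountl (0) == 0 anyway */
   }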
3002static bool
3003cond_removal_in_builtin_zero_pattern (basic_block cond_bb,
3004 basic_block middle_bb,
3005 edge e1, edge e2, gphi *phi,
3006 tree arg0, tree arg1)
3007{
3008 gimple *cond;
3009 gimple_stmt_iterator gsi, gsi_from;
3010 gimple *call;
3011 gimple *cast = NULL;
3012 tree lhs, arg;
3013
3014 /* Check that
3015 _2 = (unsigned long) b_4(D);
3016 _9 = __builtin_popcountl (_2);
3017 OR
3018 _9 = __builtin_popcountl (b_4(D));
3019 are the only stmts in the middle_bb. */
3020
3021 gsi = gsi_start_nondebug_after_labels_bb (middle_bb);
3022 if (gsi_end_p (gsi))
3023 return false;
3024 cast = gsi_stmt (gsi);
3025 gsi_next_nondebug (&gsi);
3026 if (!gsi_end_p (gsi))
3027 {
3028 call = gsi_stmt (gsi);
3029 gsi_next_nondebug (&gsi);
3030 if (!gsi_end_p (gsi))
3031 return false;
3032 }
3033 else
3034 {
3035 call = cast;
3036 cast = NULL;
3037 }
3038
3039 /* Check that we have a popcount/clz/ctz builtin. */
3040 if (!is_gimple_call (call) || gimple_call_num_args (call) != 1)
3041 return false;
3042
3043 arg = gimple_call_arg (call, 0);
3044 lhs = gimple_get_lhs (call);
3045
3046 if (lhs == NULL_TREE)
3047 return false;
3048
3049 combined_fn cfn = gimple_call_combined_fn (call);
3050 internal_fn ifn = IFN_LAST;
3051 int val = 0;
3052 switch (cfn)
3053 {
3054 case CFN_BUILT_IN_BSWAP16:
3055 case CFN_BUILT_IN_BSWAP32:
3056 case CFN_BUILT_IN_BSWAP64:
3057 case CFN_BUILT_IN_BSWAP128:
3058 CASE_CFN_FFS:
3059 CASE_CFN_PARITY:
3060 CASE_CFN_POPCOUNT:
3061 break;
3062 CASE_CFN_CLZ:
3063 if (INTEGRAL_TYPE_P (TREE_TYPE (arg)))
3064 {
3065 tree type = TREE_TYPE (arg);
3066 if (direct_internal_fn_supported_p (IFN_CLZ, type, OPTIMIZE_FOR_BOTH)
3067 && CLZ_DEFINED_VALUE_AT_ZERO (SCALAR_INT_TYPE_MODE (type),
3068 val) == 2)
3069 {
3070 ifn = IFN_CLZ;
3071 break;
3072 }
3073 }
3074 return false;
3075 CASE_CFN_CTZ:
3076 if (INTEGRAL_TYPE_P (TREE_TYPE (arg)))
3077 {
3078 tree type = TREE_TYPE (arg);
3079 if (direct_internal_fn_supported_p (IFN_CTZ, type, OPTIMIZE_FOR_BOTH)
3080 && CTZ_DEFINED_VALUE_AT_ZERO (SCALAR_INT_TYPE_MODE (type),
3081 val) == 2)
3082 {
3083 ifn = IFN_CTZ;
3084 break;
3085 }
3086 }
3087 return false;
3088 case CFN_BUILT_IN_CLRSB:
3089 val = TYPE_PRECISION (integer_type_node) - 1;
3090 break;
3091 case CFN_BUILT_IN_CLRSBL:
3092 val = TYPE_PRECISION (long_integer_type_node) - 1;
3093 break;
3094 case CFN_BUILT_IN_CLRSBLL:
3095 val = TYPE_PRECISION (long_long_integer_type_node) - 1;
3096 break;
3097 default:
3098 return false;
3099 }
3100
3101 if (cast)
3102 {
3103 /* We have a cast stmt feeding popcount/clz/ctz builtin. */
3104 /* Check that we have a cast prior to that. */
3105 if (gimple_code (cast) != GIMPLE_ASSIGN
3106 || !CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (cast)))
3107 return false;
3108 /* Result of the cast stmt is the argument to the builtin. */
3109 if (arg != gimple_assign_lhs (cast))
3110 return false;
3111 arg = gimple_assign_rhs1 (cast);
3112 }
3113
3114 cond = last_stmt (cond_bb);
3115
3116 /* Cond_bb has a check for b_4 [!=|==] 0 before calling the popcount/clz/ctz
3117 builtin. */
3118 if (gimple_code (cond) != GIMPLE_COND
3119 || (gimple_cond_code (cond) != NE_EXPR
3120 && gimple_cond_code (cond) != EQ_EXPR)
3121 || !integer_zerop (gimple_cond_rhs (cond))
3122 || arg != gimple_cond_lhs (cond))
3123 return false;
3124
3125 /* Canonicalize. */
3126 if ((e2->flags & EDGE_TRUE_VALUE
3127 && gimple_cond_code (cond) == NE_EXPR)
3128 || (e1->flags & EDGE_TRUE_VALUE
3129 && gimple_cond_code (cond) == EQ_EXPR))
3130 {
3131 std::swap (arg0, arg1);
3132 std::swap (e1, e2);
3133 }
3134
3135 /* Check PHI arguments. */
3136 if (lhs != arg0
3137 || TREE_CODE (arg1) != INTEGER_CST
3138 || wi::to_wide (arg1) != val)
3139 return false;
3140
3141 /* And insert the popcount/clz/ctz builtin and cast stmt before the
3142 cond_bb. */
3143 gsi = gsi_last_bb (cond_bb);
3144 if (cast)
3145 {
3146 gsi_from = gsi_for_stmt (cast);
3147 gsi_move_before (&gsi_from, &gsi);
3148 reset_flow_sensitive_info (gimple_get_lhs (cast));
3149 }
3150 gsi_from = gsi_for_stmt (call);
3151 if (ifn == IFN_LAST || gimple_call_internal_p (call))
3152 gsi_move_before (&gsi_from, &gsi);
3153 else
3154 {
3155 /* For __builtin_c[lt]z* force .C[LT]Z ifn, because only
3156 the latter is well defined at zero. */
3157 call = gimple_build_call_internal (ifn, 1, gimple_call_arg (call, 0));
3158 gimple_call_set_lhs (call, lhs);
3159 gsi_insert_before (&gsi, call, GSI_SAME_STMT);
3160 gsi_remove (&gsi_from, true);
3161 }
3162 reset_flow_sensitive_info (lhs);
3163
3164 /* Now update the PHI and remove unneeded bbs. */
3165 replace_phi_edge_with_variable (cond_bb, e2, phi, lhs);
3166 return true;
3167}
3168
3169/* Auxiliary functions to determine the set of memory accesses which
3170 can't trap because they are preceded by accesses to the same memory
3171 portion. We do that for MEM_REFs, so we only need to track
3172 the SSA_NAME of the pointer indirectly referenced. The algorithm
3173 simply is a walk over all instructions in dominator order. When
3174 we see an MEM_REF we determine if we've already seen a same
3175 ref anywhere up to the root of the dominator tree. If we do the
3176 current access can't trap. If we don't see any dominating access
3177 the current access might trap, but might also make later accesses
3178 non-trapping, so we remember it. We need to be careful with loads
3179 or stores, for instance a load might not trap, while a store would,
3180 so if we see a dominating read access this doesn't mean that a later
3181 write access would not trap. Hence we also need to differentiate the
3182 type of access(es) seen.
3183
3184 ??? We currently are very conservative and assume that a load might
3185 trap even if a store doesn't (write-only memory). This probably is
3186 overly conservative.
3187
3188 We currently support a special case that for !TREE_ADDRESSABLE automatic
3189 variables, it could ignore whether something is a load or store because the
3190 local stack should be always writable. */
3191
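For illustration, a C++-level sketch (hypothetical names) of the property this machinery tracks: the unconditional access to *p dominates the conditional store, so that store is known not to trap and cond_store_replacement below may sink it past the branch.

   int update (int *p, int c, int v)
   {
     int old = *p;   /* dominating access: later accesses to *p cannot trap */
     if (c)
       *p = v;       /* candidate for conditional store replacement */
     return old;
   }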
3192/* A hash-table of references (MEM_REF/ARRAY_REF/COMPONENT_REF), and in which
3193 basic block an *_REF through it was seen, which would constitute a
3194 no-trap region for same accesses.
3195
3196 Size is needed to support 2 MEM_REFs of different types, like
3197 MEM<double>(s_1) and MEM<long>(s_1), which would compare equal with
3198 OEP_ADDRESS_OF. */
3199struct ref_to_bb
3200{
3201 tree exp;
3202 HOST_WIDE_INT size;
3203 unsigned int phase;
3204 basic_block bb;
3205};
3206
3207/* Hashtable helpers. */
3208
3209struct refs_hasher : free_ptr_hash<ref_to_bb>
3210{
3211 static inline hashval_t hash (const ref_to_bb *);
3212 static inline bool equal (const ref_to_bb *, const ref_to_bb *);
3213};
3214
3215/* Used for quick clearing of the hash-table when we see calls.
3216 Hash entries with phase < nt_call_phase are invalid. */
3217static unsigned int nt_call_phase;
3218
3219/* The hash function. */
3220
3221inline hashval_t
3222refs_hasher::hash (const ref_to_bb *n)
3223{
3224 inchash::hash hstate;
3225 inchash::add_expr (n->exp, hstate, OEP_ADDRESS_OF);
3226 hstate.add_hwi (n->size);
3227 return hstate.end ();
3228}
3229
3230/* The equality function of *P1 and *P2. */
3231
3232inline bool
3233refs_hasher::equal (const ref_to_bb *n1, const ref_to_bb *n2)
3234{
3235 return operand_equal_p (n1->exp, n2->exp, OEP_ADDRESS_OF)
3236 && n1->size == n2->size;
3237}
3238
3239class nontrapping_dom_walker : public dom_walker
3240{
3241public:
3242 nontrapping_dom_walker (cdi_direction direction, hash_set<tree> *ps)
3243 : dom_walker (direction), m_nontrapping (ps), m_seen_refs (128)
3244 {}
3245
3246 edge before_dom_children (basic_block) final override;
3247 void after_dom_children (basic_block) final override;
3248
3249private:
3250
3251 /* We see the expression EXP in basic block BB. If it's an interesting
3252 expression (an MEM_REF through an SSA_NAME) possibly insert the
3253 expression into the set NONTRAP or the hash table of seen expressions.
3254 STORE is true if this expression is on the LHS, otherwise it's on
3255 the RHS. */
3256 void add_or_mark_expr (basic_block, tree, bool);
3257
3258 hash_set<tree> *m_nontrapping;
3259
3260 /* The hash table for remembering what we've seen. */
3261 hash_table<refs_hasher> m_seen_refs;
3262};
3263
3264/* Called by walk_dominator_tree, when entering the block BB. */
3265edge
3266nontrapping_dom_walker::before_dom_children (basic_block bb)
3267{
3268 edge e;
3269 edge_iterator ei;
3270 gimple_stmt_iterator gsi;
3271
3272 /* If we haven't seen all our predecessors, clear the hash-table. */
3273 FOR_EACH_EDGE (e, ei, bb->preds)
3274 if ((((size_t)e->src->aux) & 2) == 0)
3275 {
3276 nt_call_phase++;
3277 break;
3278 }
3279
3280 /* Mark this BB as being on the path to dominator root and as visited. */
3281 bb->aux = (void*)(1 | 2);
3282
3283 /* And walk the statements in order. */
3284 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
3285 {
3286 gimple *stmt = gsi_stmt (gsi);
3287
3288 if ((gimple_code (stmt) == GIMPLE_ASM && gimple_vdef (stmt))
3289 || (is_gimple_call (stmt)
3290 && (!nonfreeing_call_p (stmt) || !nonbarrier_call_p (stmt))))
3291 nt_call_phase++;
3292 else if (gimple_assign_single_p (stmt) && !gimple_has_volatile_ops (stmt))
3293 {
3294 add_or_mark_expr (bb, gimple_assign_lhs (stmt), true);
3295 add_or_mark_expr (bb, gimple_assign_rhs1 (stmt), false);
3296 }
3297 }
3298 return NULL;
3299}
3300
3301/* Called by walk_dominator_tree, when basic block BB is exited. */
3302void
3303nontrapping_dom_walker::after_dom_children (basic_block bb)
3304{
3305 /* This BB isn't on the path to dominator root anymore. */
3306 bb->aux = (void*)2;
3307}
3308
3309/* We see the expression EXP in basic block BB. If it's an interesting
3310 expression of:
3311 1) MEM_REF
3312 2) ARRAY_REF
3313 3) COMPONENT_REF
3314 possibly insert the expression into the set NONTRAP or the hash table
3315 of seen expressions. STORE is true if this expression is on the LHS,
3316 otherwise it's on the RHS. */
3317void
3318nontrapping_dom_walker::add_or_mark_expr (basic_block bb, tree exp, bool store)
3319{
3320 HOST_WIDE_INT size;
3321
3322 if ((TREE_CODE (exp) == MEM_REF || TREE_CODE (exp) == ARRAY_REF
3323 || TREE_CODE (exp) == COMPONENT_REF)
3324 && (size = int_size_in_bytes (TREE_TYPE (exp))) > 0)
3325 {
3326 struct ref_to_bb map;
3327 ref_to_bb **slot;
3328 struct ref_to_bb *r2bb;
3329 basic_block found_bb = 0;
3330
3331 if (!store)
3332 {
3333 tree base = get_base_address (exp);
3334 /* Only record a LOAD of a local variable without address-taken, as
3335 the local stack is always writable. This allows cselim on a STORE
3336 with a dominating LOAD. */
3337 if (!auto_var_p (base) || TREE_ADDRESSABLE (base))
3338 return;
3339 }
3340
3341 /* Try to find the last seen *_REF, which can trap. */
3342 map.exp = exp;
3343 map.size = size;
3344 slot = m_seen_refs.find_slot (&map, INSERT);
3345 r2bb = *slot;
3346 if (r2bb && r2bb->phase >= nt_call_phase)
3347 found_bb = r2bb->bb;
3348
3349 /* If we've found a trapping *_REF, _and_ it dominates EXP
3350 (it's in a basic block on the path from us to the dominator root)
3351 then we can't trap. */
3352 if (found_bb && (((size_t)found_bb->aux) & 1) == 1)
3353 {
3354 m_nontrapping->add (exp);
3355 }
3356 else
3357 {
3358 /* EXP might trap, so insert it into the hash table. */
3359 if (r2bb)
3360 {
3361 r2bb->phase = nt_call_phase;
3362 r2bb->bb = bb;
3363 }
3364 else
3365 {
3366 r2bb = XNEW (struct ref_to_bb);
3367 r2bb->phase = nt_call_phase;
3368 r2bb->bb = bb;
3369 r2bb->exp = exp;
3370 r2bb->size = size;
3371 *slot = r2bb;
3372 }
3373 }
3374 }
3375}
3376
3377/* This is the entry point of gathering non trapping memory accesses.
3378 It will do a dominator walk over the whole function, and it will
3379 make use of the bb->aux pointers. It returns a set of trees
3380 (the MEM_REFs itself) which can't trap. */
3381static hash_set<tree> *
3382get_non_trapping (void)
3383{
3384 nt_call_phase = 0;
3385 hash_set<tree> *nontrap = new hash_set<tree>;
3386
3387 nontrapping_dom_walker (CDI_DOMINATORS, nontrap)
3388 .walk (cfun->cfg->x_entry_block_ptr);
3389
3390 clear_aux_for_blocks ();
3391 return nontrap;
3392}
3393
3394/* Do the main work of conditional store replacement. We already know
3395 that the recognized pattern looks like so:
3396
3397 split:
3398 if (cond) goto MIDDLE_BB; else goto JOIN_BB (edge E1)
3399 MIDDLE_BB:
3400 something
3401 fallthrough (edge E0)
3402 JOIN_BB:
3403 some more
3404
3405 We check that MIDDLE_BB contains only one store, that that store
3406 doesn't trap (not via NOTRAP, but via checking if an access to the same
3407 memory location dominates us, or the store is to a local addressable
3408 object) and that the store has a "simple" RHS. */
3409
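As a rough C++-level sketch (hypothetical names) of the transformation: because buf is a local, non-address-taken object, the store in MIDDLE_BB cannot trap, so it is replaced by an unconditional store of a PHI of the new value and the old memory contents.

   int g (int cond, int v)
   {
     int buf[2] = { 0, 0 };
     if (cond)
       buf[1] = v;   /* MIDDLE_BB: the single simple store */
     return buf[1];  /* after the pass: buf[1] = PHI <v, old buf[1]>, stored unconditionally */
   }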
3410static bool
3411cond_store_replacement (basic_block middle_bb, basic_block join_bb,
3412 edge e0, edge e1, hash_set<tree> *nontrap)
3413{
3414 gimple *assign = last_and_only_stmt (middle_bb);
3415 tree lhs, rhs, name, name2;
3416 gphi *newphi;
3417 gassign *new_stmt;
3418 gimple_stmt_iterator gsi;
3419 location_t locus;
3420
3421 /* Check if middle_bb contains only one store. */
3422 if (!assign
3423 || !gimple_assign_single_p (assign)
3424 || gimple_has_volatile_ops (assign))
3425 return false;
3426
3427 /* And no PHI nodes so all uses in the single stmt are also
3428 available where we insert to. */
3429 if (!gimple_seq_empty_p (phi_nodes (middle_bb)))
3430 return false;
3431
3432 locus = gimple_location (assign);
3433 lhs = gimple_assign_lhs (assign);
3434 rhs = gimple_assign_rhs1 (assign);
3435 if ((!REFERENCE_CLASS_P (lhs)
3436 && !DECL_P (lhs))
3437 || !is_gimple_reg_type (TREE_TYPE (lhs)))
3438 return false;
3439
3440 /* Prove that we can move the store down. We could also check
3441 TREE_THIS_NOTRAP here, but in that case we also could move stores,
3442 whose value is not available readily, which we want to avoid. */
3443 if (!nontrap->contains (lhs))
3444 {
3445 /* If LHS is an access to a local variable without address-taken
3446 (or when we allow data races) and known not to trap, we could
3447 always safely move down the store. */
3448 tree base = get_base_address (lhs);
3449 if (!auto_var_p (base)
3450 || (TREE_ADDRESSABLE (base) && !flag_store_data_races)
3451 || tree_could_trap_p (lhs))
3452 return false;
3453 }
3454
3455 /* Now we've checked the constraints, so do the transformation:
3456 1) Remove the single store. */
3457 gsi = gsi_for_stmt (assign);
3458 unlink_stmt_vdef (assign);
3459 gsi_remove (&gsi, true);
3460 release_defs (assign);
3461
3462 /* Make both store and load use alias-set zero as we have to
3463 deal with the case of the store being a conditional change
3464 of the dynamic type. */
3465 lhs = unshare_expr (lhs);
3466 tree *basep = &lhs;
3467 while (handled_component_p (*basep))
3468 basep = &TREE_OPERAND (*basep, 0);
3469 if (TREE_CODE (*basep) == MEM_REF
3470 || TREE_CODE (*basep) == TARGET_MEM_REF)
3471 TREE_OPERAND (*basep, 1)
3472 = fold_convert (ptr_type_node, TREE_OPERAND (*basep, 1));
3473 else
3474 *basep = build2 (MEM_REF, TREE_TYPE (*basep),
3475 build_fold_addr_expr (*basep),
3476 build_zero_cst (ptr_type_node));
3477
3478 /* 2) Insert a load from the memory of the store to the temporary
3479 on the edge which did not contain the store. */
3480 name = make_temp_ssa_name (TREE_TYPE (lhs), NULL, "cstore");
3481 new_stmt = gimple_build_assign (name, lhs);
3482 gimple_set_location (new_stmt, locus);
3483 lhs = unshare_expr (lhs);
3484 {
3485 /* Set the no-warning bit on the rhs of the load to avoid uninit
3486 warnings. */
3487 tree rhs1 = gimple_assign_rhs1 (new_stmt);
3488 suppress_warning (rhs1, OPT_Wuninitialized);
3489 }
3490 gsi_insert_on_edge (e1, new_stmt);
3491
3492 /* 3) Create a PHI node at the join block, with one argument
3493 holding the old RHS, and the other holding the temporary
3494 where we stored the old memory contents. */
3495 name2 = make_temp_ssa_name (TREE_TYPE (lhs), NULL, "cstore");
3496 newphi = create_phi_node (name2, join_bb);
3497 add_phi_arg (newphi, rhs, e0, locus);
3498 add_phi_arg (newphi, name, e1, locus);
3499
3500 new_stmt = gimple_build_assign (lhs, PHI_RESULT (newphi));
3501
3502 /* 4) Insert that PHI node. */
3503 gsi = gsi_after_labels (join_bb);
3504 if (gsi_end_p (gsi))
3505 {
3506 gsi = gsi_last_bb (join_bb);
3507 gsi_insert_after (&gsi, new_stmt, GSI_NEW_STMT);
3508 }
3509 else
3510 gsi_insert_before (&gsi, new_stmt, GSI_NEW_STMT);
3511
3512 if (dump_file && (dump_flags & TDF_DETAILS))
3513 {
3514 fprintf (dump_file, "\nConditional store replacement happened!");
3515 fprintf (dump_file, "\nReplaced the store with a load.");
3516 fprintf (dump_file, "\nInserted a new PHI statement in joint block:\n");
3517 print_gimple_stmt (dump_file, new_stmt, 0, TDF_VOPS|TDF_MEMSYMS);
3518 }
3519 statistics_counter_event (cfun, "conditional store replacement", 1);
3520
3521 return true;
3522}
3523
3524/* Do the main work of conditional store replacement. */
3525
3526static bool
3527cond_if_else_store_replacement_1 (basic_block then_bb, basic_block else_bb,
3528 basic_block join_bb, gimple *then_assign,
3529 gimple *else_assign)
3530{
3531 tree lhs_base, lhs, then_rhs, else_rhs, name;
3532 location_t then_locus, else_locus;
3533 gimple_stmt_iterator gsi;
3534 gphi *newphi;
3535 gassign *new_stmt;
3536
3537 if (then_assign == NULL
3538 || !gimple_assign_single_p (then_assign)
3539 || gimple_clobber_p (then_assign)
3540 || gimple_has_volatile_ops (then_assign)
3541 || else_assign == NULL
3542 || !gimple_assign_single_p (else_assign)
3543 || gimple_clobber_p (else_assign)
3544 || gimple_has_volatile_ops (else_assign))
3545 return false;
3546
3547 lhs = gimple_assign_lhs (then_assign);
3548 if (!is_gimple_reg_type (TREE_TYPE (lhs))
3549 || !operand_equal_p (lhs, gimple_assign_lhs (else_assign), 0))
3550 return false;
3551
3552 lhs_base = get_base_address (lhs);
3553 if (lhs_base == NULL_TREE
3554 || (!DECL_P (lhs_base) && TREE_CODE (lhs_base) != MEM_REF))
3555 return false;
3556
3557 then_rhs = gimple_assign_rhs1 (then_assign);
3558 else_rhs = gimple_assign_rhs1 (else_assign);
3559 then_locus = gimple_location (then_assign);
3560 else_locus = gimple_location (else_assign);
3561
3562 /* Now we've checked the constraints, so do the transformation:
3563 1) Remove the stores. */
3564 gsi = gsi_for_stmt (then_assign);
3565 unlink_stmt_vdef (then_assign);
3566 gsi_remove (&gsi, true);
3567 release_defs (then_assign);
3568
3569 gsi = gsi_for_stmt (else_assign);
3570 unlink_stmt_vdef (else_assign);
3571 gsi_remove (&gsi, true);
3572 release_defs (else_assign);
3573
3574 /* 2) Create a PHI node at the join block, with one argument
3575 holding the old RHS, and the other holding the temporary
3576 where we stored the old memory contents. */
3577 name = make_temp_ssa_name (TREE_TYPE (lhs), NULL, "cstore");
3578 newphi = create_phi_node (name, join_bb);
3579 add_phi_arg (newphi, then_rhs, EDGE_SUCC (then_bb, 0), then_locus);
3580 add_phi_arg (newphi, else_rhs, EDGE_SUCC (else_bb, 0), else_locus);
3581
3582 new_stmt = gimple_build_assign (lhs, PHI_RESULT (newphi));
3583
3584 /* 3) Insert that PHI node. */
3585 gsi = gsi_after_labels (join_bb);
3586 if (gsi_end_p (gsi))
3587 {
3588 gsi = gsi_last_bb (join_bb);
3589 gsi_insert_after (&gsi, new_stmt, GSI_NEW_STMT);
3590 }
3591 else
3592 gsi_insert_before (&gsi, new_stmt, GSI_NEW_STMT);
3593
3594 statistics_counter_event (cfun, "if-then-else store replacement", 1);
3595
3596 return true;
3597}
3598
3599/* Return the single store in BB with VDEF or NULL if there are
3600 other stores in the BB or loads following the store. */
3601
3602static gimple *
3603single_trailing_store_in_bb (basic_block bb, tree vdef)
3604{
3605 if (SSA_NAME_IS_DEFAULT_DEF (vdef))
3606 return NULL;
3607 gimple *store = SSA_NAME_DEF_STMT (vdef);
3608 if (gimple_bb (store) != bb
3609 || gimple_code (store) == GIMPLE_PHI)
3610 return NULL;
3611
3612 /* Verify there is no other store in this BB. */
3613 if (!SSA_NAME_IS_DEFAULT_DEF (gimple_vuse (store))
3614 && gimple_bb (SSA_NAME_DEF_STMT (gimple_vuse (store))) == bb
3615 && gimple_code (SSA_NAME_DEF_STMT (gimple_vuse (store))) != GIMPLE_PHI)
3616 return NULL;
3617
3618 /* Verify there is no load or store after the store. */
3619 use_operand_p use_p;
3620 imm_use_iterator imm_iter;
3621 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, gimple_vdef (store))
3622 if (USE_STMT (use_p) != store
3623 && gimple_bb (USE_STMT (use_p)) == bb)
3624 return NULL;
3625
3626 return store;
3627}
3628
3629/* Conditional store replacement. We already know
3630 that the recognized pattern looks like so:
3631
3632 split:
3633 if (cond) goto THEN_BB; else goto ELSE_BB (edge E1)
3634 THEN_BB:
3635 ...
3636 X = Y;
3637 ...
3638 goto JOIN_BB;
3639 ELSE_BB:
3640 ...
3641 X = Z;
3642 ...
3643 fallthrough (edge E0)
3644 JOIN_BB:
3645 some more
3646
3647 We check that it is safe to sink the store to JOIN_BB by verifying that
3648 there are no read-after-write or write-after-write dependencies in
3649 THEN_BB and ELSE_BB. */
3650
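A C++-level sketch (hypothetical names) of the pattern: both arms store to the same location, so the two stores can be sunk into JOIN_BB as a single store of a PHI of the two right-hand sides.

   void h (int *p, int c, int a, int b)
   {
     if (c)
       *p = a;   /* THEN_BB */
     else
       *p = b;   /* ELSE_BB */
     /* JOIN_BB: becomes *p = PHI <a, b>, i.e. *p = c ? a : b */
   }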
3651static bool
3652cond_if_else_store_replacement (basic_block then_bb, basic_block else_bb,
3653 basic_block join_bb)
3654{
3655 vec<data_reference_p> then_datarefs, else_datarefs;
3656 vec<ddr_p> then_ddrs, else_ddrs;
3657 gimple *then_store, *else_store;
3658 bool found, ok = false, res;
3659 struct data_dependence_relation *ddr;
3660 data_reference_p then_dr, else_dr;
3661 int i, j;
3662 tree then_lhs, else_lhs;
3663 basic_block blocks[3];
3664
3665 /* Handle the case with single store in THEN_BB and ELSE_BB. That is
3666 cheap enough to always handle as it allows us to elide dependence
3667 checking. */
3668 gphi *vphi = NULL;
3669 for (gphi_iterator si = gsi_start_phis (join_bb); !gsi_end_p (si);
3670 gsi_next (&si))
3671 if (virtual_operand_p (gimple_phi_result (si.phi ())))
3672 {
3673 vphi = si.phi ();
3674 break;
3675 }
3676 if (!vphi)
3677 return false;
3678 tree then_vdef = PHI_ARG_DEF_FROM_EDGE (vphi, single_succ_edge (then_bb));
3679 tree else_vdef = PHI_ARG_DEF_FROM_EDGE (vphi, single_succ_edge (else_bb));
3680 gimple *then_assign = single_trailing_store_in_bb (then_bb, then_vdef);
3681 if (then_assign)
3682 {
3683 gimple *else_assign = single_trailing_store_in_bb (else_bb, else_vdef);
3684 if (else_assign)
3685 return cond_if_else_store_replacement_1 (then_bb, else_bb, join_bb,
3686 then_assign, else_assign);
3687 }
3688
3689 /* If either vectorization or if-conversion is disabled then do
3690 not sink any stores. */
3691 if (param_max_stores_to_sink == 0
3692 || (!flag_tree_loop_vectorize && !flag_tree_slp_vectorize)
3693 || !flag_tree_loop_if_convert)
3694 return false;
3695
3696 /* Find data references. */
3697 then_datarefs.create (1);
3698 else_datarefs.create (1);
3699 if ((find_data_references_in_bb (NULL, then_bb, &then_datarefs)
3700 == chrec_dont_know)
3701 || !then_datarefs.length ()
3702 || (find_data_references_in_bb (NULL, else_bb, &else_datarefs)
3703 == chrec_dont_know)
3704 || !else_datarefs.length ())
3705 {
3706 free_data_refs (then_datarefs);
3707 free_data_refs (else_datarefs);
3708 return false;
3709 }
3710
3711 /* Find pairs of stores with equal LHS. */
3712 auto_vec<gimple *, 1> then_stores, else_stores;
3713 FOR_EACH_VEC_ELT (then_datarefs, i, then_dr)
3714 {
3715 if (DR_IS_READ (then_dr))
3716 continue;
3717
3718 then_store = DR_STMT (then_dr);
3719 then_lhs = gimple_get_lhs (then_store);
3720 if (then_lhs == NULL_TREE)
3721 continue;
3722 found = false;
3723
3724 FOR_EACH_VEC_ELT (else_datarefs, j, else_dr)
3725 {
3726 if (DR_IS_READ (else_dr))
3727 continue;
3728
3729 else_store = DR_STMT (else_dr);
3730 else_lhs = gimple_get_lhs (else_store);
3731 if (else_lhs == NULL_TREE)
3732 continue;
3733
3734 if (operand_equal_p (then_lhs, else_lhs, 0))
3735 {
3736 found = true;
3737 break;
3738 }
3739 }
3740
3741 if (!found)
3742 continue;
3743
3744 then_stores.safe_push (then_store);
3745 else_stores.safe_push (else_store);
3746 }
3747
3748 /* No pairs of stores found. */
3749 if (!then_stores.length ()
3750 || then_stores.length () > (unsigned) param_max_stores_to_sink)
3751 {
3752 free_data_refs (then_datarefs);
3753 free_data_refs (else_datarefs);
3754 return false;
3755 }
3756
3757 /* Compute and check data dependencies in both basic blocks. */
3758 then_ddrs.create (1);
3759 else_ddrs.create (1);
3760 if (!compute_all_dependences (then_datarefs, &then_ddrs,
3761 vNULL, false)
3762 || !compute_all_dependences (else_datarefs, &else_ddrs,
3763 vNULL, false))
3764 {
3765 free_dependence_relations (then_ddrs);
3766 free_dependence_relations (else_ddrs);
3767 free_data_refs (then_datarefs);
3768 free_data_refs (else_datarefs);
3769 return false;
3770 }
3771 blocks[0] = then_bb;
3772 blocks[1] = else_bb;
3773 blocks[2] = join_bb;
3774 renumber_gimple_stmt_uids_in_blocks (blocks, 3);
3775
3776 /* Check that there are no read-after-write or write-after-write dependencies
3777 in THEN_BB. */
3778 FOR_EACH_VEC_ELT (then_ddrs, i, ddr)for (i = 0; (then_ddrs).iterate ((i), &(ddr)); ++(i))
3779 {
3780 struct data_reference *dra = DDR_A (ddr)(ddr)->a;
3781 struct data_reference *drb = DDR_B (ddr)(ddr)->b;
3782
3783 if (DDR_ARE_DEPENDENT (ddr) != chrec_known
3784 && ((DR_IS_READ (dra) && DR_IS_WRITE (drb)
3785 && gimple_uid (DR_STMT (dra)) > gimple_uid (DR_STMT (drb)))
3786 || (DR_IS_READ (drb) && DR_IS_WRITE (dra)
3787 && gimple_uid (DR_STMT (drb)) > gimple_uid (DR_STMT (dra)))
3788 || (DR_IS_WRITE (dra) && DR_IS_WRITE (drb))))
3789 {
3790 free_dependence_relations (then_ddrs);
3791 free_dependence_relations (else_ddrs);
3792 free_data_refs (then_datarefs);
3793 free_data_refs (else_datarefs);
3794 return false;
3795 }
3796 }
3797
3798 /* Check that there are no read-after-write or write-after-write dependencies
3799 in ELSE_BB. */
3800 FOR_EACH_VEC_ELT (else_ddrs, i, ddr)
3801 {
3802 struct data_reference *dra = DDR_A (ddr);
3803 struct data_reference *drb = DDR_B (ddr);
3804
3805 if (DDR_ARE_DEPENDENT (ddr) != chrec_known
3806 && ((DR_IS_READ (dra) && DR_IS_WRITE (drb)
3807 && gimple_uid (DR_STMT (dra)) > gimple_uid (DR_STMT (drb)))
3808 || (DR_IS_READ (drb) && DR_IS_WRITE (dra)
3809 && gimple_uid (DR_STMT (drb)) > gimple_uid (DR_STMT (dra)))
3810 || (DR_IS_WRITE (dra) && DR_IS_WRITE (drb))))
3811 {
3812 free_dependence_relations (then_ddrs);
3813 free_dependence_relations (else_ddrs);
3814 free_data_refs (then_datarefs);
3815 free_data_refs (else_datarefs);
3816 return false;
3817 }
3818 }
3819
3820 /* Sink stores with same LHS. */
3821 FOR_EACH_VEC_ELT (then_stores, i, then_store)
3822 {
3823 else_store = else_stores[i];
3824 res = cond_if_else_store_replacement_1 (then_bb, else_bb, join_bb,
3825 then_store, else_store);
3826 ok = ok || res;
3827 }
3828
3829 free_dependence_relations (then_ddrs);
3830 free_dependence_relations (else_ddrs);
3831 free_data_refs (then_datarefs);
3832 free_data_refs (else_datarefs);
3833
3834 return ok;
3835}
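/* A rough source-level sketch of the effect of the store sinking above
   (variable names are illustrative only, not from GCC):

     if (c)
       *p = a;
     else
       *p = b;

   is rewritten into a PHI of the stored values plus a single unconditional
   store in the join block, conceptually equivalent to

     tmp = c ? a : b;
     *p = tmp;  */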
3836
3837/* Return TRUE if STMT has a VUSE whose corresponding VDEF is in BB. */
3838
3839static bool
3840local_mem_dependence (gimple *stmt, basic_block bb)
3841{
3842 tree vuse = gimple_vuse (stmt);
3843 gimple *def;
3844
3845 if (!vuse)
3846 return false;
3847
3848 def = SSA_NAME_DEF_STMT (vuse);
3849 return (def && gimple_bb (def) == bb);
3850}
3851
3852/* Given a "diamond" control-flow pattern where BB0 tests a condition,
3853 BB1 and BB2 are "then" and "else" blocks dependent on this test,
3854 and BB3 rejoins control flow following BB1 and BB2, look for
3855 opportunities to hoist loads as follows. If BB3 contains a PHI of
3856 two loads, one each occurring in BB1 and BB2, and the loads are
3857 provably of adjacent fields in the same structure, then move both
3858 loads into BB0. Of course this can only be done if there are no
3859 dependencies preventing such motion.
3860
3861 One of the hoisted loads will always be speculative, so the
3862 transformation is currently conservative:
3863
3864 - The fields must be strictly adjacent.
3865 - The two fields must occupy a single memory block that is
3866 guaranteed to not cross a page boundary.
3867
3868 The last is difficult to prove, as such memory blocks should be
3869 aligned on the minimum of the stack alignment boundary and the
3870 alignment guaranteed by heap allocation interfaces. Thus we rely
3871 on a parameter for the alignment value.
3872
3873 Provided a good value is used for the last case, the first
3874 restriction could possibly be relaxed. */
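/* A rough source-level sketch of the shape the function below looks for;
   the struct and variable names are illustrative only, not from GCC:

     struct n { long left; long right; };    two strictly adjacent fields

     x = c ? y->left : y->right;

   Both loads can be hoisted above the branch, after which the result can
   be selected with a conditional move.  */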
3875
3876static void
3877hoist_adjacent_loads (basic_block bb0, basic_block bb1,
3878 basic_block bb2, basic_block bb3)
3879{
3880 int param_align = param_l1_cache_line_size;
3881 unsigned param_align_bits = (unsigned) (param_align * BITS_PER_UNIT);
3882 gphi_iterator gsi;
3883
3884 /* Walk the phis in bb3 looking for an opportunity. We are looking
3885 for phis of two SSA names, one each of which is defined in bb1 and
3886 bb2. */
3887 for (gsi = gsi_start_phis (bb3); !gsi_end_p (gsi); gsi_next (&gsi))
3888 {
3889 gphi *phi_stmt = gsi.phi ();
3890 gimple *def1, *def2;
3891 tree arg1, arg2, ref1, ref2, field1, field2;
3892 tree tree_offset1, tree_offset2, tree_size2, next;
3893 int offset1, offset2, size2;
3894 unsigned align1;
3895 gimple_stmt_iterator gsi2;
3896 basic_block bb_for_def1, bb_for_def2;
3897
3898 if (gimple_phi_num_args (phi_stmt) != 2
3899 || virtual_operand_p (gimple_phi_result (phi_stmt)))
3900 continue;
3901
3902 arg1 = gimple_phi_arg_def (phi_stmt, 0);
3903 arg2 = gimple_phi_arg_def (phi_stmt, 1);
3904
3905 if (TREE_CODE (arg1) != SSA_NAME
3906 || TREE_CODE (arg2) != SSA_NAME
3907 || SSA_NAME_IS_DEFAULT_DEF (arg1)
3908 || SSA_NAME_IS_DEFAULT_DEF (arg2))
3909 continue;
3910
3911 def1 = SSA_NAME_DEF_STMT (arg1);
3912 def2 = SSA_NAME_DEF_STMT (arg2);
3913
3914 if ((gimple_bb (def1) != bb1 || gimple_bb (def2) != bb2)
3915 && (gimple_bb (def2) != bb1 || gimple_bb (def1) != bb2))
3916 continue;
3917
3918 /* Check the mode of the arguments to be sure a conditional move
3919 can be generated for it. */
3920 if (optab_handler (movcc_optab, TYPE_MODE (TREE_TYPE (arg1)))
3921 == CODE_FOR_nothing)
3922 continue;
3923
3924 /* Both statements must be assignments whose RHS is a COMPONENT_REF. */
3925 if (!gimple_assign_single_p (def1)
3926 || !gimple_assign_single_p (def2)
3927 || gimple_has_volatile_ops (def1)
3928 || gimple_has_volatile_ops (def2))
3929 continue;
3930
3931 ref1 = gimple_assign_rhs1 (def1);
3932 ref2 = gimple_assign_rhs1 (def2);
3933
3934 if (TREE_CODE (ref1) != COMPONENT_REF
3935 || TREE_CODE (ref2) != COMPONENT_REF)
3936 continue;
3937
3938 /* The zeroth operand of the two component references must be
3939 identical. It is not sufficient to compare get_base_address of
3940 the two references, because this could allow for different
3941 elements of the same array in the two trees. It is not safe to
3942 assume that the existence of one array element implies the
3943 existence of a different one. */
3944 if (!operand_equal_p (TREE_OPERAND (ref1, 0), TREE_OPERAND (ref2, 0), 0))
3945 continue;
3946
3947 field1 = TREE_OPERAND (ref1, 1);
3948 field2 = TREE_OPERAND (ref2, 1);
3949
3950 /* Check for field adjacency, and ensure field1 comes first. */
3951 for (next = DECL_CHAIN (field1);
3952 next && TREE_CODE (next) != FIELD_DECL;
3953 next = DECL_CHAIN (next))
3954 ;
3955
3956 if (next != field2)
3957 {
3958 for (next = DECL_CHAIN (field2);
3959 next && TREE_CODE (next) != FIELD_DECL;
3960 next = DECL_CHAIN (next))
3961 ;
3962
3963 if (next != field1)
3964 continue;
3965
3966 std::swap (field1, field2);
3967 std::swap (def1, def2);
3968 }
3969
3970 bb_for_def1 = gimple_bb (def1);
3971 bb_for_def2 = gimple_bb (def2);
3972
3973 /* Check for proper alignment of the first field. */
3974 tree_offset1 = bit_position (field1);
3975 tree_offset2 = bit_position (field2);
3976 tree_size2 = DECL_SIZE (field2);
3977
3978 if (!tree_fits_uhwi_p (tree_offset1)
3979 || !tree_fits_uhwi_p (tree_offset2)
3980 || !tree_fits_uhwi_p (tree_size2))
3981 continue;
3982
3983 offset1 = tree_to_uhwi (tree_offset1);
3984 offset2 = tree_to_uhwi (tree_offset2);
3985 size2 = tree_to_uhwi (tree_size2);
3986 align1 = DECL_ALIGN (field1) % param_align_bits;
3987
3988 if (offset1 % BITS_PER_UNIT != 0)
3989 continue;
3990
3991 /* For profitability, the two field references should fit within
3992 a single cache line. */
3993 if (align1 + offset2 - offset1 + size2 > param_align_bits)
3994 continue;
3995
3996 /* The two expressions cannot be dependent upon vdefs defined
3997 in bb1/bb2. */
3998 if (local_mem_dependence (def1, bb_for_def1)
3999 || local_mem_dependence (def2, bb_for_def2))
4000 continue;
4001
4002 /* The conditions are satisfied; hoist the loads from bb1 and bb2 into
4003 bb0. We hoist the first one first so that a cache miss is handled
4004 efficiently regardless of hardware cache-fill policy. */
4005 gsi2 = gsi_for_stmt (def1);
4006 gsi_move_to_bb_end (&gsi2, bb0);
4007 gsi2 = gsi_for_stmt (def2);
4008 gsi_move_to_bb_end (&gsi2, bb0);
4009 statistics_counter_event (cfun, "hoisted loads", 1);
4010
4011 if (dump_file && (dump_flags & TDF_DETAILS))
4012 {
4013 fprintf (dump_file,
4014 "\nHoisting adjacent loads from %d and %d into %d: \n",
4015 bb_for_def1->index, bb_for_def2->index, bb0->index);
4016 print_gimple_stmt (dump_file, def1, 0, TDF_VOPS|TDF_MEMSYMS);
4017 print_gimple_stmt (dump_file, def2, 0, TDF_VOPS|TDF_MEMSYMS);
4018 }
4019 }
4020}
4021
4022/* Determine whether we should attempt to hoist adjacent loads out of
4023 diamond patterns in pass_phiopt. Always hoist loads if
4024 -fhoist-adjacent-loads is specified and the target machine has
4025 both a conditional move instruction and a defined cache line size. */
4026
4027static bool
4028gate_hoist_loads (void)
4029{
4030 return (flag_hoist_adjacent_loads == 1
4031 && param_l1_cache_line_size
4032 && HAVE_conditional_move);
4033}
4034
4035/* This pass tries to replace an if-then-else block with an
4036 assignment. We have four kinds of transformations. Some of these
4037 transformations are also performed by the ifcvt RTL optimizer.
4038
4039 Conditional Replacement
4040 -----------------------
4041
4042 This transformation, implemented in match_simplify_replacement,
4043 replaces
4044
4045 bb0:
4046 if (cond) goto bb2; else goto bb1;
4047 bb1:
4048 bb2:
4049 x = PHI <0 (bb1), 1 (bb0), ...>;
4050
4051 with
4052
4053 bb0:
4054 x' = cond;
4055 goto bb2;
4056 bb2:
4057 x = PHI <x' (bb0), ...>;
4058
4059 We remove bb1 as it becomes unreachable. This occurs often due to
4060 gimplification of conditionals.
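   At the C level this corresponds roughly to the illustrative (non-GCC)
   source

     x = cond ? 1 : 0;

   collapsing to the single assignment x = cond;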
4061
4062 Value Replacement
4063 -----------------
4064
4065 This transformation, implemented in value_replacement, replaces
4066
4067 bb0:
4068 if (a != b) goto bb2; else goto bb1;
4069 bb1:
4070 bb2:
4071 x = PHI <a (bb1), b (bb0), ...>;
4072
4073 with
4074
4075 bb0:
4076 bb2:
4077 x = PHI <b (bb0), ...>;
4078
4079 This opportunity can sometimes occur as a result of other
4080 optimizations.
4081
4082
4083 Another case caught by value replacement looks like this:
4084
4085 bb0:
4086 t1 = a == CONST;
4087 t2 = b > c;
4088 t3 = t1 & t2;
4089 if (t3 != 0) goto bb1; else goto bb2;
4090 bb1:
4091 bb2:
4092 x = PHI (CONST, a)
4093
4094 Gets replaced with (valid since t3 != 0 on the bb1 edge implies a == CONST):
4095 bb0:
4096 bb2:
4097 t1 = a == CONST;
4098 t2 = b > c;
4099 t3 = t1 & t2;
4100 x = a;
4101
4102 ABS Replacement
4103 ---------------
4104
4105 This transformation, implemented in match_simplify_replacement, replaces
4106
4107 bb0:
4108 if (a >= 0) goto bb2; else goto bb1;
4109 bb1:
4110 x = -a;
4111 bb2:
4112 x = PHI <x (bb1), a (bb0), ...>;
4113
4114 with
4115
4116 bb0:
4117 x' = ABS_EXPR< a >;
4118 bb2:
4119 x = PHI <x' (bb0), ...>;
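   At the C level this corresponds roughly to the illustrative (non-GCC)
   source

     x = (a >= 0) ? a : -a;

   collapsing to a single x = ABS_EXPR <a>.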
4120
4121 MIN/MAX Replacement
4122 -------------------
4123
4124 This transformation, implemented in minmax_replacement, replaces
4125
4126 bb0:
4127 if (a <= b) goto bb2; else goto bb1;
4128 bb1:
4129 bb2:
4130 x = PHI <b (bb1), a (bb0), ...>;
4131
4132 with
4133
4134 bb0:
4135 x' = MIN_EXPR (a, b)
4136 bb2:
4137 x = PHI <x' (bb0), ...>;
4138
4139 A similar transformation is done for MAX_EXPR.
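   At the C level the MIN case above corresponds roughly to the
   illustrative (non-GCC) source

     x = (a <= b) ? a : b;

   collapsing to a single x = MIN_EXPR <a, b>.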
4140
4141
4142 This pass also performs a fifth transformation of a slightly different
4143 flavor.
4144
4145 Factor conversion in COND_EXPR
4146 ------------------------------
4147
4148 This transformation factors the conversion out of COND_EXPR with
4149 factor_out_conditional_conversion.
4150
4151 For example:
4152 if (a <= CST) goto <bb 3>; else goto <bb 4>;
4153 <bb 3>:
4154 tmp = (int) a;
4155 <bb 4>:
4156 tmp = PHI <tmp, CST>
4157
4158 Into:
4159 if (a <= CST) goto <bb 3>; else goto <bb 4>;
4160 <bb 3>:
4161 <bb 4>:
4162 a = PHI <a, CST>
4163 tmp = (int) a;
4164
4165 Adjacent Load Hoisting
4166 ----------------------
4167
4168 This transformation replaces
4169
4170 bb0:
4171 if (...) goto bb2; else goto bb1;
4172 bb1:
4173 x1 = (<expr>).field1;
4174 goto bb3;
4175 bb2:
4176 x2 = (<expr>).field2;
4177 bb3:
4178 # x = PHI <x1, x2>;
4179
4180 with
4181
4182 bb0:
4183 x1 = (<expr>).field1;
4184 x2 = (<expr>).field2;
4185 if (...) goto bb2; else goto bb1;
4186 bb1:
4187 goto bb3;
4188 bb2:
4189 bb3:
4190 # x = PHI <x1, x2>;
4191
4192 The purpose of this transformation is to enable generation of conditional
4193 move instructions such as Intel CMOVE or PowerPC ISEL. Because one of
4194 the loads is speculative, the transformation is restricted to very
4195 specific cases to avoid introducing a page fault. We are looking for
4196 the common idiom:
4197
4198 if (...)
4199 x = y->left;
4200 else
4201 x = y->right;
4202
4203 where left and right are typically adjacent pointers in a tree structure. */
4204
4205namespace {
4206
4207const pass_data pass_data_phiopt =
4208{
4209 GIMPLE_PASS, /* type */
4210 "phiopt", /* name */
4211 OPTGROUP_NONE, /* optinfo_flags */
4212 TV_TREE_PHIOPT, /* tv_id */
4213 ( PROP_cfg | PROP_ssa ), /* properties_required */
4214 0, /* properties_provided */
4215 0, /* properties_destroyed */
4216 0, /* todo_flags_start */
4217 0, /* todo_flags_finish */
4218};
4219
4220class pass_phiopt : public gimple_opt_pass
4221{
4222public:
4223 pass_phiopt (gcc::context *ctxt)
4224 : gimple_opt_pass (pass_data_phiopt, ctxt), early_p (false)
4225 {}
4226
4227 /* opt_pass methods: */
4228 opt_pass * clone () final override { return new pass_phiopt (m_ctxt); }
4229 void set_pass_param (unsigned n, bool param) final override
4230 {
4231 gcc_assert (n == 0);
4232 early_p = param;
4233 }
4234 bool gate (function *) final override { return flag_ssa_phiopt; }
4235 unsigned int execute (function *) final override
4236 {
4237 return tree_ssa_phiopt_worker (false,
4238 !early_p ? gate_hoist_loads () : false,
4239 early_p);
4240 }
4241
4242private:
4243 bool early_p;
4244}; // class pass_phiopt
4245
4246} // anon namespace
4247
4248gimple_opt_pass *
4249make_pass_phiopt (gcc::context *ctxt)
4250{
4251 return new pass_phiopt (ctxt);
4252}
4253
4254namespace {
4255
4256const pass_data pass_data_cselim =
4257{
4258 GIMPLE_PASS, /* type */
4259 "cselim", /* name */
4260 OPTGROUP_NONE, /* optinfo_flags */
4261 TV_TREE_PHIOPT, /* tv_id */
4262 ( PROP_cfg | PROP_ssa ), /* properties_required */
4263 0, /* properties_provided */
4264 0, /* properties_destroyed */
4265 0, /* todo_flags_start */
4266 0, /* todo_flags_finish */
4267};
4268
4269class pass_cselim : public gimple_opt_pass
4270{
4271public:
4272 pass_cselim (gcc::context *ctxt)
4273 : gimple_opt_pass (pass_data_cselim, ctxt)
4274 {}
4275
4276 /* opt_pass methods: */
4277 bool gate (function *) final override { return flag_tree_cselim; }
4278 unsigned int execute (function *) final override
4279 {
4280 return tree_ssa_cs_elim ();
4281 }
4282
4283}; // class pass_cselim
4284
4285} // anon namespace
4286
4287gimple_opt_pass *
4288make_pass_cselim (gcc::context *ctxt)
4289{
4290 return new pass_cselim (ctxt);
4291}