Bug Summary

File: build/gcc/vec.h
Warning: line 741, column 3
Called C++ object pointer is null

Annotated Source Code

Press '?' to see keyboard shortcuts

clang -cc1 -cc1 -triple x86_64-unknown-linux-gnu -analyze -disable-free -disable-llvm-verifier -discard-value-names -main-file-name tree-ssa-loop-ivopts.c -analyzer-store=region -analyzer-opt-analyze-nested-blocks -analyzer-checker=core -analyzer-checker=apiModeling -analyzer-checker=unix -analyzer-checker=deadcode -analyzer-checker=cplusplus -analyzer-checker=security.insecureAPI.UncheckedReturn -analyzer-checker=security.insecureAPI.getpw -analyzer-checker=security.insecureAPI.gets -analyzer-checker=security.insecureAPI.mktemp -analyzer-checker=security.insecureAPI.mkstemp -analyzer-checker=security.insecureAPI.vfork -analyzer-checker=nullability.NullPassedToNonnull -analyzer-checker=nullability.NullReturnedFromNonnull -analyzer-output plist -w -setup-static-analyzer -analyzer-config-compatibility-mode=true -mrelocation-model static -mframe-pointer=none -fmath-errno -fno-rounding-math -mconstructor-aliases -munwind-tables -target-cpu x86-64 -tune-cpu generic -debugger-tuning=gdb -fcoverage-compilation-dir=/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/objdir/gcc -resource-dir /usr/lib64/clang/13.0.0 -D IN_GCC -D HAVE_CONFIG_H -I . -I . -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/. 
-I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../include -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libcpp/include -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libcody -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libdecnumber -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libdecnumber/bid -I ../libdecnumber -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libbacktrace -internal-isystem /usr/bin/../lib64/gcc/x86_64-suse-linux/11/../../../../include/c++/11 -internal-isystem /usr/bin/../lib64/gcc/x86_64-suse-linux/11/../../../../include/c++/11/x86_64-suse-linux -internal-isystem /usr/bin/../lib64/gcc/x86_64-suse-linux/11/../../../../include/c++/11/backward -internal-isystem /usr/lib64/clang/13.0.0/include -internal-isystem /usr/local/include -internal-isystem /usr/bin/../lib64/gcc/x86_64-suse-linux/11/../../../../x86_64-suse-linux/include -internal-externc-isystem /include -internal-externc-isystem /usr/include -O2 -Wno-narrowing -Wwrite-strings -Wno-error=format-diag -Wno-long-long -Wno-variadic-macros -Wno-overlength-strings -fdeprecated-macro -fdebug-compilation-dir=/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/objdir/gcc -ferror-limit 19 -fno-rtti -fgnuc-version=4.2.1 -vectorize-loops -vectorize-slp -analyzer-output=plist-html -analyzer-config silence-checkers=core.NullDereference -faddrsig -D__GCC_HAVE_DWARF2_CFI_ASM=1 -o /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/objdir/clang-static-analyzer/2021-11-20-133755-20252-1/report-REde0Z.plist -x c++ /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c

/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c

1/* Induction variable optimizations.
2 Copyright (C) 2003-2021 Free Software Foundation, Inc.
3
4This file is part of GCC.
5
6GCC is free software; you can redistribute it and/or modify it
7under the terms of the GNU General Public License as published by the
8Free Software Foundation; either version 3, or (at your option) any
9later version.
10
11GCC is distributed in the hope that it will be useful, but WITHOUT
12ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
13FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14for more details.
15
16You should have received a copy of the GNU General Public License
17along with GCC; see the file COPYING3. If not see
18<http://www.gnu.org/licenses/>. */
19
20/* This pass tries to find the optimal set of induction variables for the loop.
21 It optimizes just the basic linear induction variables (although adding
22 support for other types should not be too hard). It includes the
23 optimizations commonly known as strength reduction, induction variable
24 coalescing and induction variable elimination. It does it in the
25 following steps:
26
27 1) The interesting uses of induction variables are found. This includes
28
29 -- uses of induction variables in non-linear expressions
30 -- addresses of arrays
31 -- comparisons of induction variables
32
33 Note the interesting uses are categorized and handled in group.
34 Generally, address type uses are grouped together if their iv bases
35 are different in constant offset.
36
37 2) Candidates for the induction variables are found. This includes
38
39 -- old induction variables
40 -- the variables defined by expressions derived from the "interesting
41 groups/uses" above
42
43 3) The optimal (w.r.t. a cost function) set of variables is chosen. The
44 cost function assigns a cost to sets of induction variables and consists
45 of three parts:
46
47 -- The group/use costs. Each of the interesting groups/uses chooses
48 the best induction variable in the set and adds its cost to the sum.
49 The cost reflects the time spent on modifying the induction variables
50 value to be usable for the given purpose (adding base and offset for
51 arrays, etc.).
52 -- The variable costs. Each of the variables has a cost assigned that
53 reflects the costs associated with incrementing the value of the
54 variable. The original variables are somewhat preferred.
55 -- The set cost. Depending on the size of the set, extra cost may be
56 added to reflect register pressure.
57
58 All the costs are defined in a machine-specific way, using the target
59 hooks and machine descriptions to determine them.
60
61 4) The trees are transformed to use the new variables, the dead code is
62 removed.
63
64 All of this is done loop by loop. Doing it globally is theoretically
65 possible, it might give a better performance and it might enable us
66 to decide costs more precisely, but getting all the interactions right
67 would be complicated.
68
69 For the targets supporting low-overhead loops, IVOPTs has to take care of
70 the loops which will probably be transformed in RTL doloop optimization,
71 to try to make selected IV candidate set optimal. The process of doloop
72 support includes:
73
74 1) Analyze the current loop will be transformed to doloop or not, find and
75 mark its compare type IV use as doloop use (iv_group field doloop_p), and
76 set flag doloop_use_p of ivopts_data to notify subsequent processings on
77 doloop. See analyze_and_mark_doloop_use and its callees for the details.
78 The target hook predict_doloop_p can be used for target specific checks.
79
80 2) Add one doloop dedicated IV cand {(may_be_zero ? 1 : (niter + 1)), +, -1},
81 set flag doloop_p of iv_cand, step cost is set as zero and no extra cost
82 like biv. For cost determination between doloop IV cand and IV use, the
83 target hooks doloop_cost_for_generic and doloop_cost_for_address are
84 provided to add on extra costs for generic type and address type IV use.
85 Zero cost is assigned to the pair between doloop IV cand and doloop IV
86 use, and bound zero is set for IV elimination.
87
88 3) With the cost setting in step 2), the current cost model based IV
89 selection algorithm will process as usual, pick up doloop dedicated IV if
90 profitable. */
91
92#include "config.h"
93#include "system.h"
94#include "coretypes.h"
95#include "backend.h"
96#include "rtl.h"
97#include "tree.h"
98#include "gimple.h"
99#include "cfghooks.h"
100#include "tree-pass.h"
101#include "memmodel.h"
102#include "tm_p.h"
103#include "ssa.h"
104#include "expmed.h"
105#include "insn-config.h"
106#include "emit-rtl.h"
107#include "recog.h"
108#include "cgraph.h"
109#include "gimple-pretty-print.h"
110#include "alias.h"
111#include "fold-const.h"
112#include "stor-layout.h"
113#include "tree-eh.h"
114#include "gimplify.h"
115#include "gimple-iterator.h"
116#include "gimplify-me.h"
117#include "tree-cfg.h"
118#include "tree-ssa-loop-ivopts.h"
119#include "tree-ssa-loop-manip.h"
120#include "tree-ssa-loop-niter.h"
121#include "tree-ssa-loop.h"
122#include "explow.h"
123#include "expr.h"
124#include "tree-dfa.h"
125#include "tree-ssa.h"
126#include "cfgloop.h"
127#include "tree-scalar-evolution.h"
128#include "tree-affine.h"
129#include "tree-ssa-propagate.h"
130#include "tree-ssa-address.h"
131#include "builtins.h"
132#include "tree-vectorizer.h"
133#include "dbgcnt.h"
134
135/* For lang_hooks.types.type_for_mode. */
136#include "langhooks.h"
137
138/* FIXME: Expressions are expanded to RTL in this pass to determine the
139 cost of different addressing modes. This should be moved to a TBD
140 interface between the GIMPLE and RTL worlds. */
141
/* The infinite cost.  Used as a sentinel; any real cost must stay
   strictly below this value (asserted in the comp_cost operators).  */
#define INFTY 1000000000
144
145/* Returns the expected number of loop iterations for LOOP.
146 The average trip count is computed from profile data if it
147 exists. */
148
149static inline HOST_WIDE_INTlong
150avg_loop_niter (class loop *loop)
151{
152 HOST_WIDE_INTlong niter = estimated_stmt_executions_int (loop);
153 if (niter == -1)
154 {
155 niter = likely_max_stmt_executions_int (loop);
156
157 if (niter == -1 || niter > param_avg_loop_niterglobal_options.x_param_avg_loop_niter)
158 return param_avg_loop_niterglobal_options.x_param_avg_loop_niter;
159 }
160
161 return niter;
162}
163
164struct iv_use;
165
166/* Representation of the induction variable. */
167struct iv
168{
169 tree base; /* Initial value of the iv. */
170 tree base_object; /* A memory object to that the induction variable points. */
171 tree step; /* Step of the iv (constant only). */
172 tree ssa_name; /* The ssa name with the value. */
173 struct iv_use *nonlin_use; /* The identifier in the use if it is the case. */
174 bool biv_p; /* Is it a biv? */
175 bool no_overflow; /* True if the iv doesn't overflow. */
176 bool have_address_use;/* For biv, indicate if it's used in any address
177 type use. */
178};
179
180/* Per-ssa version information (induction variable descriptions, etc.). */
181struct version_info
182{
183 tree name; /* The ssa name. */
184 struct iv *iv; /* Induction variable description. */
185 bool has_nonlin_use; /* For a loop-level invariant, whether it is used in
186 an expression that is not an induction variable. */
187 bool preserve_biv; /* For the original biv, whether to preserve it. */
188 unsigned inv_id; /* Id of an invariant. */
189};
190
/* Types of uses.  */
enum use_type
{
  USE_NONLINEAR_EXPR,	/* Use in a nonlinear expression.  */
  USE_REF_ADDRESS,	/* Use is an address for an explicit memory
			   reference.  */
  USE_PTR_ADDRESS,	/* Use is a pointer argument to a function in
			   cases where the expansion of the function
			   will turn the argument into a normal address.  */
  USE_COMPARE		/* Use is a compare.  */
};
202
203/* Cost of a computation. */
204class comp_cost
205{
206public:
207 comp_cost (): cost (0), complexity (0), scratch (0)
208 {}
209
210 comp_cost (int64_t cost, unsigned complexity, int64_t scratch = 0)
211 : cost (cost), complexity (complexity), scratch (scratch)
212 {}
213
214 /* Returns true if COST is infinite. */
215 bool infinite_cost_p ();
216
217 /* Adds costs COST1 and COST2. */
218 friend comp_cost operator+ (comp_cost cost1, comp_cost cost2);
219
220 /* Adds COST to the comp_cost. */
221 comp_cost operator+= (comp_cost cost);
222
223 /* Adds constant C to this comp_cost. */
224 comp_cost operator+= (HOST_WIDE_INTlong c);
225
226 /* Subtracts constant C to this comp_cost. */
227 comp_cost operator-= (HOST_WIDE_INTlong c);
228
229 /* Divide the comp_cost by constant C. */
230 comp_cost operator/= (HOST_WIDE_INTlong c);
231
232 /* Multiply the comp_cost by constant C. */
233 comp_cost operator*= (HOST_WIDE_INTlong c);
234
235 /* Subtracts costs COST1 and COST2. */
236 friend comp_cost operator- (comp_cost cost1, comp_cost cost2);
237
238 /* Subtracts COST from this comp_cost. */
239 comp_cost operator-= (comp_cost cost);
240
241 /* Returns true if COST1 is smaller than COST2. */
242 friend bool operator< (comp_cost cost1, comp_cost cost2);
243
244 /* Returns true if COST1 and COST2 are equal. */
245 friend bool operator== (comp_cost cost1, comp_cost cost2);
246
247 /* Returns true if COST1 is smaller or equal than COST2. */
248 friend bool operator<= (comp_cost cost1, comp_cost cost2);
249
250 int64_t cost; /* The runtime cost. */
251 unsigned complexity; /* The estimate of the complexity of the code for
252 the computation (in no concrete units --
253 complexity field should be larger for more
254 complex expressions and addressing modes). */
255 int64_t scratch; /* Scratch used during cost computation. */
256};
257
258static const comp_cost no_cost;
259static const comp_cost infinite_cost (INFTY1000000000, 0, INFTY1000000000);
260
261bool
262comp_cost::infinite_cost_p ()
263{
264 return cost == INFTY1000000000;
265}
266
267comp_cost
268operator+ (comp_cost cost1, comp_cost cost2)
269{
270 if (cost1.infinite_cost_p () || cost2.infinite_cost_p ())
271 return infinite_cost;
272
273 gcc_assert (cost1.cost + cost2.cost < infinite_cost.cost)((void)(!(cost1.cost + cost2.cost < infinite_cost.cost) ? fancy_abort
("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 273, __FUNCTION__), 0 : 0))
;
274 cost1.cost += cost2.cost;
275 cost1.complexity += cost2.complexity;
276
277 return cost1;
278}
279
280comp_cost
281operator- (comp_cost cost1, comp_cost cost2)
282{
283 if (cost1.infinite_cost_p ())
284 return infinite_cost;
285
286 gcc_assert (!cost2.infinite_cost_p ())((void)(!(!cost2.infinite_cost_p ()) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 286, __FUNCTION__), 0 : 0))
;
287 gcc_assert (cost1.cost - cost2.cost < infinite_cost.cost)((void)(!(cost1.cost - cost2.cost < infinite_cost.cost) ? fancy_abort
("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 287, __FUNCTION__), 0 : 0))
;
288
289 cost1.cost -= cost2.cost;
290 cost1.complexity -= cost2.complexity;
291
292 return cost1;
293}
294
295comp_cost
296comp_cost::operator+= (comp_cost cost)
297{
298 *this = *this + cost;
299 return *this;
300}
301
302comp_cost
303comp_cost::operator+= (HOST_WIDE_INTlong c)
304{
305 if (c >= INFTY1000000000)
306 this->cost = INFTY1000000000;
307
308 if (infinite_cost_p ())
309 return *this;
310
311 gcc_assert (this->cost + c < infinite_cost.cost)((void)(!(this->cost + c < infinite_cost.cost) ? fancy_abort
("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 311, __FUNCTION__), 0 : 0))
;
312 this->cost += c;
313
314 return *this;
315}
316
317comp_cost
318comp_cost::operator-= (HOST_WIDE_INTlong c)
319{
320 if (infinite_cost_p ())
321 return *this;
322
323 gcc_assert (this->cost - c < infinite_cost.cost)((void)(!(this->cost - c < infinite_cost.cost) ? fancy_abort
("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 323, __FUNCTION__), 0 : 0))
;
324 this->cost -= c;
325
326 return *this;
327}
328
329comp_cost
330comp_cost::operator/= (HOST_WIDE_INTlong c)
331{
332 gcc_assert (c != 0)((void)(!(c != 0) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 332, __FUNCTION__), 0 : 0))
;
333 if (infinite_cost_p ())
334 return *this;
335
336 this->cost /= c;
337
338 return *this;
339}
340
341comp_cost
342comp_cost::operator*= (HOST_WIDE_INTlong c)
343{
344 if (infinite_cost_p ())
345 return *this;
346
347 gcc_assert (this->cost * c < infinite_cost.cost)((void)(!(this->cost * c < infinite_cost.cost) ? fancy_abort
("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 347, __FUNCTION__), 0 : 0))
;
348 this->cost *= c;
349
350 return *this;
351}
352
353comp_cost
354comp_cost::operator-= (comp_cost cost)
355{
356 *this = *this - cost;
357 return *this;
358}
359
360bool
361operator< (comp_cost cost1, comp_cost cost2)
362{
363 if (cost1.cost == cost2.cost)
364 return cost1.complexity < cost2.complexity;
365
366 return cost1.cost < cost2.cost;
367}
368
369bool
370operator== (comp_cost cost1, comp_cost cost2)
371{
372 return cost1.cost == cost2.cost
373 && cost1.complexity == cost2.complexity;
374}
375
376bool
377operator<= (comp_cost cost1, comp_cost cost2)
378{
379 return cost1 < cost2 || cost1 == cost2;
380}
381
382struct iv_inv_expr_ent;
383
384/* The candidate - cost pair. */
385class cost_pair
386{
387public:
388 struct iv_cand *cand; /* The candidate. */
389 comp_cost cost; /* The cost. */
390 enum tree_code comp; /* For iv elimination, the comparison. */
391 bitmap inv_vars; /* The list of invariant ssa_vars that have to be
392 preserved when representing iv_use with iv_cand. */
393 bitmap inv_exprs; /* The list of newly created invariant expressions
394 when representing iv_use with iv_cand. */
395 tree value; /* For final value elimination, the expression for
396 the final value of the iv. For iv elimination,
397 the new bound to compare with. */
398};
399
400/* Use. */
401struct iv_use
402{
403 unsigned id; /* The id of the use. */
404 unsigned group_id; /* The group id the use belongs to. */
405 enum use_type type; /* Type of the use. */
406 tree mem_type; /* The memory type to use when testing whether an
407 address is legitimate, and what the address's
408 cost is. */
409 struct iv *iv; /* The induction variable it is based on. */
410 gimple *stmt; /* Statement in that it occurs. */
411 tree *op_p; /* The place where it occurs. */
412
413 tree addr_base; /* Base address with const offset stripped. */
414 poly_uint64_pod addr_offset;
415 /* Const offset stripped from base address. */
416};
417
418/* Group of uses. */
419struct iv_group
420{
421 /* The id of the group. */
422 unsigned id;
423 /* Uses of the group are of the same type. */
424 enum use_type type;
425 /* The set of "related" IV candidates, plus the important ones. */
426 bitmap related_cands;
427 /* Number of IV candidates in the cost_map. */
428 unsigned n_map_members;
429 /* The costs wrto the iv candidates. */
430 class cost_pair *cost_map;
431 /* The selected candidate for the group. */
432 struct iv_cand *selected;
433 /* To indicate this is a doloop use group. */
434 bool doloop_p;
435 /* Uses in the group. */
436 vec<struct iv_use *> vuses;
437};
438
/* The position where the iv is computed.  */
enum iv_position
{
  IP_NORMAL,		/* At the end, just before the exit condition.  */
  IP_END,		/* At the end of the latch block.  */
  IP_BEFORE_USE,	/* Immediately before a specific use.  */
  IP_AFTER_USE,		/* Immediately after a specific use.  */
  IP_ORIGINAL		/* The original biv.  */
};
448
449/* The induction variable candidate. */
450struct iv_cand
451{
452 unsigned id; /* The number of the candidate. */
453 bool important; /* Whether this is an "important" candidate, i.e. such
454 that it should be considered by all uses. */
455 ENUM_BITFIELD(iv_position)enum iv_position pos : 8; /* Where it is computed. */
456 gimple *incremented_at;/* For original biv, the statement where it is
457 incremented. */
458 tree var_before; /* The variable used for it before increment. */
459 tree var_after; /* The variable used for it after increment. */
460 struct iv *iv; /* The value of the candidate. NULL for
461 "pseudocandidate" used to indicate the possibility
462 to replace the final value of an iv by direct
463 computation of the value. */
464 unsigned cost; /* Cost of the candidate. */
465 unsigned cost_step; /* Cost of the candidate's increment operation. */
466 struct iv_use *ainc_use; /* For IP_{BEFORE,AFTER}_USE candidates, the place
467 where it is incremented. */
468 bitmap inv_vars; /* The list of invariant ssa_vars used in step of the
469 iv_cand. */
470 bitmap inv_exprs; /* If step is more complicated than a single ssa_var,
471 hanlde it as a new invariant expression which will
472 be hoisted out of loop. */
473 struct iv *orig_iv; /* The original iv if this cand is added from biv with
474 smaller type. */
475 bool doloop_p; /* Whether this is a doloop candidate. */
476};
477
478/* Hashtable entry for common candidate derived from iv uses. */
479class iv_common_cand
480{
481public:
482 tree base;
483 tree step;
484 /* IV uses from which this common candidate is derived. */
485 auto_vec<struct iv_use *> uses;
486 hashval_t hash;
487};
488
489/* Hashtable helpers. */
490
491struct iv_common_cand_hasher : delete_ptr_hash <iv_common_cand>
492{
493 static inline hashval_t hash (const iv_common_cand *);
494 static inline bool equal (const iv_common_cand *, const iv_common_cand *);
495};
496
497/* Hash function for possible common candidates. */
498
499inline hashval_t
500iv_common_cand_hasher::hash (const iv_common_cand *ccand)
501{
502 return ccand->hash;
503}
504
505/* Hash table equality function for common candidates. */
506
507inline bool
508iv_common_cand_hasher::equal (const iv_common_cand *ccand1,
509 const iv_common_cand *ccand2)
510{
511 return (ccand1->hash == ccand2->hash
512 && operand_equal_p (ccand1->base, ccand2->base, 0)
513 && operand_equal_p (ccand1->step, ccand2->step, 0)
514 && (TYPE_PRECISION (TREE_TYPE (ccand1->base))((tree_class_check ((((contains_struct_check ((ccand1->base
), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 514, __FUNCTION__))->typed.type)), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 514, __FUNCTION__))->type_common.precision)
515 == TYPE_PRECISION (TREE_TYPE (ccand2->base))((tree_class_check ((((contains_struct_check ((ccand2->base
), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 515, __FUNCTION__))->typed.type)), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 515, __FUNCTION__))->type_common.precision)
));
516}
517
518/* Loop invariant expression hashtable entry. */
519
520struct iv_inv_expr_ent
521{
522 /* Tree expression of the entry. */
523 tree expr;
524 /* Unique indentifier. */
525 int id;
526 /* Hash value. */
527 hashval_t hash;
528};
529
530/* Sort iv_inv_expr_ent pair A and B by id field. */
531
532static int
533sort_iv_inv_expr_ent (const void *a, const void *b)
534{
535 const iv_inv_expr_ent * const *e1 = (const iv_inv_expr_ent * const *) (a);
536 const iv_inv_expr_ent * const *e2 = (const iv_inv_expr_ent * const *) (b);
537
538 unsigned id1 = (*e1)->id;
539 unsigned id2 = (*e2)->id;
540
541 if (id1 < id2)
542 return -1;
543 else if (id1 > id2)
544 return 1;
545 else
546 return 0;
547}
548
549/* Hashtable helpers. */
550
551struct iv_inv_expr_hasher : free_ptr_hash <iv_inv_expr_ent>
552{
553 static inline hashval_t hash (const iv_inv_expr_ent *);
554 static inline bool equal (const iv_inv_expr_ent *, const iv_inv_expr_ent *);
555};
556
557/* Return true if uses of type TYPE represent some form of address. */
558
559inline bool
560address_p (use_type type)
561{
562 return type == USE_REF_ADDRESS || type == USE_PTR_ADDRESS;
563}
564
565/* Hash function for loop invariant expressions. */
566
567inline hashval_t
568iv_inv_expr_hasher::hash (const iv_inv_expr_ent *expr)
569{
570 return expr->hash;
571}
572
573/* Hash table equality function for expressions. */
574
575inline bool
576iv_inv_expr_hasher::equal (const iv_inv_expr_ent *expr1,
577 const iv_inv_expr_ent *expr2)
578{
579 return expr1->hash == expr2->hash
580 && operand_equal_p (expr1->expr, expr2->expr, 0);
581}
582
583struct ivopts_data
584{
585 /* The currently optimized loop. */
586 class loop *current_loop;
587 location_t loop_loc;
588
589 /* Numbers of iterations for all exits of the current loop. */
590 hash_map<edge, tree_niter_desc *> *niters;
591
592 /* Number of registers used in it. */
593 unsigned regs_used;
594
595 /* The size of version_info array allocated. */
596 unsigned version_info_size;
597
598 /* The array of information for the ssa names. */
599 struct version_info *version_info;
600
601 /* The hashtable of loop invariant expressions created
602 by ivopt. */
603 hash_table<iv_inv_expr_hasher> *inv_expr_tab;
604
605 /* The bitmap of indices in version_info whose value was changed. */
606 bitmap relevant;
607
608 /* The uses of induction variables. */
609 vec<iv_group *> vgroups;
610
611 /* The candidates. */
612 vec<iv_cand *> vcands;
613
614 /* A bitmap of important candidates. */
615 bitmap important_candidates;
616
617 /* Cache used by tree_to_aff_combination_expand. */
618 hash_map<tree, name_expansion *> *name_expansion_cache;
619
620 /* The hashtable of common candidates derived from iv uses. */
621 hash_table<iv_common_cand_hasher> *iv_common_cand_tab;
622
623 /* The common candidates. */
624 vec<iv_common_cand *> iv_common_cands;
625
626 /* Hash map recording base object information of tree exp. */
627 hash_map<tree, tree> *base_object_map;
628
629 /* The maximum invariant variable id. */
630 unsigned max_inv_var_id;
631
632 /* The maximum invariant expression id. */
633 unsigned max_inv_expr_id;
634
635 /* Number of no_overflow BIVs which are not used in memory address. */
636 unsigned bivs_not_used_in_addr;
637
638 /* Obstack for iv structure. */
639 struct obstack iv_obstack;
640
641 /* Whether to consider just related and important candidates when replacing a
642 use. */
643 bool consider_all_candidates;
644
645 /* Are we optimizing for speed? */
646 bool speed;
647
648 /* Whether the loop body includes any function calls. */
649 bool body_includes_call;
650
651 /* Whether the loop body can only be exited via single exit. */
652 bool loop_single_exit_p;
653
654 /* Whether the loop has doloop comparison use. */
655 bool doloop_use_p;
656};
657
658/* An assignment of iv candidates to uses. */
659
660class iv_ca
661{
662public:
663 /* The number of uses covered by the assignment. */
664 unsigned upto;
665
666 /* Number of uses that cannot be expressed by the candidates in the set. */
667 unsigned bad_groups;
668
669 /* Candidate assigned to a use, together with the related costs. */
670 class cost_pair **cand_for_group;
671
672 /* Number of times each candidate is used. */
673 unsigned *n_cand_uses;
674
675 /* The candidates used. */
676 bitmap cands;
677
678 /* The number of candidates in the set. */
679 unsigned n_cands;
680
681 /* The number of invariants needed, including both invariant variants and
682 invariant expressions. */
683 unsigned n_invs;
684
685 /* Total cost of expressing uses. */
686 comp_cost cand_use_cost;
687
688 /* Total cost of candidates. */
689 int64_t cand_cost;
690
691 /* Number of times each invariant variable is used. */
692 unsigned *n_inv_var_uses;
693
694 /* Number of times each invariant expression is used. */
695 unsigned *n_inv_expr_uses;
696
697 /* Total cost of the assignment. */
698 comp_cost cost;
699};
700
/* Difference of two iv candidate assignments.  Forms a singly linked
   list of per-group changes, applied or rolled back as a unit.  */

struct iv_ca_delta
{
  /* Changed group.  */
  struct iv_group *group;

  /* An old assignment (for rollback purposes).  */
  class cost_pair *old_cp;

  /* A new assignment.  */
  class cost_pair *new_cp;

  /* Next change in the list.  */
  struct iv_ca_delta *next;
};
717
/* Bound on number of candidates below that all candidates are considered.  */

#define CONSIDER_ALL_CANDIDATES_BOUND \
  ((unsigned) param_iv_consider_all_candidates_bound)
722
/* If there are more iv occurrences, we just give up (it is quite unlikely that
   optimizing such a loop would help, and it would take ages).  */

#define MAX_CONSIDERED_GROUPS \
  ((unsigned) param_iv_max_considered_uses)
728
/* If there are at most this number of ivs in the set, try removing unnecessary
   ivs from the set always.  */

#define ALWAYS_PRUNE_CAND_SET_BOUND \
  ((unsigned) param_iv_always_prune_cand_set_bound)
734
735/* The list of trees for that the decl_rtl field must be reset is stored
736 here. */
737
738static vec<tree> decl_rtl_to_reset;
739
740static comp_cost force_expr_to_var_cost (tree, bool);
741
742/* The single loop exit if it dominates the latch, NULL otherwise. */
743
744edge
745single_dom_exit (class loop *loop)
746{
747 edge exit = single_exit (loop);
748
749 if (!exit)
750 return NULLnullptr;
751
752 if (!just_once_each_iteration_p (loop, exit->src))
753 return NULLnullptr;
754
755 return exit;
756}
757
758/* Dumps information about the induction variable IV to FILE. Don't dump
759 variable's name if DUMP_NAME is FALSE. The information is dumped with
760 preceding spaces indicated by INDENT_LEVEL. */
761
762void
763dump_iv (FILE *file, struct iv *iv, bool dump_name, unsigned indent_level)
764{
765 const char *p;
766 const char spaces[9] = {' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', '\0'};
767
768 if (indent_level > 4)
769 indent_level = 4;
770 p = spaces + 8 - (indent_level << 1);
771
772 fprintf (file, "%sIV struct:\n", p);
773 if (iv->ssa_name && dump_name)
774 {
775 fprintf (file, "%s SSA_NAME:\t", p);
776 print_generic_expr (file, iv->ssa_name, TDF_SLIM);
777 fprintf (file, "\n");
778 }
779
780 fprintf (file, "%s Type:\t", p);
781 print_generic_expr (file, TREE_TYPE (iv->base)((contains_struct_check ((iv->base), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 781, __FUNCTION__))->typed.type)
, TDF_SLIM);
782 fprintf (file, "\n");
783
784 fprintf (file, "%s Base:\t", p);
785 print_generic_expr (file, iv->base, TDF_SLIM);
786 fprintf (file, "\n");
787
788 fprintf (file, "%s Step:\t", p);
789 print_generic_expr (file, iv->step, TDF_SLIM);
790 fprintf (file, "\n");
791
792 if (iv->base_object)
793 {
794 fprintf (file, "%s Object:\t", p);
795 print_generic_expr (file, iv->base_object, TDF_SLIM);
796 fprintf (file, "\n");
797 }
798
799 fprintf (file, "%s Biv:\t%c\n", p, iv->biv_p ? 'Y' : 'N');
800
801 fprintf (file, "%s Overflowness wrto loop niter:\t%s\n",
802 p, iv->no_overflow ? "No-overflow" : "Overflow");
803}
804
805/* Dumps information about the USE to FILE. */
806
807void
808dump_use (FILE *file, struct iv_use *use)
809{
810 fprintf (file, " Use %d.%d:\n", use->group_id, use->id);
811 fprintf (file, " At stmt:\t");
812 print_gimple_stmt (file, use->stmt, 0);
813 fprintf (file, " At pos:\t");
814 if (use->op_p)
815 print_generic_expr (file, *use->op_p, TDF_SLIM);
816 fprintf (file, "\n");
817 dump_iv (file, use->iv, false, 2);
818}
819
820/* Dumps information about the uses to FILE. */
821
822void
823dump_groups (FILE *file, struct ivopts_data *data)
824{
825 unsigned i, j;
826 struct iv_group *group;
827
828 for (i = 0; i < data->vgroups.length (); i++)
829 {
830 group = data->vgroups[i];
831 fprintf (file, "Group %d:\n", group->id);
832 if (group->type == USE_NONLINEAR_EXPR)
833 fprintf (file, " Type:\tGENERIC\n");
834 else if (group->type == USE_REF_ADDRESS)
835 fprintf (file, " Type:\tREFERENCE ADDRESS\n");
836 else if (group->type == USE_PTR_ADDRESS)
837 fprintf (file, " Type:\tPOINTER ARGUMENT ADDRESS\n");
838 else
839 {
840 gcc_assert (group->type == USE_COMPARE)((void)(!(group->type == USE_COMPARE) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 840, __FUNCTION__), 0 : 0))
;
841 fprintf (file, " Type:\tCOMPARE\n");
842 }
843 for (j = 0; j < group->vuses.length (); j++)
844 dump_use (file, group->vuses[j]);
845 }
846}
847
848/* Dumps information about induction variable candidate CAND to FILE. */
849
850void
851dump_cand (FILE *file, struct iv_cand *cand)
852{
853 struct iv *iv = cand->iv;
854
855 fprintf (file, "Candidate %d:\n", cand->id);
856 if (cand->inv_vars)
857 {
858 fprintf (file, " Depend on inv.vars: ");
859 dump_bitmap (file, cand->inv_vars);
860 }
861 if (cand->inv_exprs)
862 {
863 fprintf (file, " Depend on inv.exprs: ");
864 dump_bitmap (file, cand->inv_exprs);
865 }
866
867 if (cand->var_before)
868 {
869 fprintf (file, " Var befor: ");
870 print_generic_expr (file, cand->var_before, TDF_SLIM);
871 fprintf (file, "\n");
872 }
873 if (cand->var_after)
874 {
875 fprintf (file, " Var after: ");
876 print_generic_expr (file, cand->var_after, TDF_SLIM);
877 fprintf (file, "\n");
878 }
879
880 switch (cand->pos)
881 {
882 case IP_NORMAL:
883 fprintf (file, " Incr POS: before exit test\n");
884 break;
885
886 case IP_BEFORE_USE:
887 fprintf (file, " Incr POS: before use %d\n", cand->ainc_use->id);
888 break;
889
890 case IP_AFTER_USE:
891 fprintf (file, " Incr POS: after use %d\n", cand->ainc_use->id);
892 break;
893
894 case IP_END:
895 fprintf (file, " Incr POS: at end\n");
896 break;
897
898 case IP_ORIGINAL:
899 fprintf (file, " Incr POS: orig biv\n");
900 break;
901 }
902
903 dump_iv (file, iv, false, 1);
904}
905
906/* Returns the info for ssa version VER. */
907
908static inline struct version_info *
909ver_info (struct ivopts_data *data, unsigned ver)
910{
911 return data->version_info + ver;
912}
913
914/* Returns the info for ssa name NAME. */
915
916static inline struct version_info *
917name_info (struct ivopts_data *data, tree name)
918{
919 return ver_info (data, SSA_NAME_VERSION (name)(tree_check ((name), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 919, __FUNCTION__, (SSA_NAME)))->base.u.version
);
920}
921
922/* Returns true if STMT is after the place where the IP_NORMAL ivs will be
923 emitted in LOOP. */
924
925static bool
926stmt_after_ip_normal_pos (class loop *loop, gimple *stmt)
927{
928 basic_block bb = ip_normal_pos (loop), sbb = gimple_bb (stmt);
929
930 gcc_assert (bb)((void)(!(bb) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 930, __FUNCTION__), 0 : 0))
;
931
932 if (sbb == loop->latch)
933 return true;
934
935 if (sbb != bb)
936 return false;
937
938 return stmt == last_stmt (bb);
939}
940
941/* Returns true if STMT if after the place where the original induction
942 variable CAND is incremented. If TRUE_IF_EQUAL is set, we return true
943 if the positions are identical. */
944
945static bool
946stmt_after_inc_pos (struct iv_cand *cand, gimple *stmt, bool true_if_equal)
947{
948 basic_block cand_bb = gimple_bb (cand->incremented_at);
949 basic_block stmt_bb = gimple_bb (stmt);
950
951 if (!dominated_by_p (CDI_DOMINATORS, stmt_bb, cand_bb))
952 return false;
953
954 if (stmt_bb != cand_bb)
955 return true;
956
957 if (true_if_equal
958 && gimple_uid (stmt) == gimple_uid (cand->incremented_at))
959 return true;
960 return gimple_uid (stmt) > gimple_uid (cand->incremented_at);
961}
962
963/* Returns true if STMT if after the place where the induction variable
964 CAND is incremented in LOOP. */
965
966static bool
967stmt_after_increment (class loop *loop, struct iv_cand *cand, gimple *stmt)
968{
969 switch (cand->pos)
970 {
971 case IP_END:
972 return false;
973
974 case IP_NORMAL:
975 return stmt_after_ip_normal_pos (loop, stmt);
976
977 case IP_ORIGINAL:
978 case IP_AFTER_USE:
979 return stmt_after_inc_pos (cand, stmt, false);
980
981 case IP_BEFORE_USE:
982 return stmt_after_inc_pos (cand, stmt, true);
983
984 default:
985 gcc_unreachable ()(fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 985, __FUNCTION__))
;
986 }
987}
988
989/* walk_tree callback for contains_abnormal_ssa_name_p. */
990
991static tree
992contains_abnormal_ssa_name_p_1 (tree *tp, int *walk_subtrees, void *)
993{
994 if (TREE_CODE (*tp)((enum tree_code) (*tp)->base.code) == SSA_NAME
995 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (*tp)(tree_check ((*tp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 995, __FUNCTION__, (SSA_NAME)))->base.asm_written_flag
)
996 return *tp;
997
998 if (!EXPR_P (*tp)((tree_code_type[(int) (((enum tree_code) (*tp)->base.code
))]) >= tcc_reference && (tree_code_type[(int) (((
enum tree_code) (*tp)->base.code))]) <= tcc_expression)
)
999 *walk_subtrees = 0;
1000
1001 return NULL_TREE(tree) nullptr;
1002}
1003
1004/* Returns true if EXPR contains a ssa name that occurs in an
1005 abnormal phi node. */
1006
1007bool
1008contains_abnormal_ssa_name_p (tree expr)
1009{
1010 return walk_tree_without_duplicateswalk_tree_without_duplicates_1 (&expr, contains_abnormal_ssa_name_p_1
, nullptr, nullptr)
1011 (&expr, contains_abnormal_ssa_name_p_1, NULL)walk_tree_without_duplicates_1 (&expr, contains_abnormal_ssa_name_p_1
, nullptr, nullptr)
!= NULL_TREE(tree) nullptr;
1012}
1013
1014/* Returns the structure describing number of iterations determined from
1015 EXIT of DATA->current_loop, or NULL if something goes wrong. */
1016
1017static class tree_niter_desc *
1018niter_for_exit (struct ivopts_data *data, edge exit)
1019{
1020 class tree_niter_desc *desc;
1021 tree_niter_desc **slot;
1022
1023 if (!data->niters)
1024 {
1025 data->niters = new hash_map<edge, tree_niter_desc *>;
1026 slot = NULLnullptr;
1027 }
1028 else
1029 slot = data->niters->get (exit);
1030
1031 if (!slot)
1032 {
1033 /* Try to determine number of iterations. We cannot safely work with ssa
1034 names that appear in phi nodes on abnormal edges, so that we do not
1035 create overlapping life ranges for them (PR 27283). */
1036 desc = XNEW (class tree_niter_desc)((class tree_niter_desc *) xmalloc (sizeof (class tree_niter_desc
)))
;
1037 if (!number_of_iterations_exit (data->current_loop,
1038 exit, desc, true)
1039 || contains_abnormal_ssa_name_p (desc->niter))
1040 {
1041 XDELETE (desc)free ((void*) (desc));
1042 desc = NULLnullptr;
1043 }
1044 data->niters->put (exit, desc);
1045 }
1046 else
1047 desc = *slot;
1048
1049 return desc;
1050}
1051
1052/* Returns the structure describing number of iterations determined from
1053 single dominating exit of DATA->current_loop, or NULL if something
1054 goes wrong. */
1055
1056static class tree_niter_desc *
1057niter_for_single_dom_exit (struct ivopts_data *data)
1058{
1059 edge exit = single_dom_exit (data->current_loop);
1060
1061 if (!exit)
1062 return NULLnullptr;
1063
1064 return niter_for_exit (data, exit);
1065}
1066
1067/* Initializes data structures used by the iv optimization pass, stored
1068 in DATA. */
1069
1070static void
1071tree_ssa_iv_optimize_init (struct ivopts_data *data)
1072{
1073 data->version_info_size = 2 * num_ssa_names(vec_safe_length ((cfun + 0)->gimple_df->ssa_names));
1074 data->version_info = XCNEWVEC (struct version_info, data->version_info_size)((struct version_info *) xcalloc ((data->version_info_size
), sizeof (struct version_info)))
;
1075 data->relevant = BITMAP_ALLOCbitmap_alloc (NULLnullptr);
1076 data->important_candidates = BITMAP_ALLOCbitmap_alloc (NULLnullptr);
1077 data->max_inv_var_id = 0;
1078 data->max_inv_expr_id = 0;
1079 data->niters = NULLnullptr;
1080 data->vgroups.create (20);
1081 data->vcands.create (20);
1082 data->inv_expr_tab = new hash_table<iv_inv_expr_hasher> (10);
1083 data->name_expansion_cache = NULLnullptr;
1084 data->base_object_map = NULLnullptr;
1085 data->iv_common_cand_tab = new hash_table<iv_common_cand_hasher> (10);
1086 data->iv_common_cands.create (20);
1087 decl_rtl_to_reset.create (20);
1088 gcc_obstack_init (&data->iv_obstack)_obstack_begin (((&data->iv_obstack)), (memory_block_pool
::block_size), (0), (mempool_obstack_chunk_alloc), (mempool_obstack_chunk_free
))
;
1089}
1090
1091/* walk_tree callback for determine_base_object. */
1092
1093static tree
1094determine_base_object_1 (tree *tp, int *walk_subtrees, void *wdata)
1095{
1096 tree_code code = TREE_CODE (*tp)((enum tree_code) (*tp)->base.code);
1097 tree obj = NULL_TREE(tree) nullptr;
1098 if (code == ADDR_EXPR)
1099 {
1100 tree base = get_base_address (TREE_OPERAND (*tp, 0)(*((const_cast<tree*> (tree_operand_check ((*tp), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 1100, __FUNCTION__)))))
);
1101 if (!base)
1102 obj = *tp;
1103 else if (TREE_CODE (base)((enum tree_code) (base)->base.code) != MEM_REF)
1104 obj = fold_convert (ptr_type_node, build_fold_addr_expr (base))fold_convert_loc (((location_t) 0), global_trees[TI_PTR_TYPE]
, build_fold_addr_expr_loc (((location_t) 0), (base)))
;
1105 }
1106 else if (code == SSA_NAME && POINTER_TYPE_P (TREE_TYPE (*tp))(((enum tree_code) (((contains_struct_check ((*tp), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 1106, __FUNCTION__))->typed.type))->base.code) == POINTER_TYPE
|| ((enum tree_code) (((contains_struct_check ((*tp), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 1106, __FUNCTION__))->typed.type))->base.code) == REFERENCE_TYPE
)
)
1107 obj = fold_convert (ptr_type_node, *tp)fold_convert_loc (((location_t) 0), global_trees[TI_PTR_TYPE]
, *tp)
;
1108
1109 if (!obj)
1110 {
1111 if (!EXPR_P (*tp)((tree_code_type[(int) (((enum tree_code) (*tp)->base.code
))]) >= tcc_reference && (tree_code_type[(int) (((
enum tree_code) (*tp)->base.code))]) <= tcc_expression)
)
1112 *walk_subtrees = 0;
1113
1114 return NULL_TREE(tree) nullptr;
1115 }
1116 /* Record special node for multiple base objects and stop. */
1117 if (*static_cast<tree *> (wdata))
1118 {
1119 *static_cast<tree *> (wdata) = integer_zero_nodeglobal_trees[TI_INTEGER_ZERO];
1120 return integer_zero_nodeglobal_trees[TI_INTEGER_ZERO];
1121 }
1122 /* Record the base object and continue looking. */
1123 *static_cast<tree *> (wdata) = obj;
1124 return NULL_TREE(tree) nullptr;
1125}
1126
1127/* Returns a memory object to that EXPR points with caching. Return NULL if we
1128 are able to determine that it does not point to any such object; specially
1129 return integer_zero_node if EXPR contains multiple base objects. */
1130
1131static tree
1132determine_base_object (struct ivopts_data *data, tree expr)
1133{
1134 tree *slot, obj = NULL_TREE(tree) nullptr;
1135 if (data->base_object_map)
1136 {
1137 if ((slot = data->base_object_map->get(expr)) != NULLnullptr)
1138 return *slot;
1139 }
1140 else
1141 data->base_object_map = new hash_map<tree, tree>;
1142
1143 (void) walk_tree_without_duplicates (&expr, determine_base_object_1, &obj)walk_tree_without_duplicates_1 (&expr, determine_base_object_1
, &obj, nullptr)
;
1144 data->base_object_map->put (expr, obj);
1145 return obj;
1146}
1147
1148/* Return true if address expression with non-DECL_P operand appears
1149 in EXPR. */
1150
1151static bool
1152contain_complex_addr_expr (tree expr)
1153{
1154 bool res = false;
1155
1156 STRIP_NOPS (expr)(expr) = tree_strip_nop_conversions ((const_cast<union tree_node
*> (((expr)))))
;
1157 switch (TREE_CODE (expr)((enum tree_code) (expr)->base.code))
1158 {
1159 case POINTER_PLUS_EXPR:
1160 case PLUS_EXPR:
1161 case MINUS_EXPR:
1162 res |= contain_complex_addr_expr (TREE_OPERAND (expr, 0)(*((const_cast<tree*> (tree_operand_check ((expr), (0),
"/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 1162, __FUNCTION__)))))
);
1163 res |= contain_complex_addr_expr (TREE_OPERAND (expr, 1)(*((const_cast<tree*> (tree_operand_check ((expr), (1),
"/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 1163, __FUNCTION__)))))
);
1164 break;
1165
1166 case ADDR_EXPR:
1167 return (!DECL_P (TREE_OPERAND (expr, 0))(tree_code_type[(int) (((enum tree_code) ((*((const_cast<tree
*> (tree_operand_check ((expr), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 1167, __FUNCTION__))))))->base.code))] == tcc_declaration
)
);
1168
1169 default:
1170 return false;
1171 }
1172
1173 return res;
1174}
1175
1176/* Allocates an induction variable with given initial value BASE and step STEP
1177 for loop LOOP. NO_OVERFLOW implies the iv doesn't overflow. */
1178
1179static struct iv *
1180alloc_iv (struct ivopts_data *data, tree base, tree step,
1181 bool no_overflow = false)
1182{
1183 tree expr = base;
1184 struct iv *iv = (struct iv*) obstack_alloc (&data->iv_obstack,__extension__ ({ struct obstack *__h = (&data->iv_obstack
); __extension__ ({ struct obstack *__o = (__h); size_t __len
= ((sizeof (struct iv))); if (__extension__ ({ struct obstack
const *__o1 = (__o); (size_t) (__o1->chunk_limit - __o1->
next_free); }) < __len) _obstack_newchunk (__o, __len); ((
void) ((__o)->next_free += (__len))); }); __extension__ ({
struct obstack *__o1 = (__h); void *__value = (void *) __o1->
object_base; if (__o1->next_free == __value) __o1->maybe_empty_object
= 1; __o1->next_free = (sizeof (ptrdiff_t) < sizeof (void
*) ? ((__o1->object_base) + (((__o1->next_free) - (__o1
->object_base) + (__o1->alignment_mask)) & ~(__o1->
alignment_mask))) : (char *) (((ptrdiff_t) (__o1->next_free
) + (__o1->alignment_mask)) & ~(__o1->alignment_mask
))); if ((size_t) (__o1->next_free - (char *) __o1->chunk
) > (size_t) (__o1->chunk_limit - (char *) __o1->chunk
)) __o1->next_free = __o1->chunk_limit; __o1->object_base
= __o1->next_free; __value; }); })
1185 sizeof (struct iv))__extension__ ({ struct obstack *__h = (&data->iv_obstack
); __extension__ ({ struct obstack *__o = (__h); size_t __len
= ((sizeof (struct iv))); if (__extension__ ({ struct obstack
const *__o1 = (__o); (size_t) (__o1->chunk_limit - __o1->
next_free); }) < __len) _obstack_newchunk (__o, __len); ((
void) ((__o)->next_free += (__len))); }); __extension__ ({
struct obstack *__o1 = (__h); void *__value = (void *) __o1->
object_base; if (__o1->next_free == __value) __o1->maybe_empty_object
= 1; __o1->next_free = (sizeof (ptrdiff_t) < sizeof (void
*) ? ((__o1->object_base) + (((__o1->next_free) - (__o1
->object_base) + (__o1->alignment_mask)) & ~(__o1->
alignment_mask))) : (char *) (((ptrdiff_t) (__o1->next_free
) + (__o1->alignment_mask)) & ~(__o1->alignment_mask
))); if ((size_t) (__o1->next_free - (char *) __o1->chunk
) > (size_t) (__o1->chunk_limit - (char *) __o1->chunk
)) __o1->next_free = __o1->chunk_limit; __o1->object_base
= __o1->next_free; __value; }); })
;
1186 gcc_assert (step != NULL_TREE)((void)(!(step != (tree) nullptr) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 1186, __FUNCTION__), 0 : 0))
;
1187
1188 /* Lower address expression in base except ones with DECL_P as operand.
1189 By doing this:
1190 1) More accurate cost can be computed for address expressions;
1191 2) Duplicate candidates won't be created for bases in different
1192 forms, like &a[0] and &a. */
1193 STRIP_NOPS (expr)(expr) = tree_strip_nop_conversions ((const_cast<union tree_node
*> (((expr)))))
;
1194 if ((TREE_CODE (expr)((enum tree_code) (expr)->base.code) == ADDR_EXPR && !DECL_P (TREE_OPERAND (expr, 0))(tree_code_type[(int) (((enum tree_code) ((*((const_cast<tree
*> (tree_operand_check ((expr), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 1194, __FUNCTION__))))))->base.code))] == tcc_declaration
)
)
1195 || contain_complex_addr_expr (expr))
1196 {
1197 aff_tree comb;
1198 tree_to_aff_combination (expr, TREE_TYPE (expr)((contains_struct_check ((expr), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 1198, __FUNCTION__))->typed.type)
, &comb);
1199 base = fold_convert (TREE_TYPE (base), aff_combination_to_tree (&comb))fold_convert_loc (((location_t) 0), ((contains_struct_check (
(base), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 1199, __FUNCTION__))->typed.type), aff_combination_to_tree
(&comb))
;
1200 }
1201
1202 iv->base = base;
1203 iv->base_object = determine_base_object (data, base);
1204 iv->step = step;
1205 iv->biv_p = false;
1206 iv->nonlin_use = NULLnullptr;
1207 iv->ssa_name = NULL_TREE(tree) nullptr;
1208 if (!no_overflow
1209 && !iv_can_overflow_p (data->current_loop, TREE_TYPE (base)((contains_struct_check ((base), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 1209, __FUNCTION__))->typed.type)
,
1210 base, step))
1211 no_overflow = true;
1212 iv->no_overflow = no_overflow;
1213 iv->have_address_use = false;
1214
1215 return iv;
1216}
1217
1218/* Sets STEP and BASE for induction variable IV. NO_OVERFLOW implies the IV
1219 doesn't overflow. */
1220
1221static void
1222set_iv (struct ivopts_data *data, tree iv, tree base, tree step,
1223 bool no_overflow)
1224{
1225 struct version_info *info = name_info (data, iv);
1226
1227 gcc_assert (!info->iv)((void)(!(!info->iv) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 1227, __FUNCTION__), 0 : 0))
;
1228
1229 bitmap_set_bit (data->relevant, SSA_NAME_VERSION (iv)(tree_check ((iv), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 1229, __FUNCTION__, (SSA_NAME)))->base.u.version
);
1230 info->iv = alloc_iv (data, base, step, no_overflow);
1231 info->iv->ssa_name = iv;
1232}
1233
1234/* Finds induction variable declaration for VAR. */
1235
1236static struct iv *
1237get_iv (struct ivopts_data *data, tree var)
1238{
1239 basic_block bb;
1240 tree type = TREE_TYPE (var)((contains_struct_check ((var), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 1240, __FUNCTION__))->typed.type)
;
1241
1242 if (!POINTER_TYPE_P (type)(((enum tree_code) (type)->base.code) == POINTER_TYPE || (
(enum tree_code) (type)->base.code) == REFERENCE_TYPE)
1243 && !INTEGRAL_TYPE_P (type)(((enum tree_code) (type)->base.code) == ENUMERAL_TYPE || (
(enum tree_code) (type)->base.code) == BOOLEAN_TYPE || ((enum
tree_code) (type)->base.code) == INTEGER_TYPE)
)
1244 return NULLnullptr;
1245
1246 if (!name_info (data, var)->iv)
1247 {
1248 bb = gimple_bb (SSA_NAME_DEF_STMT (var)(tree_check ((var), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 1248, __FUNCTION__, (SSA_NAME)))->ssa_name.def_stmt
);
1249
1250 if (!bb
1251 || !flow_bb_inside_loop_p (data->current_loop, bb))
1252 {
1253 if (POINTER_TYPE_P (type)(((enum tree_code) (type)->base.code) == POINTER_TYPE || (
(enum tree_code) (type)->base.code) == REFERENCE_TYPE)
)
1254 type = sizetypesizetype_tab[(int) stk_sizetype];
1255 set_iv (data, var, var, build_int_cst (type, 0), true);
1256 }
1257 }
1258
1259 return name_info (data, var)->iv;
1260}
1261
1262/* Return the first non-invariant ssa var found in EXPR. */
1263
1264static tree
1265extract_single_var_from_expr (tree expr)
1266{
1267 int i, n;
1268 tree tmp;
1269 enum tree_code code;
1270
1271 if (!expr || is_gimple_min_invariant (expr))
1272 return NULLnullptr;
1273
1274 code = TREE_CODE (expr)((enum tree_code) (expr)->base.code);
1275 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))((tree_code_type[(int) (code)]) >= tcc_reference &&
(tree_code_type[(int) (code)]) <= tcc_expression)
)
1276 {
1277 n = TREE_OPERAND_LENGTH (expr)tree_operand_length (expr);
1278 for (i = 0; i < n; i++)
1279 {
1280 tmp = extract_single_var_from_expr (TREE_OPERAND (expr, i)(*((const_cast<tree*> (tree_operand_check ((expr), (i),
"/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 1280, __FUNCTION__)))))
);
1281
1282 if (tmp)
1283 return tmp;
1284 }
1285 }
1286 return (TREE_CODE (expr)((enum tree_code) (expr)->base.code) == SSA_NAME) ? expr : NULLnullptr;
1287}
1288
1289/* Finds basic ivs. */
1290
1291static bool
1292find_bivs (struct ivopts_data *data)
1293{
1294 gphi *phi;
1295 affine_iv iv;
1296 tree step, type, base, stop;
1297 bool found = false;
1298 class loop *loop = data->current_loop;
1299 gphi_iterator psi;
1300
1301 for (psi = gsi_start_phis (loop->header); !gsi_end_p (psi); gsi_next (&psi))
1302 {
1303 phi = psi.phi ();
1304
1305 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi))(tree_check ((get_def_from_ptr (gimple_phi_result_ptr (phi)))
, "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 1305, __FUNCTION__, (SSA_NAME)))->base.asm_written_flag
)
1306 continue;
1307
1308 if (virtual_operand_p (PHI_RESULT (phi)get_def_from_ptr (gimple_phi_result_ptr (phi))))
1309 continue;
1310
1311 if (!simple_iv (loop, loop, PHI_RESULT (phi)get_def_from_ptr (gimple_phi_result_ptr (phi)), &iv, true))
1312 continue;
1313
1314 if (integer_zerop (iv.step))
1315 continue;
1316
1317 step = iv.step;
1318 base = PHI_ARG_DEF_FROM_EDGE (phi, loop_preheader_edge (loop))gimple_phi_arg_def (((phi)), ((loop_preheader_edge (loop))->
dest_idx))
;
1319 /* Stop expanding iv base at the first ssa var referred by iv step.
1320 Ideally we should stop at any ssa var, because that's expensive
1321 and unusual to happen, we just do it on the first one.
1322
1323 See PR64705 for the rationale. */
1324 stop = extract_single_var_from_expr (step);
1325 base = expand_simple_operations (base, stop);
1326 if (contains_abnormal_ssa_name_p (base)
1327 || contains_abnormal_ssa_name_p (step))
1328 continue;
1329
1330 type = TREE_TYPE (PHI_RESULT (phi))((contains_struct_check ((get_def_from_ptr (gimple_phi_result_ptr
(phi))), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 1330, __FUNCTION__))->typed.type)
;
1331 base = fold_convert (type, base)fold_convert_loc (((location_t) 0), type, base);
1332 if (step)
1333 {
1334 if (POINTER_TYPE_P (type)(((enum tree_code) (type)->base.code) == POINTER_TYPE || (
(enum tree_code) (type)->base.code) == REFERENCE_TYPE)
)
1335 step = convert_to_ptrofftype (step)convert_to_ptrofftype_loc (((location_t) 0), step);
1336 else
1337 step = fold_convert (type, step)fold_convert_loc (((location_t) 0), type, step);
1338 }
1339
1340 set_iv (data, PHI_RESULT (phi)get_def_from_ptr (gimple_phi_result_ptr (phi)), base, step, iv.no_overflow);
1341 found = true;
1342 }
1343
1344 return found;
1345}
1346
1347/* Marks basic ivs. */
1348
1349static void
1350mark_bivs (struct ivopts_data *data)
1351{
1352 gphi *phi;
1353 gimple *def;
1354 tree var;
1355 struct iv *iv, *incr_iv;
1356 class loop *loop = data->current_loop;
1357 basic_block incr_bb;
1358 gphi_iterator psi;
1359
1360 data->bivs_not_used_in_addr = 0;
1361 for (psi = gsi_start_phis (loop->header); !gsi_end_p (psi); gsi_next (&psi))
1362 {
1363 phi = psi.phi ();
1364
1365 iv = get_iv (data, PHI_RESULT (phi)get_def_from_ptr (gimple_phi_result_ptr (phi)));
1366 if (!iv)
1367 continue;
1368
1369 var = PHI_ARG_DEF_FROM_EDGE (phi, loop_latch_edge (loop))gimple_phi_arg_def (((phi)), ((loop_latch_edge (loop))->dest_idx
))
;
1370 def = SSA_NAME_DEF_STMT (var)(tree_check ((var), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 1370, __FUNCTION__, (SSA_NAME)))->ssa_name.def_stmt
;
1371 /* Don't mark iv peeled from other one as biv. */
1372 if (def
1373 && gimple_code (def) == GIMPLE_PHI
1374 && gimple_bb (def) == loop->header)
1375 continue;
1376
1377 incr_iv = get_iv (data, var);
1378 if (!incr_iv)
1379 continue;
1380
1381 /* If the increment is in the subloop, ignore it. */
1382 incr_bb = gimple_bb (SSA_NAME_DEF_STMT (var)(tree_check ((var), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 1382, __FUNCTION__, (SSA_NAME)))->ssa_name.def_stmt
);
1383 if (incr_bb->loop_father != data->current_loop
1384 || (incr_bb->flags & BB_IRREDUCIBLE_LOOP))
1385 continue;
1386
1387 iv->biv_p = true;
1388 incr_iv->biv_p = true;
1389 if (iv->no_overflow)
1390 data->bivs_not_used_in_addr++;
1391 if (incr_iv->no_overflow)
1392 data->bivs_not_used_in_addr++;
1393 }
1394}
1395
1396/* Checks whether STMT defines a linear induction variable and stores its
1397 parameters to IV. */
1398
1399static bool
1400find_givs_in_stmt_scev (struct ivopts_data *data, gimple *stmt, affine_iv *iv)
1401{
1402 tree lhs, stop;
1403 class loop *loop = data->current_loop;
1404
1405 iv->base = NULL_TREE(tree) nullptr;
1406 iv->step = NULL_TREE(tree) nullptr;
1407
1408 if (gimple_code (stmt) != GIMPLE_ASSIGN)
1409 return false;
1410
1411 lhs = gimple_assign_lhs (stmt);
1412 if (TREE_CODE (lhs)((enum tree_code) (lhs)->base.code) != SSA_NAME)
1413 return false;
1414
1415 if (!simple_iv (loop, loop_containing_stmt (stmt), lhs, iv, true))
1416 return false;
1417
1418 /* Stop expanding iv base at the first ssa var referred by iv step.
1419 Ideally we should stop at any ssa var, because that's expensive
1420 and unusual to happen, we just do it on the first one.
1421
1422 See PR64705 for the rationale. */
1423 stop = extract_single_var_from_expr (iv->step);
1424 iv->base = expand_simple_operations (iv->base, stop);
1425 if (contains_abnormal_ssa_name_p (iv->base)
1426 || contains_abnormal_ssa_name_p (iv->step))
1427 return false;
1428
1429 /* If STMT could throw, then do not consider STMT as defining a GIV.
1430 While this will suppress optimizations, we cannot safely delete this
1431 GIV and associated statements, even if it appears it is not used. */
1432 if (stmt_could_throw_p (cfun(cfun + 0), stmt))
1433 return false;
1434
1435 return true;
1436}
1437
1438/* Finds general ivs in statement STMT. */
1439
1440static void
1441find_givs_in_stmt (struct ivopts_data *data, gimple *stmt)
1442{
1443 affine_iv iv;
1444
1445 if (!find_givs_in_stmt_scev (data, stmt, &iv))
1446 return;
1447
1448 set_iv (data, gimple_assign_lhs (stmt), iv.base, iv.step, iv.no_overflow);
1449}
1450
1451/* Finds general ivs in basic block BB. */
1452
1453static void
1454find_givs_in_bb (struct ivopts_data *data, basic_block bb)
1455{
1456 gimple_stmt_iterator bsi;
1457
1458 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
1459 find_givs_in_stmt (data, gsi_stmt (bsi));
1460}
1461
1462/* Finds general ivs. */
1463
1464static void
1465find_givs (struct ivopts_data *data)
1466{
1467 class loop *loop = data->current_loop;
1468 basic_block *body = get_loop_body_in_dom_order (loop);
1469 unsigned i;
1470
1471 for (i = 0; i < loop->num_nodes; i++)
1472 find_givs_in_bb (data, body[i]);
1473 free (body);
1474}
1475
1476/* For each ssa name defined in LOOP determines whether it is an induction
1477 variable and if so, its initial value and step. */
1478
1479static bool
1480find_induction_variables (struct ivopts_data *data)
1481{
1482 unsigned i;
1483 bitmap_iterator bi;
1484
1485 if (!find_bivs (data))
1486 return false;
1487
1488 find_givs (data);
1489 mark_bivs (data);
1490
1491 if (dump_file && (dump_flags & TDF_DETAILS))
1492 {
1493 class tree_niter_desc *niter = niter_for_single_dom_exit (data);
1494
1495 if (niter)
1496 {
1497 fprintf (dump_file, " number of iterations ");
1498 print_generic_expr (dump_file, niter->niter, TDF_SLIM);
1499 if (!integer_zerop (niter->may_be_zero))
1500 {
1501 fprintf (dump_file, "; zero if ");
1502 print_generic_expr (dump_file, niter->may_be_zero, TDF_SLIM);
1503 }
1504 fprintf (dump_file, "\n");
1505 };
1506
1507 fprintf (dump_file, "\n<Induction Vars>:\n");
1508 EXECUTE_IF_SET_IN_BITMAP (data->relevant, 0, i, bi)for (bmp_iter_set_init (&(bi), (data->relevant), (0), &
(i)); bmp_iter_set (&(bi), &(i)); bmp_iter_next (&
(bi), &(i)))
1509 {
1510 struct version_info *info = ver_info (data, i);
1511 if (info->iv && info->iv->step && !integer_zerop (info->iv->step))
1512 dump_iv (dump_file, ver_info (data, i)->iv, true, 0);
1513 }
1514 }
1515
1516 return true;
1517}
1518
1519/* Records a use of TYPE at *USE_P in STMT whose value is IV in GROUP.
1520 For address type use, ADDR_BASE is the stripped IV base, ADDR_OFFSET
1521 is the const offset stripped from IV base and MEM_TYPE is the type
1522 of the memory being addressed. For uses of other types, ADDR_BASE
1523 and ADDR_OFFSET are zero by default and MEM_TYPE is NULL_TREE. */
1524
1525static struct iv_use *
1526record_use (struct iv_group *group, tree *use_p, struct iv *iv,
1527 gimple *stmt, enum use_type type, tree mem_type,
1528 tree addr_base, poly_uint64 addr_offset)
1529{
1530 struct iv_use *use = XCNEW (struct iv_use)((struct iv_use *) xcalloc (1, sizeof (struct iv_use)));
1531
1532 use->id = group->vuses.length ();
1533 use->group_id = group->id;
1534 use->type = type;
1535 use->mem_type = mem_type;
1536 use->iv = iv;
1537 use->stmt = stmt;
1538 use->op_p = use_p;
1539 use->addr_base = addr_base;
1540 use->addr_offset = addr_offset;
1541
1542 group->vuses.safe_push (use);
1543 return use;
1544}
1545
1546/* Checks whether OP is a loop-level invariant and if so, records it.
1547 NONLINEAR_USE is true if the invariant is used in a way we do not
1548 handle specially. */
1549
1550static void
1551record_invariant (struct ivopts_data *data, tree op, bool nonlinear_use)
1552{
1553 basic_block bb;
1554 struct version_info *info;
1555
1556 if (TREE_CODE (op)((enum tree_code) (op)->base.code) != SSA_NAME
1557 || virtual_operand_p (op))
1558 return;
1559
1560 bb = gimple_bb (SSA_NAME_DEF_STMT (op)(tree_check ((op), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 1560, __FUNCTION__, (SSA_NAME)))->ssa_name.def_stmt
);
1561 if (bb
1562 && flow_bb_inside_loop_p (data->current_loop, bb))
1563 return;
1564
1565 info = name_info (data, op);
1566 info->name = op;
1567 info->has_nonlin_use |= nonlinear_use;
1568 if (!info->inv_id)
1569 info->inv_id = ++data->max_inv_var_id;
1570 bitmap_set_bit (data->relevant, SSA_NAME_VERSION (op)(tree_check ((op), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 1570, __FUNCTION__, (SSA_NAME)))->base.u.version
);
1571}
1572
1573/* Record a group of TYPE. */
1574
1575static struct iv_group *
1576record_group (struct ivopts_data *data, enum use_type type)
1577{
1578 struct iv_group *group = XCNEW (struct iv_group)((struct iv_group *) xcalloc (1, sizeof (struct iv_group)));
1579
1580 group->id = data->vgroups.length ();
1581 group->type = type;
1582 group->related_cands = BITMAP_ALLOCbitmap_alloc (NULLnullptr);
1583 group->vuses.create (1);
1584 group->doloop_p = false;
1585
1586 data->vgroups.safe_push (group);
1587 return group;
1588}
1589
1590/* Record a use of TYPE at *USE_P in STMT whose value is IV in a group.
1591 New group will be created if there is no existing group for the use.
1592 MEM_TYPE is the type of memory being addressed, or NULL if this
1593 isn't an address reference. */
1594
1595static struct iv_use *
1596record_group_use (struct ivopts_data *data, tree *use_p,
1597 struct iv *iv, gimple *stmt, enum use_type type,
1598 tree mem_type)
1599{
1600 tree addr_base = NULLnullptr;
1601 struct iv_group *group = NULLnullptr;
1602 poly_uint64 addr_offset = 0;
1603
1604 /* Record non address type use in a new group. */
1605 if (address_p (type))
1606 {
1607 unsigned int i;
1608
1609 addr_base = strip_offset (iv->base, &addr_offset);
1610 for (i = 0; i < data->vgroups.length (); i++)
1611 {
1612 struct iv_use *use;
1613
1614 group = data->vgroups[i];
1615 use = group->vuses[0];
1616 if (!address_p (use->type))
1617 continue;
1618
1619 /* Check if it has the same stripped base and step. */
1620 if (operand_equal_p (iv->base_object, use->iv->base_object, 0)
1621 && operand_equal_p (iv->step, use->iv->step, 0)
1622 && operand_equal_p (addr_base, use->addr_base, 0))
1623 break;
1624 }
1625 if (i == data->vgroups.length ())
1626 group = NULLnullptr;
1627 }
1628
1629 if (!group)
1630 group = record_group (data, type);
1631
1632 return record_use (group, use_p, iv, stmt, type, mem_type,
1633 addr_base, addr_offset);
1634}
1635
1636/* Checks whether the use OP is interesting and if so, records it. */
1637
1638static struct iv_use *
1639find_interesting_uses_op (struct ivopts_data *data, tree op)
1640{
1641 struct iv *iv;
1642 gimple *stmt;
1643 struct iv_use *use;
1644
1645 if (TREE_CODE (op)((enum tree_code) (op)->base.code) != SSA_NAME)
1646 return NULLnullptr;
1647
1648 iv = get_iv (data, op);
1649 if (!iv)
1650 return NULLnullptr;
1651
1652 if (iv->nonlin_use)
1653 {
1654 gcc_assert (iv->nonlin_use->type == USE_NONLINEAR_EXPR)((void)(!(iv->nonlin_use->type == USE_NONLINEAR_EXPR) ?
fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 1654, __FUNCTION__), 0 : 0))
;
1655 return iv->nonlin_use;
1656 }
1657
1658 if (integer_zerop (iv->step))
1659 {
1660 record_invariant (data, op, true);
1661 return NULLnullptr;
1662 }
1663
1664 stmt = SSA_NAME_DEF_STMT (op)(tree_check ((op), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 1664, __FUNCTION__, (SSA_NAME)))->ssa_name.def_stmt
;
1665 gcc_assert (gimple_code (stmt) == GIMPLE_PHI || is_gimple_assign (stmt))((void)(!(gimple_code (stmt) == GIMPLE_PHI || is_gimple_assign
(stmt)) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 1665, __FUNCTION__), 0 : 0))
;
1666
1667 use = record_group_use (data, NULLnullptr, iv, stmt, USE_NONLINEAR_EXPR, NULL_TREE(tree) nullptr);
1668 iv->nonlin_use = use;
1669 return use;
1670}
1671
/* Indicate how a compare type iv_use can be handled.  */
enum comp_iv_rewrite
{
  /* The comparison cannot be rewritten.  */
  COMP_IV_NA,
  /* We may rewrite compare type iv_use by expressing value of the iv_use.  */
  COMP_IV_EXPR,
  /* We may rewrite compare type iv_uses on both sides of comparison by
     expressing value of each iv_use.  */
  COMP_IV_EXPR_2,
  /* We may rewrite compare type iv_use by expressing value of the iv_use
     or by eliminating it with other iv_cand.  */
  COMP_IV_ELIM
};
1685
1686/* Given a condition in statement STMT, checks whether it is a compare
1687 of an induction variable and an invariant. If this is the case,
1688 CONTROL_VAR is set to location of the iv, BOUND to the location of
1689 the invariant, IV_VAR and IV_BOUND are set to the corresponding
1690 induction variable descriptions, and true is returned. If this is not
1691 the case, CONTROL_VAR and BOUND are set to the arguments of the
1692 condition and false is returned. */
1693
1694static enum comp_iv_rewrite
1695extract_cond_operands (struct ivopts_data *data, gimple *stmt,
1696 tree **control_var, tree **bound,
1697 struct iv **iv_var, struct iv **iv_bound)
1698{
1699 /* The objects returned when COND has constant operands. */
1700 static struct iv const_iv;
1701 static tree zero;
1702 tree *op0 = &zero, *op1 = &zero;
1703 struct iv *iv0 = &const_iv, *iv1 = &const_iv;
1704 enum comp_iv_rewrite rewrite_type = COMP_IV_NA;
1705
1706 if (gimple_code (stmt) == GIMPLE_COND)
1707 {
1708 gcond *cond_stmt = as_a <gcond *> (stmt);
1709 op0 = gimple_cond_lhs_ptr (cond_stmt);
1710 op1 = gimple_cond_rhs_ptr (cond_stmt);
1711 }
1712 else
1713 {
1714 op0 = gimple_assign_rhs1_ptr (stmt);
1715 op1 = gimple_assign_rhs2_ptr (stmt);
1716 }
1717
1718 zero = integer_zero_nodeglobal_trees[TI_INTEGER_ZERO];
1719 const_iv.step = integer_zero_nodeglobal_trees[TI_INTEGER_ZERO];
1720
1721 if (TREE_CODE (*op0)((enum tree_code) (*op0)->base.code) == SSA_NAME)
1722 iv0 = get_iv (data, *op0);
1723 if (TREE_CODE (*op1)((enum tree_code) (*op1)->base.code) == SSA_NAME)
1724 iv1 = get_iv (data, *op1);
1725
1726 /* If both sides of comparison are IVs. We can express ivs on both end. */
1727 if (iv0 && iv1 && !integer_zerop (iv0->step) && !integer_zerop (iv1->step))
1728 {
1729 rewrite_type = COMP_IV_EXPR_2;
1730 goto end;
1731 }
1732
1733 /* If none side of comparison is IV. */
1734 if ((!iv0 || integer_zerop (iv0->step))
1735 && (!iv1 || integer_zerop (iv1->step)))
1736 goto end;
1737
1738 /* Control variable may be on the other side. */
1739 if (!iv0 || integer_zerop (iv0->step))
1740 {
1741 std::swap (op0, op1);
1742 std::swap (iv0, iv1);
1743 }
1744 /* If one side is IV and the other side isn't loop invariant. */
1745 if (!iv1)
1746 rewrite_type = COMP_IV_EXPR;
1747 /* If one side is IV and the other side is loop invariant. */
1748 else if (!integer_zerop (iv0->step) && integer_zerop (iv1->step))
1749 rewrite_type = COMP_IV_ELIM;
1750
1751end:
1752 if (control_var)
1753 *control_var = op0;
1754 if (iv_var)
1755 *iv_var = iv0;
1756 if (bound)
1757 *bound = op1;
1758 if (iv_bound)
1759 *iv_bound = iv1;
1760
1761 return rewrite_type;
1762}
1763
1764/* Checks whether the condition in STMT is interesting and if so,
1765 records it. */
1766
1767static void
1768find_interesting_uses_cond (struct ivopts_data *data, gimple *stmt)
1769{
1770 tree *var_p, *bound_p;
1771 struct iv *var_iv, *bound_iv;
1772 enum comp_iv_rewrite ret;
1773
1774 ret = extract_cond_operands (data, stmt,
1775 &var_p, &bound_p, &var_iv, &bound_iv);
1776 if (ret == COMP_IV_NA)
1777 {
1778 find_interesting_uses_op (data, *var_p);
1779 find_interesting_uses_op (data, *bound_p);
1780 return;
1781 }
1782
1783 record_group_use (data, var_p, var_iv, stmt, USE_COMPARE, NULL_TREE(tree) nullptr);
1784 /* Record compare type iv_use for iv on the other side of comparison. */
1785 if (ret == COMP_IV_EXPR_2)
1786 record_group_use (data, bound_p, bound_iv, stmt, USE_COMPARE, NULL_TREE(tree) nullptr);
1787}
1788
1789/* Returns the outermost loop EXPR is obviously invariant in
1790 relative to the loop LOOP, i.e. if all its operands are defined
1791 outside of the returned loop. Returns NULL if EXPR is not
1792 even obviously invariant in LOOP. */
1793
1794class loop *
1795outermost_invariant_loop_for_expr (class loop *loop, tree expr)
1796{
1797 basic_block def_bb;
1798 unsigned i, len;
1799
1800 if (is_gimple_min_invariant (expr))
1801 return current_loops((cfun + 0)->x_current_loops)->tree_root;
1802
1803 if (TREE_CODE (expr)((enum tree_code) (expr)->base.code) == SSA_NAME)
1804 {
1805 def_bb = gimple_bb (SSA_NAME_DEF_STMT (expr)(tree_check ((expr), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 1805, __FUNCTION__, (SSA_NAME)))->ssa_name.def_stmt
);
1806 if (def_bb)
1807 {
1808 if (flow_bb_inside_loop_p (loop, def_bb))
1809 return NULLnullptr;
1810 return superloop_at_depth (loop,
1811 loop_depth (def_bb->loop_father) + 1);
1812 }
1813
1814 return current_loops((cfun + 0)->x_current_loops)->tree_root;
1815 }
1816
1817 if (!EXPR_P (expr)((tree_code_type[(int) (((enum tree_code) (expr)->base.code
))]) >= tcc_reference && (tree_code_type[(int) (((
enum tree_code) (expr)->base.code))]) <= tcc_expression
)
)
1818 return NULLnullptr;
1819
1820 unsigned maxdepth = 0;
1821 len = TREE_OPERAND_LENGTH (expr)tree_operand_length (expr);
1822 for (i = 0; i < len; i++)
1823 {
1824 class loop *ivloop;
1825 if (!TREE_OPERAND (expr, i)(*((const_cast<tree*> (tree_operand_check ((expr), (i),
"/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 1825, __FUNCTION__)))))
)
1826 continue;
1827
1828 ivloop = outermost_invariant_loop_for_expr (loop, TREE_OPERAND (expr, i)(*((const_cast<tree*> (tree_operand_check ((expr), (i),
"/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 1828, __FUNCTION__)))))
);
1829 if (!ivloop)
1830 return NULLnullptr;
1831 maxdepth = MAX (maxdepth, loop_depth (ivloop))((maxdepth) > (loop_depth (ivloop)) ? (maxdepth) : (loop_depth
(ivloop)))
;
1832 }
1833
1834 return superloop_at_depth (loop, maxdepth);
1835}
1836
1837/* Returns true if expression EXPR is obviously invariant in LOOP,
1838 i.e. if all its operands are defined outside of the LOOP. LOOP
1839 should not be the function body. */
1840
1841bool
1842expr_invariant_in_loop_p (class loop *loop, tree expr)
1843{
1844 basic_block def_bb;
1845 unsigned i, len;
1846
1847 gcc_assert (loop_depth (loop) > 0)((void)(!(loop_depth (loop) > 0) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 1847, __FUNCTION__), 0 : 0))
;
1848
1849 if (is_gimple_min_invariant (expr))
1850 return true;
1851
1852 if (TREE_CODE (expr)((enum tree_code) (expr)->base.code) == SSA_NAME)
1853 {
1854 def_bb = gimple_bb (SSA_NAME_DEF_STMT (expr)(tree_check ((expr), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 1854, __FUNCTION__, (SSA_NAME)))->ssa_name.def_stmt
);
1855 if (def_bb
1856 && flow_bb_inside_loop_p (loop, def_bb))
1857 return false;
1858
1859 return true;
1860 }
1861
1862 if (!EXPR_P (expr)((tree_code_type[(int) (((enum tree_code) (expr)->base.code
))]) >= tcc_reference && (tree_code_type[(int) (((
enum tree_code) (expr)->base.code))]) <= tcc_expression
)
)
1863 return false;
1864
1865 len = TREE_OPERAND_LENGTH (expr)tree_operand_length (expr);
1866 for (i = 0; i < len; i++)
1867 if (TREE_OPERAND (expr, i)(*((const_cast<tree*> (tree_operand_check ((expr), (i),
"/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 1867, __FUNCTION__)))))
1868 && !expr_invariant_in_loop_p (loop, TREE_OPERAND (expr, i)(*((const_cast<tree*> (tree_operand_check ((expr), (i),
"/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 1868, __FUNCTION__)))))
))
1869 return false;
1870
1871 return true;
1872}
1873
1874/* Given expression EXPR which computes inductive values with respect
1875 to loop recorded in DATA, this function returns biv from which EXPR
1876 is derived by tracing definition chains of ssa variables in EXPR. */
1877
1878static struct iv*
1879find_deriving_biv_for_expr (struct ivopts_data *data, tree expr)
1880{
1881 struct iv *iv;
1882 unsigned i, n;
1883 tree e2, e1;
1884 enum tree_code code;
1885 gimple *stmt;
1886
1887 if (expr == NULL_TREE(tree) nullptr)
1888 return NULLnullptr;
1889
1890 if (is_gimple_min_invariant (expr))
1891 return NULLnullptr;
1892
1893 code = TREE_CODE (expr)((enum tree_code) (expr)->base.code);
1894 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))((tree_code_type[(int) (code)]) >= tcc_reference &&
(tree_code_type[(int) (code)]) <= tcc_expression)
)
1895 {
1896 n = TREE_OPERAND_LENGTH (expr)tree_operand_length (expr);
1897 for (i = 0; i < n; i++)
1898 {
1899 iv = find_deriving_biv_for_expr (data, TREE_OPERAND (expr, i)(*((const_cast<tree*> (tree_operand_check ((expr), (i),
"/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 1899, __FUNCTION__)))))
);
1900 if (iv)
1901 return iv;
1902 }
1903 }
1904
1905 /* Stop if it's not ssa name. */
1906 if (code != SSA_NAME)
1907 return NULLnullptr;
1908
1909 iv = get_iv (data, expr);
1910 if (!iv || integer_zerop (iv->step))
1911 return NULLnullptr;
1912 else if (iv->biv_p)
1913 return iv;
1914
1915 stmt = SSA_NAME_DEF_STMT (expr)(tree_check ((expr), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 1915, __FUNCTION__, (SSA_NAME)))->ssa_name.def_stmt
;
1916 if (gphi *phi = dyn_cast <gphi *> (stmt))
1917 {
1918 ssa_op_iter iter;
1919 use_operand_p use_p;
1920 basic_block phi_bb = gimple_bb (phi);
1921
1922 /* Skip loop header PHI that doesn't define biv. */
1923 if (phi_bb->loop_father == data->current_loop)
1924 return NULLnullptr;
1925
1926 if (virtual_operand_p (gimple_phi_result (phi)))
1927 return NULLnullptr;
1928
1929 FOR_EACH_PHI_ARG (use_p, phi, iter, SSA_OP_USE)for ((use_p) = op_iter_init_phiuse (&(iter), phi, 0x01); !
op_iter_done (&(iter)); (use_p) = op_iter_next_use (&
(iter)))
1930 {
1931 tree use = USE_FROM_PTR (use_p)get_use_from_ptr (use_p);
1932 iv = find_deriving_biv_for_expr (data, use);
1933 if (iv)
1934 return iv;
1935 }
1936 return NULLnullptr;
1937 }
1938 if (gimple_code (stmt) != GIMPLE_ASSIGN)
1939 return NULLnullptr;
1940
1941 e1 = gimple_assign_rhs1 (stmt);
1942 code = gimple_assign_rhs_code (stmt);
1943 if (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS)
1944 return find_deriving_biv_for_expr (data, e1);
1945
1946 switch (code)
1947 {
1948 case MULT_EXPR:
1949 case PLUS_EXPR:
1950 case MINUS_EXPR:
1951 case POINTER_PLUS_EXPR:
1952 /* Increments, decrements and multiplications by a constant
1953 are simple. */
1954 e2 = gimple_assign_rhs2 (stmt);
1955 iv = find_deriving_biv_for_expr (data, e2);
1956 if (iv)
1957 return iv;
1958 gcc_fallthrough ();
1959
1960 CASE_CONVERTcase NOP_EXPR: case CONVERT_EXPR:
1961 /* Casts are simple. */
1962 return find_deriving_biv_for_expr (data, e1);
1963
1964 default:
1965 break;
1966 }
1967
1968 return NULLnullptr;
1969}
1970
1971/* Record BIV, its predecessor and successor that they are used in
1972 address type uses. */
1973
1974static void
1975record_biv_for_address_use (struct ivopts_data *data, struct iv *biv)
1976{
1977 unsigned i;
1978 tree type, base_1, base_2;
1979 bitmap_iterator bi;
1980
1981 if (!biv || !biv->biv_p || integer_zerop (biv->step)
1982 || biv->have_address_use || !biv->no_overflow)
1983 return;
1984
1985 type = TREE_TYPE (biv->base)((contains_struct_check ((biv->base), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 1985, __FUNCTION__))->typed.type)
;
1986 if (!INTEGRAL_TYPE_P (type)(((enum tree_code) (type)->base.code) == ENUMERAL_TYPE || (
(enum tree_code) (type)->base.code) == BOOLEAN_TYPE || ((enum
tree_code) (type)->base.code) == INTEGER_TYPE)
)
1987 return;
1988
1989 biv->have_address_use = true;
1990 data->bivs_not_used_in_addr--;
1991 base_1 = fold_build2 (PLUS_EXPR, type, biv->base, biv->step)fold_build2_loc (((location_t) 0), PLUS_EXPR, type, biv->base
, biv->step )
;
1992 EXECUTE_IF_SET_IN_BITMAP (data->relevant, 0, i, bi)for (bmp_iter_set_init (&(bi), (data->relevant), (0), &
(i)); bmp_iter_set (&(bi), &(i)); bmp_iter_next (&
(bi), &(i)))
1993 {
1994 struct iv *iv = ver_info (data, i)->iv;
1995
1996 if (!iv || !iv->biv_p || integer_zerop (iv->step)
1997 || iv->have_address_use || !iv->no_overflow)
1998 continue;
1999
2000 if (type != TREE_TYPE (iv->base)((contains_struct_check ((iv->base), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 2000, __FUNCTION__))->typed.type)
2001 || !INTEGRAL_TYPE_P (TREE_TYPE (iv->base))(((enum tree_code) (((contains_struct_check ((iv->base), (
TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 2001, __FUNCTION__))->typed.type))->base.code) == ENUMERAL_TYPE
|| ((enum tree_code) (((contains_struct_check ((iv->base)
, (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 2001, __FUNCTION__))->typed.type))->base.code) == BOOLEAN_TYPE
|| ((enum tree_code) (((contains_struct_check ((iv->base)
, (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 2001, __FUNCTION__))->typed.type))->base.code) == INTEGER_TYPE
)
)
2002 continue;
2003
2004 if (!operand_equal_p (biv->step, iv->step, 0))
2005 continue;
2006
2007 base_2 = fold_build2 (PLUS_EXPR, type, iv->base, iv->step)fold_build2_loc (((location_t) 0), PLUS_EXPR, type, iv->base
, iv->step )
;
2008 if (operand_equal_p (base_1, iv->base, 0)
2009 || operand_equal_p (base_2, biv->base, 0))
2010 {
2011 iv->have_address_use = true;
2012 data->bivs_not_used_in_addr--;
2013 }
2014 }
2015}
2016
2017/* Cumulates the steps of indices into DATA and replaces their values with the
2018 initial ones. Returns false when the value of the index cannot be determined.
2019 Callback for for_each_index. */
2020
2021struct ifs_ivopts_data
2022{
2023 struct ivopts_data *ivopts_data;
2024 gimple *stmt;
2025 tree step;
2026};
2027
2028static bool
2029idx_find_step (tree base, tree *idx, void *data)
2030{
2031 struct ifs_ivopts_data *dta = (struct ifs_ivopts_data *) data;
2032 struct iv *iv;
2033 bool use_overflow_semantics = false;
2034 tree step, iv_base, iv_step, lbound, off;
2035 class loop *loop = dta->ivopts_data->current_loop;
2036
2037 /* If base is a component ref, require that the offset of the reference
2038 be invariant. */
2039 if (TREE_CODE (base)((enum tree_code) (base)->base.code) == COMPONENT_REF)
2040 {
2041 off = component_ref_field_offset (base);
2042 return expr_invariant_in_loop_p (loop, off);
2043 }
2044
2045 /* If base is array, first check whether we will be able to move the
2046 reference out of the loop (in order to take its address in strength
2047 reduction). In order for this to work we need both lower bound
2048 and step to be loop invariants. */
2049 if (TREE_CODE (base)((enum tree_code) (base)->base.code) == ARRAY_REF || TREE_CODE (base)((enum tree_code) (base)->base.code) == ARRAY_RANGE_REF)
2050 {
2051 /* Moreover, for a range, the size needs to be invariant as well. */
2052 if (TREE_CODE (base)((enum tree_code) (base)->base.code) == ARRAY_RANGE_REF
2053 && !expr_invariant_in_loop_p (loop, TYPE_SIZE (TREE_TYPE (base))((tree_class_check ((((contains_struct_check ((base), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 2053, __FUNCTION__))->typed.type)), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 2053, __FUNCTION__))->type_common.size)
))
2054 return false;
2055
2056 step = array_ref_element_size (base);
2057 lbound = array_ref_low_bound (base);
2058
2059 if (!expr_invariant_in_loop_p (loop, step)
2060 || !expr_invariant_in_loop_p (loop, lbound))
2061 return false;
2062 }
2063
2064 if (TREE_CODE (*idx)((enum tree_code) (*idx)->base.code) != SSA_NAME)
2065 return true;
2066
2067 iv = get_iv (dta->ivopts_data, *idx);
2068 if (!iv)
2069 return false;
2070
2071 /* XXX We produce for a base of *D42 with iv->base being &x[0]
2072 *&x[0], which is not folded and does not trigger the
2073 ARRAY_REF path below. */
2074 *idx = iv->base;
2075
2076 if (integer_zerop (iv->step))
2077 return true;
2078
2079 if (TREE_CODE (base)((enum tree_code) (base)->base.code) == ARRAY_REF || TREE_CODE (base)((enum tree_code) (base)->base.code) == ARRAY_RANGE_REF)
2080 {
2081 step = array_ref_element_size (base);
2082
2083 /* We only handle addresses whose step is an integer constant. */
2084 if (TREE_CODE (step)((enum tree_code) (step)->base.code) != INTEGER_CST)
2085 return false;
2086 }
2087 else
2088 /* The step for pointer arithmetics already is 1 byte. */
2089 step = size_one_nodeglobal_trees[TI_SIZE_ONE];
2090
2091 iv_base = iv->base;
2092 iv_step = iv->step;
2093 if (iv->no_overflow && nowrap_type_p (TREE_TYPE (iv_step)((contains_struct_check ((iv_step), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 2093, __FUNCTION__))->typed.type)
))
2094 use_overflow_semantics = true;
2095
2096 if (!convert_affine_scev (dta->ivopts_data->current_loop,
2097 sizetypesizetype_tab[(int) stk_sizetype], &iv_base, &iv_step, dta->stmt,
2098 use_overflow_semantics))
2099 {
2100 /* The index might wrap. */
2101 return false;
2102 }
2103
2104 step = fold_build2 (MULT_EXPR, sizetype, step, iv_step)fold_build2_loc (((location_t) 0), MULT_EXPR, sizetype_tab[(int
) stk_sizetype], step, iv_step )
;
2105 dta->step = fold_build2 (PLUS_EXPR, sizetype, dta->step, step)fold_build2_loc (((location_t) 0), PLUS_EXPR, sizetype_tab[(int
) stk_sizetype], dta->step, step )
;
2106
2107 if (dta->ivopts_data->bivs_not_used_in_addr)
2108 {
2109 if (!iv->biv_p)
2110 iv = find_deriving_biv_for_expr (dta->ivopts_data, iv->ssa_name);
2111
2112 record_biv_for_address_use (dta->ivopts_data, iv);
2113 }
2114 return true;
2115}
2116
2117/* Records use in index IDX. Callback for for_each_index. Ivopts data
2118 object is passed to it in DATA. */
2119
2120static bool
2121idx_record_use (tree base, tree *idx,
2122 void *vdata)
2123{
2124 struct ivopts_data *data = (struct ivopts_data *) vdata;
2125 find_interesting_uses_op (data, *idx);
2126 if (TREE_CODE (base)((enum tree_code) (base)->base.code) == ARRAY_REF || TREE_CODE (base)((enum tree_code) (base)->base.code) == ARRAY_RANGE_REF)
2127 {
2128 find_interesting_uses_op (data, array_ref_element_size (base));
2129 find_interesting_uses_op (data, array_ref_low_bound (base));
2130 }
2131 return true;
2132}
2133
2134/* If we can prove that TOP = cst * BOT for some constant cst,
2135 store cst to MUL and return true. Otherwise return false.
2136 The returned value is always sign-extended, regardless of the
2137 signedness of TOP and BOT. */
2138
2139static bool
2140constant_multiple_of (tree top, tree bot, widest_int *mul)
2141{
2142 tree mby;
2143 enum tree_code code;
2144 unsigned precision = TYPE_PRECISION (TREE_TYPE (top))((tree_class_check ((((contains_struct_check ((top), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 2144, __FUNCTION__))->typed.type)), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 2144, __FUNCTION__))->type_common.precision)
;
2145 widest_int res, p0, p1;
2146
2147 STRIP_NOPS (top)(top) = tree_strip_nop_conversions ((const_cast<union tree_node
*> (((top)))))
;
2148 STRIP_NOPS (bot)(bot) = tree_strip_nop_conversions ((const_cast<union tree_node
*> (((bot)))))
;
2149
2150 if (operand_equal_p (top, bot, 0))
2151 {
2152 *mul = 1;
2153 return true;
2154 }
2155
2156 code = TREE_CODE (top)((enum tree_code) (top)->base.code);
2157 switch (code)
2158 {
2159 case MULT_EXPR:
2160 mby = TREE_OPERAND (top, 1)(*((const_cast<tree*> (tree_operand_check ((top), (1), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 2160, __FUNCTION__)))))
;
2161 if (TREE_CODE (mby)((enum tree_code) (mby)->base.code) != INTEGER_CST)
2162 return false;
2163
2164 if (!constant_multiple_of (TREE_OPERAND (top, 0)(*((const_cast<tree*> (tree_operand_check ((top), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 2164, __FUNCTION__)))))
, bot, &res))
2165 return false;
2166
2167 *mul = wi::sext (res * wi::to_widest (mby), precision);
2168 return true;
2169
2170 case PLUS_EXPR:
2171 case MINUS_EXPR:
2172 if (!constant_multiple_of (TREE_OPERAND (top, 0)(*((const_cast<tree*> (tree_operand_check ((top), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 2172, __FUNCTION__)))))
, bot, &p0)
2173 || !constant_multiple_of (TREE_OPERAND (top, 1)(*((const_cast<tree*> (tree_operand_check ((top), (1), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 2173, __FUNCTION__)))))
, bot, &p1))
2174 return false;
2175
2176 if (code == MINUS_EXPR)
2177 p1 = -p1;
2178 *mul = wi::sext (p0 + p1, precision);
2179 return true;
2180
2181 case INTEGER_CST:
2182 if (TREE_CODE (bot)((enum tree_code) (bot)->base.code) != INTEGER_CST)
2183 return false;
2184
2185 p0 = widest_int::from (wi::to_wide (top), SIGNED);
2186 p1 = widest_int::from (wi::to_wide (bot), SIGNED);
2187 if (p1 == 0)
2188 return false;
2189 *mul = wi::sext (wi::divmod_trunc (p0, p1, SIGNED, &res), precision);
2190 return res == 0;
2191
2192 default:
2193 if (POLY_INT_CST_P (top)(1 > 1 && ((enum tree_code) (top)->base.code) ==
POLY_INT_CST)
2194 && POLY_INT_CST_P (bot)(1 > 1 && ((enum tree_code) (bot)->base.code) ==
POLY_INT_CST)
2195 && constant_multiple_p (wi::to_poly_widest (top),
2196 wi::to_poly_widest (bot), mul))
2197 return true;
2198
2199 return false;
2200 }
2201}
2202
2203/* Return true if memory reference REF with step STEP may be unaligned. */
2204
2205static bool
2206may_be_unaligned_p (tree ref, tree step)
2207{
2208 /* TARGET_MEM_REFs are translated directly to valid MEMs on the target,
2209 thus they are not misaligned. */
2210 if (TREE_CODE (ref)((enum tree_code) (ref)->base.code) == TARGET_MEM_REF)
2211 return false;
2212
2213 unsigned int align = TYPE_ALIGN (TREE_TYPE (ref))(((tree_class_check ((((contains_struct_check ((ref), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 2213, __FUNCTION__))->typed.type)), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 2213, __FUNCTION__))->type_common.align) ? ((unsigned)1)
<< (((tree_class_check ((((contains_struct_check ((ref
), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 2213, __FUNCTION__))->typed.type)), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 2213, __FUNCTION__))->type_common.align) - 1) : 0)
;
2214 if (GET_MODE_ALIGNMENT (TYPE_MODE (TREE_TYPE (ref)))get_mode_alignment (((((enum tree_code) ((tree_class_check ((
((contains_struct_check ((ref), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 2214, __FUNCTION__))->typed.type)), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 2214, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(((contains_struct_check ((ref), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 2214, __FUNCTION__))->typed.type)) : (((contains_struct_check
((ref), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 2214, __FUNCTION__))->typed.type))->type_common.mode)
)
> align)
2215 align = GET_MODE_ALIGNMENT (TYPE_MODE (TREE_TYPE (ref)))get_mode_alignment (((((enum tree_code) ((tree_class_check ((
((contains_struct_check ((ref), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 2215, __FUNCTION__))->typed.type)), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 2215, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(((contains_struct_check ((ref), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 2215, __FUNCTION__))->typed.type)) : (((contains_struct_check
((ref), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 2215, __FUNCTION__))->typed.type))->type_common.mode)
)
;
2216
2217 unsigned HOST_WIDE_INTlong bitpos;
2218 unsigned int ref_align;
2219 get_object_alignment_1 (ref, &ref_align, &bitpos);
2220 if (ref_align < align
2221 || (bitpos % align) != 0
2222 || (bitpos % BITS_PER_UNIT(8)) != 0)
2223 return true;
2224
2225 unsigned int trailing_zeros = tree_ctz (step);
2226 if (trailing_zeros < HOST_BITS_PER_INT(8 * 4)
2227 && (1U << trailing_zeros) * BITS_PER_UNIT(8) < align)
2228 return true;
2229
2230 return false;
2231}
2232
2233/* Return true if EXPR may be non-addressable. */
2234
2235bool
2236may_be_nonaddressable_p (tree expr)
2237{
2238 switch (TREE_CODE (expr)((enum tree_code) (expr)->base.code))
2239 {
2240 case VAR_DECL:
2241 /* Check if it's a register variable. */
2242 return DECL_HARD_REGISTER (expr)((tree_check ((expr), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 2242, __FUNCTION__, (VAR_DECL)))->decl_with_vis.hard_register
)
;
2243
2244 case TARGET_MEM_REF:
2245 /* TARGET_MEM_REFs are translated directly to valid MEMs on the
2246 target, thus they are always addressable. */
2247 return false;
2248
2249 case MEM_REF:
2250 /* Likewise for MEM_REFs, modulo the storage order. */
2251 return REF_REVERSE_STORAGE_ORDER (expr)((tree_check2 ((expr), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 2251, __FUNCTION__, (BIT_FIELD_REF), (MEM_REF)))->base.default_def_flag
)
;
2252
2253 case BIT_FIELD_REF:
2254 if (REF_REVERSE_STORAGE_ORDER (expr)((tree_check2 ((expr), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 2254, __FUNCTION__, (BIT_FIELD_REF), (MEM_REF)))->base.default_def_flag
)
)
2255 return true;
2256 return may_be_nonaddressable_p (TREE_OPERAND (expr, 0)(*((const_cast<tree*> (tree_operand_check ((expr), (0),
"/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 2256, __FUNCTION__)))))
);
2257
2258 case COMPONENT_REF:
2259 if (TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (TREE_OPERAND (expr, 0)))((tree_check4 ((((contains_struct_check (((*((const_cast<tree
*> (tree_operand_check ((expr), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 2259, __FUNCTION__)))))), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 2259, __FUNCTION__))->typed.type)), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 2259, __FUNCTION__, (RECORD_TYPE), (UNION_TYPE), (QUAL_UNION_TYPE
), (ARRAY_TYPE)))->base.u.bits.saturating_flag)
)
2260 return true;
2261 return DECL_NONADDRESSABLE_P (TREE_OPERAND (expr, 1))((tree_check (((*((const_cast<tree*> (tree_operand_check
((expr), (1), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 2261, __FUNCTION__)))))), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 2261, __FUNCTION__, (FIELD_DECL)))->decl_common.decl_flag_2
)
2262 || may_be_nonaddressable_p (TREE_OPERAND (expr, 0)(*((const_cast<tree*> (tree_operand_check ((expr), (0),
"/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 2262, __FUNCTION__)))))
);
2263
2264 case ARRAY_REF:
2265 case ARRAY_RANGE_REF:
2266 if (TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (TREE_OPERAND (expr, 0)))((tree_check4 ((((contains_struct_check (((*((const_cast<tree
*> (tree_operand_check ((expr), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 2266, __FUNCTION__)))))), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 2266, __FUNCTION__))->typed.type)), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 2266, __FUNCTION__, (RECORD_TYPE), (UNION_TYPE), (QUAL_UNION_TYPE
), (ARRAY_TYPE)))->base.u.bits.saturating_flag)
)
2267 return true;
2268 return may_be_nonaddressable_p (TREE_OPERAND (expr, 0)(*((const_cast<tree*> (tree_operand_check ((expr), (0),
"/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 2268, __FUNCTION__)))))
);
2269
2270 case VIEW_CONVERT_EXPR:
2271 /* This kind of view-conversions may wrap non-addressable objects
2272 and make them look addressable. After some processing the
2273 non-addressability may be uncovered again, causing ADDR_EXPRs
2274 of inappropriate objects to be built. */
2275 if (is_gimple_reg (TREE_OPERAND (expr, 0)(*((const_cast<tree*> (tree_operand_check ((expr), (0),
"/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 2275, __FUNCTION__)))))
)
2276 || !is_gimple_addressable (TREE_OPERAND (expr, 0)(*((const_cast<tree*> (tree_operand_check ((expr), (0),
"/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 2276, __FUNCTION__)))))
))
2277 return true;
2278 return may_be_nonaddressable_p (TREE_OPERAND (expr, 0)(*((const_cast<tree*> (tree_operand_check ((expr), (0),
"/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 2278, __FUNCTION__)))))
);
2279
2280 CASE_CONVERTcase NOP_EXPR: case CONVERT_EXPR:
2281 return true;
2282
2283 default:
2284 break;
2285 }
2286
2287 return false;
2288}
2289
2290/* Finds addresses in *OP_P inside STMT. */
2291
2292static void
2293find_interesting_uses_address (struct ivopts_data *data, gimple *stmt,
2294 tree *op_p)
2295{
2296 tree base = *op_p, step = size_zero_nodeglobal_trees[TI_SIZE_ZERO];
2297 struct iv *civ;
2298 struct ifs_ivopts_data ifs_ivopts_data;
2299
2300 /* Do not play with volatile memory references. A bit too conservative,
2301 perhaps, but safe. */
2302 if (gimple_has_volatile_ops (stmt))
2303 goto fail;
2304
2305 /* Ignore bitfields for now. Not really something terribly complicated
2306 to handle. TODO. */
2307 if (TREE_CODE (base)((enum tree_code) (base)->base.code) == BIT_FIELD_REF)
2308 goto fail;
2309
2310 base = unshare_expr (base);
2311
2312 if (TREE_CODE (base)((enum tree_code) (base)->base.code) == TARGET_MEM_REF)
2313 {
2314 tree type = build_pointer_type (TREE_TYPE (base)((contains_struct_check ((base), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 2314, __FUNCTION__))->typed.type)
);
2315 tree astep;
2316
2317 if (TMR_BASE (base)((*((const_cast<tree*> (tree_operand_check (((tree_check
((base), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 2317, __FUNCTION__, (TARGET_MEM_REF)))), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 2317, __FUNCTION__))))))
2318 && TREE_CODE (TMR_BASE (base))((enum tree_code) (((*((const_cast<tree*> (tree_operand_check
(((tree_check ((base), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 2318, __FUNCTION__, (TARGET_MEM_REF)))), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 2318, __FUNCTION__)))))))->base.code)
== SSA_NAME)
2319 {
2320 civ = get_iv (data, TMR_BASE (base)((*((const_cast<tree*> (tree_operand_check (((tree_check
((base), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 2320, __FUNCTION__, (TARGET_MEM_REF)))), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 2320, __FUNCTION__))))))
);
2321 if (!civ)
2322 goto fail;
2323
2324 TMR_BASE (base)((*((const_cast<tree*> (tree_operand_check (((tree_check
((base), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 2324, __FUNCTION__, (TARGET_MEM_REF)))), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 2324, __FUNCTION__))))))
= civ->base;
2325 step = civ->step;
2326 }
2327 if (TMR_INDEX2 (base)((*((const_cast<tree*> (tree_operand_check (((tree_check
((base), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 2327, __FUNCTION__, (TARGET_MEM_REF)))), (4), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 2327, __FUNCTION__))))))
2328 && TREE_CODE (TMR_INDEX2 (base))((enum tree_code) (((*((const_cast<tree*> (tree_operand_check
(((tree_check ((base), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 2328, __FUNCTION__, (TARGET_MEM_REF)))), (4), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 2328, __FUNCTION__)))))))->base.code)
== SSA_NAME)
2329 {
2330 civ = get_iv (data, TMR_INDEX2 (base)((*((const_cast<tree*> (tree_operand_check (((tree_check
((base), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 2330, __FUNCTION__, (TARGET_MEM_REF)))), (4), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 2330, __FUNCTION__))))))
);
2331 if (!civ)
2332 goto fail;
2333
2334 TMR_INDEX2 (base)((*((const_cast<tree*> (tree_operand_check (((tree_check
((base), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 2334, __FUNCTION__, (TARGET_MEM_REF)))), (4), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 2334, __FUNCTION__))))))
= civ->base;
2335 step = civ->step;
2336 }
2337 if (TMR_INDEX (base)((*((const_cast<tree*> (tree_operand_check (((tree_check
((base), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 2337, __FUNCTION__, (TARGET_MEM_REF)))), (2), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 2337, __FUNCTION__))))))
2338 && TREE_CODE (TMR_INDEX (base))((enum tree_code) (((*((const_cast<tree*> (tree_operand_check
(((tree_check ((base), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 2338, __FUNCTION__, (TARGET_MEM_REF)))), (2), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 2338, __FUNCTION__)))))))->base.code)
== SSA_NAME)
2339 {
2340 civ = get_iv (data, TMR_INDEX (base)((*((const_cast<tree*> (tree_operand_check (((tree_check
((base), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 2340, __FUNCTION__, (TARGET_MEM_REF)))), (2), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 2340, __FUNCTION__))))))
);
2341 if (!civ)
2342 goto fail;
2343
2344 TMR_INDEX (base)((*((const_cast<tree*> (tree_operand_check (((tree_check
((base), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 2344, __FUNCTION__, (TARGET_MEM_REF)))), (2), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 2344, __FUNCTION__))))))
= civ->base;
2345 astep = civ->step;
2346
2347 if (astep)
2348 {
2349 if (TMR_STEP (base)((*((const_cast<tree*> (tree_operand_check (((tree_check
((base), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 2349, __FUNCTION__, (TARGET_MEM_REF)))), (3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 2349, __FUNCTION__))))))
)
2350 astep = fold_build2 (MULT_EXPR, type, TMR_STEP (base), astep)fold_build2_loc (((location_t) 0), MULT_EXPR, type, ((*((const_cast
<tree*> (tree_operand_check (((tree_check ((base), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 2350, __FUNCTION__, (TARGET_MEM_REF)))), (3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 2350, __FUNCTION__)))))), astep )
;
2351
2352 step = fold_build2 (PLUS_EXPR, type, step, astep)fold_build2_loc (((location_t) 0), PLUS_EXPR, type, step, astep
)
;
2353 }
2354 }
2355
2356 if (integer_zerop (step))
2357 goto fail;
2358 base = tree_mem_ref_addr (type, base);
2359 }
2360 else
2361 {
2362 ifs_ivopts_data.ivopts_data = data;
2363 ifs_ivopts_data.stmt = stmt;
2364 ifs_ivopts_data.step = size_zero_nodeglobal_trees[TI_SIZE_ZERO];
2365 if (!for_each_index (&base, idx_find_step, &ifs_ivopts_data)
2366 || integer_zerop (ifs_ivopts_data.step))
2367 goto fail;
2368 step = ifs_ivopts_data.step;
2369
2370 /* Check that the base expression is addressable. This needs
2371 to be done after substituting bases of IVs into it. */
2372 if (may_be_nonaddressable_p (base))
2373 goto fail;
2374
2375 /* Moreover, on strict alignment platforms, check that it is
2376 sufficiently aligned. */
2377 if (STRICT_ALIGNMENT0 && may_be_unaligned_p (base, step))
2378 goto fail;
2379
2380 base = build_fold_addr_expr (base)build_fold_addr_expr_loc (((location_t) 0), (base));
2381
2382 /* Substituting bases of IVs into the base expression might
2383 have caused folding opportunities. */
2384 if (TREE_CODE (base)((enum tree_code) (base)->base.code) == ADDR_EXPR)
2385 {
2386 tree *ref = &TREE_OPERAND (base, 0)(*((const_cast<tree*> (tree_operand_check ((base), (0),
"/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 2386, __FUNCTION__)))))
;
2387 while (handled_component_p (*ref))
2388 ref = &TREE_OPERAND (*ref, 0)(*((const_cast<tree*> (tree_operand_check ((*ref), (0),
"/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 2388, __FUNCTION__)))))
;
2389 if (TREE_CODE (*ref)((enum tree_code) (*ref)->base.code) == MEM_REF)
2390 {
2391 tree tem = fold_binary (MEM_REF, TREE_TYPE (*ref),fold_binary_loc (((location_t) 0), MEM_REF, ((contains_struct_check
((*ref), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 2391, __FUNCTION__))->typed.type), (*((const_cast<tree
*> (tree_operand_check ((*ref), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 2392, __FUNCTION__))))), (*((const_cast<tree*> (tree_operand_check
((*ref), (1), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 2393, __FUNCTION__))))))
2392 TREE_OPERAND (*ref, 0),fold_binary_loc (((location_t) 0), MEM_REF, ((contains_struct_check
((*ref), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 2391, __FUNCTION__))->typed.type), (*((const_cast<tree
*> (tree_operand_check ((*ref), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 2392, __FUNCTION__))))), (*((const_cast<tree*> (tree_operand_check
((*ref), (1), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 2393, __FUNCTION__))))))
2393 TREE_OPERAND (*ref, 1))fold_binary_loc (((location_t) 0), MEM_REF, ((contains_struct_check
((*ref), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 2391, __FUNCTION__))->typed.type), (*((const_cast<tree
*> (tree_operand_check ((*ref), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 2392, __FUNCTION__))))), (*((const_cast<tree*> (tree_operand_check
((*ref), (1), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 2393, __FUNCTION__))))))
;
2394 if (tem)
2395 *ref = tem;
2396 }
2397 }
2398 }
2399
2400 civ = alloc_iv (data, base, step);
2401 /* Fail if base object of this memory reference is unknown. */
2402 if (civ->base_object == NULL_TREE(tree) nullptr)
2403 goto fail;
2404
2405 record_group_use (data, op_p, civ, stmt, USE_REF_ADDRESS, TREE_TYPE (*op_p)((contains_struct_check ((*op_p), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 2405, __FUNCTION__))->typed.type)
);
2406 return;
2407
2408fail:
2409 for_each_index (op_p, idx_record_use, data);
2410}
2411
2412/* Finds and records invariants used in STMT. */
2413
2414static void
2415find_invariants_stmt (struct ivopts_data *data, gimple *stmt)
2416{
2417 ssa_op_iter iter;
2418 use_operand_p use_p;
2419 tree op;
2420
2421 FOR_EACH_PHI_OR_STMT_USE (use_p, stmt, iter, SSA_OP_USE)for ((use_p) = (gimple_code (stmt) == GIMPLE_PHI ? op_iter_init_phiuse
(&(iter), as_a <gphi *> (stmt), 0x01) : op_iter_init_use
(&(iter), stmt, 0x01)); !op_iter_done (&(iter)); (use_p
) = op_iter_next_use (&(iter)))
2422 {
2423 op = USE_FROM_PTR (use_p)get_use_from_ptr (use_p);
2424 record_invariant (data, op, false);
2425 }
2426}
2427
2428/* CALL calls an internal function. If operand *OP_P will become an
2429 address when the call is expanded, return the type of the memory
2430 being addressed, otherwise return null. */
2431
2432static tree
2433get_mem_type_for_internal_fn (gcall *call, tree *op_p)
2434{
2435 switch (gimple_call_internal_fn (call))
2436 {
2437 case IFN_MASK_LOAD:
2438 case IFN_MASK_LOAD_LANES:
2439 case IFN_LEN_LOAD:
2440 if (op_p == gimple_call_arg_ptr (call, 0))
2441 return TREE_TYPE (gimple_call_lhs (call))((contains_struct_check ((gimple_call_lhs (call)), (TS_TYPED)
, "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 2441, __FUNCTION__))->typed.type)
;
2442 return NULL_TREE(tree) nullptr;
2443
2444 case IFN_MASK_STORE:
2445 case IFN_MASK_STORE_LANES:
2446 case IFN_LEN_STORE:
2447 if (op_p == gimple_call_arg_ptr (call, 0))
2448 return TREE_TYPE (gimple_call_arg (call, 3))((contains_struct_check ((gimple_call_arg (call, 3)), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 2448, __FUNCTION__))->typed.type)
;
2449 return NULL_TREE(tree) nullptr;
2450
2451 default:
2452 return NULL_TREE(tree) nullptr;
2453 }
2454}
2455
2456/* IV is a (non-address) iv that describes operand *OP_P of STMT.
2457 Return true if the operand will become an address when STMT
2458 is expanded and record the associated address use if so. */
2459
2460static bool
2461find_address_like_use (struct ivopts_data *data, gimple *stmt, tree *op_p,
2462 struct iv *iv)
2463{
2464 /* Fail if base object of this memory reference is unknown. */
2465 if (iv->base_object == NULL_TREE(tree) nullptr)
2466 return false;
2467
2468 tree mem_type = NULL_TREE(tree) nullptr;
2469 if (gcall *call = dyn_cast <gcall *> (stmt))
2470 if (gimple_call_internal_p (call))
2471 mem_type = get_mem_type_for_internal_fn (call, op_p);
2472 if (mem_type)
2473 {
2474 iv = alloc_iv (data, iv->base, iv->step);
2475 record_group_use (data, op_p, iv, stmt, USE_PTR_ADDRESS, mem_type);
2476 return true;
2477 }
2478 return false;
2479}
2480
2481/* Finds interesting uses of induction variables in the statement STMT. */
2482
2483static void
2484find_interesting_uses_stmt (struct ivopts_data *data, gimple *stmt)
2485{
2486 struct iv *iv;
2487 tree op, *lhs, *rhs;
2488 ssa_op_iter iter;
2489 use_operand_p use_p;
2490 enum tree_code code;
2491
2492 find_invariants_stmt (data, stmt);
2493
2494 if (gimple_code (stmt) == GIMPLE_COND)
2495 {
2496 find_interesting_uses_cond (data, stmt);
2497 return;
2498 }
2499
2500 if (is_gimple_assign (stmt))
2501 {
2502 lhs = gimple_assign_lhs_ptr (stmt);
2503 rhs = gimple_assign_rhs1_ptr (stmt);
2504
2505 if (TREE_CODE (*lhs)((enum tree_code) (*lhs)->base.code) == SSA_NAME)
2506 {
2507 /* If the statement defines an induction variable, the uses are not
2508 interesting by themselves. */
2509
2510 iv = get_iv (data, *lhs);
2511
2512 if (iv && !integer_zerop (iv->step))
2513 return;
2514 }
2515
2516 code = gimple_assign_rhs_code (stmt);
2517 if (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS
2518 && (REFERENCE_CLASS_P (*rhs)(tree_code_type[(int) (((enum tree_code) (*rhs)->base.code
))] == tcc_reference)
2519 || is_gimple_val (*rhs)))
2520 {
2521 if (REFERENCE_CLASS_P (*rhs)(tree_code_type[(int) (((enum tree_code) (*rhs)->base.code
))] == tcc_reference)
)
2522 find_interesting_uses_address (data, stmt, rhs);
2523 else
2524 find_interesting_uses_op (data, *rhs);
2525
2526 if (REFERENCE_CLASS_P (*lhs)(tree_code_type[(int) (((enum tree_code) (*lhs)->base.code
))] == tcc_reference)
)
2527 find_interesting_uses_address (data, stmt, lhs);
2528 return;
2529 }
2530 else if (TREE_CODE_CLASS (code)tree_code_type[(int) (code)] == tcc_comparison)
2531 {
2532 find_interesting_uses_cond (data, stmt);
2533 return;
2534 }
2535
2536 /* TODO -- we should also handle address uses of type
2537
2538 memory = call (whatever);
2539
2540 and
2541
2542 call (memory). */
2543 }
2544
2545 if (gimple_code (stmt) == GIMPLE_PHI
2546 && gimple_bb (stmt) == data->current_loop->header)
2547 {
2548 iv = get_iv (data, PHI_RESULT (stmt)get_def_from_ptr (gimple_phi_result_ptr (stmt)));
2549
2550 if (iv && !integer_zerop (iv->step))
2551 return;
2552 }
2553
2554 FOR_EACH_PHI_OR_STMT_USE (use_p, stmt, iter, SSA_OP_USE)for ((use_p) = (gimple_code (stmt) == GIMPLE_PHI ? op_iter_init_phiuse
(&(iter), as_a <gphi *> (stmt), 0x01) : op_iter_init_use
(&(iter), stmt, 0x01)); !op_iter_done (&(iter)); (use_p
) = op_iter_next_use (&(iter)))
2555 {
2556 op = USE_FROM_PTR (use_p)get_use_from_ptr (use_p);
2557
2558 if (TREE_CODE (op)((enum tree_code) (op)->base.code) != SSA_NAME)
2559 continue;
2560
2561 iv = get_iv (data, op);
2562 if (!iv)
2563 continue;
2564
2565 if (!find_address_like_use (data, stmt, use_p->use, iv))
2566 find_interesting_uses_op (data, op);
2567 }
2568}
2569
2570/* Finds interesting uses of induction variables outside of loops
2571 on loop exit edge EXIT. */
2572
2573static void
2574find_interesting_uses_outside (struct ivopts_data *data, edge exit)
2575{
2576 gphi *phi;
2577 gphi_iterator psi;
2578 tree def;
2579
2580 for (psi = gsi_start_phis (exit->dest); !gsi_end_p (psi); gsi_next (&psi))
2581 {
2582 phi = psi.phi ();
2583 def = PHI_ARG_DEF_FROM_EDGE (phi, exit)gimple_phi_arg_def (((phi)), ((exit)->dest_idx));
2584 if (!virtual_operand_p (def))
2585 find_interesting_uses_op (data, def);
2586 }
2587}
2588
2589/* Return TRUE if OFFSET is within the range of [base + offset] addressing
2590 mode for memory reference represented by USE. */
2591
2592static GTY (()) vec<rtx, va_gc> *addr_list;
2593
2594static bool
2595addr_offset_valid_p (struct iv_use *use, poly_int64 offset)
2596{
2597 rtx reg, addr;
2598 unsigned list_index;
2599 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (use->iv->base))((tree_class_check ((((contains_struct_check ((use->iv->
base), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 2599, __FUNCTION__))->typed.type)), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 2599, __FUNCTION__))->base.u.bits.address_space)
;
2600 machine_mode addr_mode, mem_mode = TYPE_MODE (use->mem_type)((((enum tree_code) ((tree_class_check ((use->mem_type), (
tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 2600, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(use->mem_type) : (use->mem_type)->type_common.mode
)
;
1
Assuming field 'code' is not equal to VECTOR_TYPE
2
'?' condition is false
2601
2602 list_index = (unsigned) as * MAX_MACHINE_MODE + (unsigned) mem_mode;
2603 if (list_index >= vec_safe_length (addr_list))
3
Calling 'vec_safe_length<rtx_def *, va_gc>'
7
Returning from 'vec_safe_length<rtx_def *, va_gc>'
8
Assuming the condition is true
9
Taking true branch
2604 vec_safe_grow_cleared (addr_list, list_index + MAX_MACHINE_MODE, true);
10
Passing value via 1st parameter 'v'
11
Calling 'vec_safe_grow_cleared<rtx_def *, va_gc>'
2605
2606 addr = (*addr_list)[list_index];
2607 if (!addr)
2608 {
2609 addr_mode = targetm.addr_space.address_mode (as);
2610 reg = gen_raw_REG (addr_mode, LAST_VIRTUAL_REGISTER(((76)) + 5) + 1);
2611 addr = gen_rtx_fmt_ee (PLUS, addr_mode, reg, NULL_RTX)gen_rtx_fmt_ee_stat ((PLUS), (addr_mode), (reg), ((rtx) 0) );
2612 (*addr_list)[list_index] = addr;
2613 }
2614 else
2615 addr_mode = GET_MODE (addr)((machine_mode) (addr)->mode);
2616
2617 XEXP (addr, 1)(((addr)->u.fld[1]).rt_rtx) = gen_int_mode (offset, addr_mode);
2618 return (memory_address_addr_space_p (mem_mode, addr, as));
2619}
2620
2621/* Comparison function to sort group in ascending order of addr_offset. */
2622
2623static int
2624group_compare_offset (const void *a, const void *b)
2625{
2626 const struct iv_use *const *u1 = (const struct iv_use *const *) a;
2627 const struct iv_use *const *u2 = (const struct iv_use *const *) b;
2628
2629 return compare_sizes_for_sort ((*u1)->addr_offset, (*u2)->addr_offset);
2630}
2631
2632/* Check if small groups should be split. Return true if no group
2633 contains more than two uses with distinct addr_offsets. Return
2634 false otherwise. We want to split such groups because:
2635
2636 1) Small groups don't have much benefit and may interfer with
2637 general candidate selection.
2638 2) Size for problem with only small groups is usually small and
2639 general algorithm can handle it well.
2640
2641 TODO -- Above claim may not hold when we want to merge memory
2642 accesses with conseuctive addresses. */
2643
2644static bool
2645split_small_address_groups_p (struct ivopts_data *data)
2646{
2647 unsigned int i, j, distinct = 1;
2648 struct iv_use *pre;
2649 struct iv_group *group;
2650
2651 for (i = 0; i < data->vgroups.length (); i++)
2652 {
2653 group = data->vgroups[i];
2654 if (group->vuses.length () == 1)
2655 continue;
2656
2657 gcc_assert (address_p (group->type))((void)(!(address_p (group->type)) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 2657, __FUNCTION__), 0 : 0))
;
2658 if (group->vuses.length () == 2)
2659 {
2660 if (compare_sizes_for_sort (group->vuses[0]->addr_offset,
2661 group->vuses[1]->addr_offset) > 0)
2662 std::swap (group->vuses[0], group->vuses[1]);
2663 }
2664 else
2665 group->vuses.qsort (group_compare_offset)qsort (group_compare_offset);
2666
2667 if (distinct > 2)
2668 continue;
2669
2670 distinct = 1;
2671 for (pre = group->vuses[0], j = 1; j < group->vuses.length (); j++)
2672 {
2673 if (maybe_ne (group->vuses[j]->addr_offset, pre->addr_offset))
2674 {
2675 pre = group->vuses[j];
2676 distinct++;
2677 }
2678
2679 if (distinct > 2)
2680 break;
2681 }
2682 }
2683
2684 return (distinct <= 2);
2685}
2686
2687/* For each group of address type uses, this function further groups
2688 these uses according to the maximum offset supported by target's
2689 [base + offset] addressing mode. */
2690
2691static void
2692split_address_groups (struct ivopts_data *data)
2693{
2694 unsigned int i, j;
2695 /* Always split group. */
2696 bool split_p = split_small_address_groups_p (data);
2697
2698 for (i = 0; i < data->vgroups.length (); i++)
2699 {
2700 struct iv_group *new_group = NULLnullptr;
2701 struct iv_group *group = data->vgroups[i];
2702 struct iv_use *use = group->vuses[0];
2703
2704 use->id = 0;
2705 use->group_id = group->id;
2706 if (group->vuses.length () == 1)
2707 continue;
2708
2709 gcc_assert (address_p (use->type))((void)(!(address_p (use->type)) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 2709, __FUNCTION__), 0 : 0))
;
2710
2711 for (j = 1; j < group->vuses.length ();)
2712 {
2713 struct iv_use *next = group->vuses[j];
2714 poly_int64 offset = next->addr_offset - use->addr_offset;
2715
2716 /* Split group if aksed to, or the offset against the first
2717 use can't fit in offset part of addressing mode. IV uses
2718 having the same offset are still kept in one group. */
2719 if (maybe_ne (offset, 0)
2720 && (split_p || !addr_offset_valid_p (use, offset)))
2721 {
2722 if (!new_group)
2723 new_group = record_group (data, group->type);
2724 group->vuses.ordered_remove (j);
2725 new_group->vuses.safe_push (next);
2726 continue;
2727 }
2728
2729 next->id = j;
2730 next->group_id = group->id;
2731 j++;
2732 }
2733 }
2734}
2735
2736/* Finds uses of the induction variables that are interesting. */
2737
2738static void
2739find_interesting_uses (struct ivopts_data *data)
2740{
2741 basic_block bb;
2742 gimple_stmt_iterator bsi;
2743 basic_block *body = get_loop_body (data->current_loop);
2744 unsigned i;
2745 edge e;
2746
2747 for (i = 0; i < data->current_loop->num_nodes; i++)
2748 {
2749 edge_iterator ei;
2750 bb = body[i];
2751
2752 FOR_EACH_EDGE (e, ei, bb->succs)for ((ei) = ei_start_1 (&((bb->succs))); ei_cond ((ei)
, &(e)); ei_next (&(ei)))
2753 if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)(((cfun + 0))->cfg->x_exit_block_ptr)
2754 && !flow_bb_inside_loop_p (data->current_loop, e->dest))
2755 find_interesting_uses_outside (data, e);
2756
2757 for (bsi = gsi_start_phis (bb); !gsi_end_p (bsi); gsi_next (&bsi))
2758 find_interesting_uses_stmt (data, gsi_stmt (bsi));
2759 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
2760 if (!is_gimple_debug (gsi_stmt (bsi)))
2761 find_interesting_uses_stmt (data, gsi_stmt (bsi));
2762 }
2763 free (body);
2764
2765 split_address_groups (data);
2766
2767 if (dump_file && (dump_flags & TDF_DETAILS))
2768 {
2769 fprintf (dump_file, "\n<IV Groups>:\n");
2770 dump_groups (dump_file, data);
2771 fprintf (dump_file, "\n");
2772 }
2773}
2774
2775/* Strips constant offsets from EXPR and stores them to OFFSET. If INSIDE_ADDR
2776 is true, assume we are inside an address. If TOP_COMPREF is true, assume
2777 we are at the top-level of the processed address. */
2778
2779static tree
2780strip_offset_1 (tree expr, bool inside_addr, bool top_compref,
2781 poly_int64 *offset)
2782{
2783 tree op0 = NULL_TREE(tree) nullptr, op1 = NULL_TREE(tree) nullptr, tmp, step;
2784 enum tree_code code;
2785 tree type, orig_type = TREE_TYPE (expr)((contains_struct_check ((expr), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 2785, __FUNCTION__))->typed.type)
;
2786 poly_int64 off0, off1;
2787 HOST_WIDE_INTlong st;
2788 tree orig_expr = expr;
2789
2790 STRIP_NOPS (expr)(expr) = tree_strip_nop_conversions ((const_cast<union tree_node
*> (((expr)))))
;
2791
2792 type = TREE_TYPE (expr)((contains_struct_check ((expr), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 2792, __FUNCTION__))->typed.type)
;
2793 code = TREE_CODE (expr)((enum tree_code) (expr)->base.code);
2794 *offset = 0;
2795
2796 switch (code)
2797 {
2798 case POINTER_PLUS_EXPR:
2799 case PLUS_EXPR:
2800 case MINUS_EXPR:
2801 op0 = TREE_OPERAND (expr, 0)(*((const_cast<tree*> (tree_operand_check ((expr), (0),
"/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 2801, __FUNCTION__)))))
;
2802 op1 = TREE_OPERAND (expr, 1)(*((const_cast<tree*> (tree_operand_check ((expr), (1),
"/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 2802, __FUNCTION__)))))
;
2803
2804 op0 = strip_offset_1 (op0, false, false, &off0);
2805 op1 = strip_offset_1 (op1, false, false, &off1);
2806
2807 *offset = (code == MINUS_EXPR ? off0 - off1 : off0 + off1);
2808 if (op0 == TREE_OPERAND (expr, 0)(*((const_cast<tree*> (tree_operand_check ((expr), (0),
"/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 2808, __FUNCTION__)))))
2809 && op1 == TREE_OPERAND (expr, 1)(*((const_cast<tree*> (tree_operand_check ((expr), (1),
"/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 2809, __FUNCTION__)))))
)
2810 return orig_expr;
2811
2812 if (integer_zerop (op1))
2813 expr = op0;
2814 else if (integer_zerop (op0))
2815 {
2816 if (code == MINUS_EXPR)
2817 expr = fold_build1 (NEGATE_EXPR, type, op1)fold_build1_loc (((location_t) 0), NEGATE_EXPR, type, op1 );
2818 else
2819 expr = op1;
2820 }
2821 else
2822 expr = fold_build2 (code, type, op0, op1)fold_build2_loc (((location_t) 0), code, type, op0, op1 );
2823
2824 return fold_convert (orig_type, expr)fold_convert_loc (((location_t) 0), orig_type, expr);
2825
2826 case MULT_EXPR:
2827 op1 = TREE_OPERAND (expr, 1)(*((const_cast<tree*> (tree_operand_check ((expr), (1),
"/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 2827, __FUNCTION__)))))
;
2828 if (!cst_and_fits_in_hwi (op1))
2829 return orig_expr;
2830
2831 op0 = TREE_OPERAND (expr, 0)(*((const_cast<tree*> (tree_operand_check ((expr), (0),
"/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 2831, __FUNCTION__)))))
;
2832 op0 = strip_offset_1 (op0, false, false, &off0);
2833 if (op0 == TREE_OPERAND (expr, 0)(*((const_cast<tree*> (tree_operand_check ((expr), (0),
"/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 2833, __FUNCTION__)))))
)
2834 return orig_expr;
2835
2836 *offset = off0 * int_cst_value (op1);
2837 if (integer_zerop (op0))
2838 expr = op0;
2839 else
2840 expr = fold_build2 (MULT_EXPR, type, op0, op1)fold_build2_loc (((location_t) 0), MULT_EXPR, type, op0, op1 );
2841
2842 return fold_convert (orig_type, expr)fold_convert_loc (((location_t) 0), orig_type, expr);
2843
2844 case ARRAY_REF:
2845 case ARRAY_RANGE_REF:
2846 if (!inside_addr)
2847 return orig_expr;
2848
2849 step = array_ref_element_size (expr);
2850 if (!cst_and_fits_in_hwi (step))
2851 break;
2852
2853 st = int_cst_value (step);
2854 op1 = TREE_OPERAND (expr, 1)(*((const_cast<tree*> (tree_operand_check ((expr), (1),
"/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 2854, __FUNCTION__)))))
;
2855 op1 = strip_offset_1 (op1, false, false, &off1);
2856 *offset = off1 * st;
2857
2858 if (top_compref
2859 && integer_zerop (op1))
2860 {
2861 /* Strip the component reference completely. */
2862 op0 = TREE_OPERAND (expr, 0)(*((const_cast<tree*> (tree_operand_check ((expr), (0),
"/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 2862, __FUNCTION__)))))
;
2863 op0 = strip_offset_1 (op0, inside_addr, top_compref, &off0);
2864 *offset += off0;
2865 return op0;
2866 }
2867 break;
2868
2869 case COMPONENT_REF:
2870 {
2871 tree field;
2872
2873 if (!inside_addr)
2874 return orig_expr;
2875
2876 tmp = component_ref_field_offset (expr);
2877 field = TREE_OPERAND (expr, 1)(*((const_cast<tree*> (tree_operand_check ((expr), (1),
"/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 2877, __FUNCTION__)))))
;
2878 if (top_compref
2879 && cst_and_fits_in_hwi (tmp)
2880 && cst_and_fits_in_hwi (DECL_FIELD_BIT_OFFSET (field)((tree_check ((field), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 2880, __FUNCTION__, (FIELD_DECL)))->field_decl.bit_offset
)
))
2881 {
2882 HOST_WIDE_INTlong boffset, abs_off;
2883
2884 /* Strip the component reference completely. */
2885 op0 = TREE_OPERAND (expr, 0)(*((const_cast<tree*> (tree_operand_check ((expr), (0),
"/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 2885, __FUNCTION__)))))
;
2886 op0 = strip_offset_1 (op0, inside_addr, top_compref, &off0);
2887 boffset = int_cst_value (DECL_FIELD_BIT_OFFSET (field)((tree_check ((field), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 2887, __FUNCTION__, (FIELD_DECL)))->field_decl.bit_offset
)
);
2888 abs_off = abs_hwi (boffset) / BITS_PER_UNIT(8);
2889 if (boffset < 0)
2890 abs_off = -abs_off;
2891
2892 *offset = off0 + int_cst_value (tmp) + abs_off;
2893 return op0;
2894 }
2895 }
2896 break;
2897
2898 case ADDR_EXPR:
2899 op0 = TREE_OPERAND (expr, 0)(*((const_cast<tree*> (tree_operand_check ((expr), (0),
"/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 2899, __FUNCTION__)))))
;
2900 op0 = strip_offset_1 (op0, true, true, &off0);
2901 *offset += off0;
2902
2903 if (op0 == TREE_OPERAND (expr, 0)(*((const_cast<tree*> (tree_operand_check ((expr), (0),
"/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 2903, __FUNCTION__)))))
)
2904 return orig_expr;
2905
2906 expr = build_fold_addr_expr (op0)build_fold_addr_expr_loc (((location_t) 0), (op0));
2907 return fold_convert (orig_type, expr)fold_convert_loc (((location_t) 0), orig_type, expr);
2908
2909 case MEM_REF:
2910 /* ??? Offset operand? */
2911 inside_addr = false;
2912 break;
2913
2914 default:
2915 if (ptrdiff_tree_p (expr, offset) && maybe_ne (*offset, 0))
2916 return build_int_cst (orig_type, 0);
2917 return orig_expr;
2918 }
2919
2920 /* Default handling of expressions for that we want to recurse into
2921 the first operand. */
2922 op0 = TREE_OPERAND (expr, 0)(*((const_cast<tree*> (tree_operand_check ((expr), (0),
"/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 2922, __FUNCTION__)))))
;
2923 op0 = strip_offset_1 (op0, inside_addr, false, &off0);
2924 *offset += off0;
2925
2926 if (op0 == TREE_OPERAND (expr, 0)(*((const_cast<tree*> (tree_operand_check ((expr), (0),
"/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 2926, __FUNCTION__)))))
2927 && (!op1 || op1 == TREE_OPERAND (expr, 1)(*((const_cast<tree*> (tree_operand_check ((expr), (1),
"/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 2927, __FUNCTION__)))))
))
2928 return orig_expr;
2929
2930 expr = copy_node (expr);
2931 TREE_OPERAND (expr, 0)(*((const_cast<tree*> (tree_operand_check ((expr), (0),
"/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 2931, __FUNCTION__)))))
= op0;
2932 if (op1)
2933 TREE_OPERAND (expr, 1)(*((const_cast<tree*> (tree_operand_check ((expr), (1),
"/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 2933, __FUNCTION__)))))
= op1;
2934
2935 /* Inside address, we might strip the top level component references,
2936 thus changing type of the expression. Handling of ADDR_EXPR
2937 will fix that. */
2938 expr = fold_convert (orig_type, expr)fold_convert_loc (((location_t) 0), orig_type, expr);
2939
2940 return expr;
2941}
2942
2943/* Strips constant offsets from EXPR and stores them to OFFSET. */
2944
2945tree
2946strip_offset (tree expr, poly_uint64_pod *offset)
2947{
 /* The worker accumulates the offset in a signed poly_int64; the result
    is then stored into the caller's unsigned poly_uint64_pod.  */
2948 poly_int64 off;
2949 tree core = strip_offset_1 (expr, false, false, &off);
2950 *offset = off;
2951 return core;
2952}
2953
2954/* Returns variant of TYPE that can be used as base for different uses.
2955 We return unsigned type with the same precision, which avoids problems
2956 with overflows. */
2957
2958static tree
2959generic_type_for (tree type)
2960{
 /* Pointer and reference types are replaced by an unsigned integer type
    of the same precision via unsigned_type_for.  */
2961 if (POINTER_TYPE_P (type)(((enum tree_code) (type)->base.code) == POINTER_TYPE || (
(enum tree_code) (type)->base.code) == REFERENCE_TYPE)
)
2962 return unsigned_type_for (type);
2963
 /* Already-unsigned integer types are usable as-is.  */
2964 if (TYPE_UNSIGNED (type)((tree_class_check ((type), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 2964, __FUNCTION__))->base.u.bits.unsigned_flag)
)
2965 return type;
2966
 /* Signed integer types get their unsigned counterpart.  */
2967 return unsigned_type_for (type);
2968}
2969
2970/* Private data for walk_tree. */
2971
2972struct walk_tree_data
2973{
 /* Pointer to the bitmap collecting invariant variable ids; the bitmap
    itself is allocated lazily by the walk callback.  */
2974 bitmap *inv_vars;
 /* The pass-wide ivopts data the callback consults and updates.  */
2975 struct ivopts_data *idata;
2976};
2977
2978/* Callback function for walk_tree, it records invariants and symbol
2979 reference in *EXPR_P. DATA is the structure storing result info. */
2980
2981static tree
2982find_inv_vars_cb (tree *expr_p, int *ws ATTRIBUTE_UNUSED__attribute__ ((__unused__)), void *data)
2983{
2984 tree op = *expr_p;
2985 struct version_info *info;
2986 struct walk_tree_data *wdata = (struct walk_tree_data*) data;
2987
 /* Only SSA names are of interest here; everything else is skipped.  */
2988 if (TREE_CODE (op)((enum tree_code) (op)->base.code) != SSA_NAME)
2989 return NULL_TREE(tree) nullptr;
2990
2991 info = name_info (wdata->idata, op);
2992 /* Because we expand simple operations when finding IVs, loop invariant
2993 variable that isn't referred by the original loop could be used now.
2994 Record such invariant variables here. */
2995 if (!info->iv)
2996 {
2997 struct ivopts_data *idata = wdata->idata;
2998 basic_block bb = gimple_bb (SSA_NAME_DEF_STMT (op)(tree_check ((op), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 2998, __FUNCTION__, (SSA_NAME)))->ssa_name.def_stmt
);
2999
 /* A name defined outside the current loop (or with no defining block)
    is a loop invariant: record it as a degenerate iv with step 0.  */
3000 if (!bb || !flow_bb_inside_loop_p (idata->current_loop, bb))
3001 {
3002 tree steptype = TREE_TYPE (op)((contains_struct_check ((op), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 3002, __FUNCTION__))->typed.type)
;
3003 if (POINTER_TYPE_P (steptype)(((enum tree_code) (steptype)->base.code) == POINTER_TYPE ||
((enum tree_code) (steptype)->base.code) == REFERENCE_TYPE
)
)
3004 steptype = sizetypesizetype_tab[(int) stk_sizetype];
3005 set_iv (idata, op, op, build_int_cst (steptype, 0), true);
3006 record_invariant (idata, op, false);
3007 }
3008 }
 /* Nothing to collect when the name has no invariant id or has a
    nonlinear use.  */
3009 if (!info->inv_id || info->has_nonlin_use)
3010 return NULL_TREE(tree) nullptr;
3011
 /* Allocate the result bitmap lazily, on first hit.  */
3012 if (!*wdata->inv_vars)
3013 *wdata->inv_vars = BITMAP_ALLOCbitmap_alloc (NULLnullptr);
3014 bitmap_set_bit (*wdata->inv_vars, info->inv_id);
3015
3016 return NULL_TREE(tree) nullptr;
3017}
3018
3019/* Records invariants in *EXPR_P. INV_VARS is the bitmap to that we should
3020 store it. */
3021
3022static inline void
3023find_inv_vars (struct ivopts_data *data, tree *expr_p, bitmap *inv_vars)
3024{
3025 struct walk_tree_data wdata;
3026
 /* A null INV_VARS means the caller is not interested in the result.  */
3027 if (!inv_vars)
3028 return;
3029
3030 wdata.idata = data;
3031 wdata.inv_vars = inv_vars;
 /* The callback fills *INV_VARS while walking the expression.  */
3032 walk_tree (expr_p, find_inv_vars_cb, &wdata, NULL)walk_tree_1 (expr_p, find_inv_vars_cb, &wdata, nullptr, nullptr
)
;
3033}
3034
3035/* Get entry from invariant expr hash table for INV_EXPR. New entry
3036 will be recorded if it doesn't exist yet. Given below two exprs:
3037 inv_expr + cst1, inv_expr + cst2
3038 It's hard to make decision whether constant part should be stripped
3039 or not. We choose to not strip based on below facts:
3040 1) We need to count ADD cost for constant part if it's stripped,
3041 which isn't always trivial where this functions is called.
3042 2) Stripping constant away may be conflict with following loop
3043 invariant hoisting pass.
3044 3) Not stripping constant away results in more invariant exprs,
3045 which usually leads to decision preferring lower reg pressure. */
3046
3047static iv_inv_expr_ent *
3048get_loop_invariant_expr (struct ivopts_data *data, tree inv_expr)
3049{
3050 STRIP_NOPS (inv_expr)(inv_expr) = tree_strip_nop_conversions ((const_cast<union
tree_node *> (((inv_expr)))))
;
3051
 /* Integer constants and bare SSA names are not recorded as invariant
    expressions; return NULL for them.  */
3052 if (poly_int_tree_p (inv_expr)
3053 || TREE_CODE (inv_expr)((enum tree_code) (inv_expr)->base.code) == SSA_NAME)
3054 return NULLnullptr;
3055
3056 /* Don't strip constant part away as we used to. */
3057
3058 /* Stores EXPR in DATA->inv_expr_tab, return pointer to iv_inv_expr_ent. */
3059 struct iv_inv_expr_ent ent;
3060 ent.expr = inv_expr;
3061 ent.hash = iterative_hash_expr (inv_expr, 0);
3062 struct iv_inv_expr_ent **slot = data->inv_expr_tab->find_slot (&ent, INSERT);
3063
 /* First time we see this expression: allocate an entry and give it a
    fresh id.  */
3064 if (!*slot)
3065 {
3066 *slot = XNEW (struct iv_inv_expr_ent)((struct iv_inv_expr_ent *) xmalloc (sizeof (struct iv_inv_expr_ent
)))
;
3067 (*slot)->expr = inv_expr;
3068 (*slot)->hash = ent.hash;
3069 (*slot)->id = ++data->max_inv_expr_id;
3070 }
3071
3072 return *slot;
3073}
3074
3075/* Adds a candidate BASE + STEP * i. Important field is set to IMPORTANT and
3076 position to POS. If USE is not NULL, the candidate is set as related to
3077 it. If both BASE and STEP are NULL, we add a pseudocandidate for the
3078 replacement of the final value of the iv by a direct computation. */
3079
3080static struct iv_cand *
3081add_candidate_1 (struct ivopts_data *data, tree base, tree step, bool important,
3082 enum iv_position pos, struct iv_use *use,
3083 gimple *incremented_at, struct iv *orig_iv = NULLnullptr,
3084 bool doloop = false)
3085{
3086 unsigned i;
3087 struct iv_cand *cand = NULLnullptr;
3088 tree type, orig_type;
3089
3090 gcc_assert (base && step)((void)(!(base && step) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 3090, __FUNCTION__), 0 : 0))
;
3091
3092 /* -fkeep-gc-roots-live means that we have to keep a real pointer
3093 live, but the ivopts code may replace a real pointer with one
3094 pointing before or after the memory block that is then adjusted
3095 into the memory block during the loop. FIXME: It would likely be
3096 better to actually force the pointer live and still use ivopts;
3097 for example, it would be enough to write the pointer into memory
3098 and keep it there until after the loop. */
3099 if (flag_keep_gc_roots_liveglobal_options.x_flag_keep_gc_roots_live && POINTER_TYPE_P (TREE_TYPE (base))(((enum tree_code) (((contains_struct_check ((base), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 3099, __FUNCTION__))->typed.type))->base.code) == POINTER_TYPE
|| ((enum tree_code) (((contains_struct_check ((base), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 3099, __FUNCTION__))->typed.type))->base.code) == REFERENCE_TYPE
)
)
3100 return NULLnullptr;
3101
3102 /* For non-original variables, make sure their values are computed in a type
3103 that does not invoke undefined behavior on overflows (since in general,
3104 we cannot prove that these induction variables are non-wrapping). */
3105 if (pos != IP_ORIGINAL)
3106 {
3107 orig_type = TREE_TYPE (base)((contains_struct_check ((base), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 3107, __FUNCTION__))->typed.type)
;
3108 type = generic_type_for (orig_type);
3109 if (type != orig_type)
3110 {
3111 base = fold_convert (type, base)fold_convert_loc (((location_t) 0), type, base);
3112 step = fold_convert (type, step)fold_convert_loc (((location_t) 0), type, step);
3113 }
3114 }
3115
 /* Look for an already registered candidate with the same position,
    increment statement, base, step and precision; reuse it if found.  */
3116 for (i = 0; i < data->vcands.length (); i++)
3117 {
3118 cand = data->vcands[i];
3119
3120 if (cand->pos != pos)
3121 continue;
3122
3123 if (cand->incremented_at != incremented_at
3124 || ((pos == IP_AFTER_USE || pos == IP_BEFORE_USE)
3125 && cand->ainc_use != use))
3126 continue;
3127
3128 if (operand_equal_p (base, cand->iv->base, 0)
3129 && operand_equal_p (step, cand->iv->step, 0)
3130 && (TYPE_PRECISION (TREE_TYPE (base))((tree_class_check ((((contains_struct_check ((base), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 3130, __FUNCTION__))->typed.type)), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 3130, __FUNCTION__))->type_common.precision)
3131 == TYPE_PRECISION (TREE_TYPE (cand->iv->base))((tree_class_check ((((contains_struct_check ((cand->iv->
base), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 3131, __FUNCTION__))->typed.type)), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 3131, __FUNCTION__))->type_common.precision)
))
3132 break;
3133 }
3134
 /* No matching candidate found: create, initialize and register a new
    one.  */
3135 if (i == data->vcands.length ())
3136 {
3137 cand = XCNEW (struct iv_cand)((struct iv_cand *) xcalloc (1, sizeof (struct iv_cand)));
3138 cand->id = i;
3139 cand->iv = alloc_iv (data, base, step);
3140 cand->pos = pos;
3141 if (pos != IP_ORIGINAL)
3142 {
3143 if (doloop)
3144 cand->var_before = create_tmp_var_raw (TREE_TYPE (base)((contains_struct_check ((base), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 3144, __FUNCTION__))->typed.type)
, "doloop");
3145 else
3146 cand->var_before = create_tmp_var_raw (TREE_TYPE (base)((contains_struct_check ((base), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 3146, __FUNCTION__))->typed.type)
, "ivtmp");
3147 cand->var_after = cand->var_before;
3148 }
3149 cand->important = important;
3150 cand->incremented_at = incremented_at;
3151 cand->doloop_p = doloop;
3152 data->vcands.safe_push (cand);
3153
 /* For a non-constant step, record the invariant variables and the
    invariant expression the step depends on.  */
3154 if (!poly_int_tree_p (step))
3155 {
3156 find_inv_vars (data, &step, &cand->inv_vars);
3157
3158 iv_inv_expr_ent *inv_expr = get_loop_invariant_expr (data, step);
3159 /* Share bitmap between inv_vars and inv_exprs for cand. */
3160 if (inv_expr != NULLnullptr)
3161 {
3162 cand->inv_exprs = cand->inv_vars;
3163 cand->inv_vars = NULLnullptr;
3164 if (cand->inv_exprs)
3165 bitmap_clear (cand->inv_exprs);
3166 else
3167 cand->inv_exprs = BITMAP_ALLOCbitmap_alloc (NULLnullptr);
3168
3169 bitmap_set_bit (cand->inv_exprs, inv_expr->id);
3170 }
3171 }
3172
3173 if (pos == IP_AFTER_USE || pos == IP_BEFORE_USE)
3174 cand->ainc_use = use;
3175 else
3176 cand->ainc_use = NULLnullptr;
3177
3178 cand->orig_iv = orig_iv;
3179 if (dump_file && (dump_flags & TDF_DETAILS))
3180 dump_cand (dump_file, cand);
3181 }
3182
 /* A reused candidate can be promoted to important / doloop status.  */
3183 cand->important |= important;
3184 cand->doloop_p |= doloop;
3185
3186 /* Relate candidate to the group for which it is added. */
3187 if (use)
3188 bitmap_set_bit (data->vgroups[use->group_id]->related_cands, i);
3189
3190 return cand;
3191}
3192
3193/* Returns true if incrementing the induction variable at the end of the LOOP
3194 is allowed.
3195
3196 The purpose is to avoid splitting latch edge with a biv increment, thus
3197 creating a jump, possibly confusing other optimization passes and leaving
3198 less freedom to scheduler. So we allow IP_END only if IP_NORMAL is not
3199 available (so we do not have a better alternative), or if the latch edge
3200 is already nonempty. */
3201
3202static bool
3203allow_ip_end_pos_p (class loop *loop)
3204{
3205 if (!ip_normal_pos (loop))
3206 return true;
3207
3208 if (!empty_block_p (ip_end_pos (loop)))
3209 return true;
3210
3211 return false;
3212}
3213
3214/* If possible, adds autoincrement candidates BASE + STEP * i based on use USE.
3215 Important field is set to IMPORTANT. */
3216
3217static void
3218add_autoinc_candidates (struct ivopts_data *data, tree base, tree step,
3219 bool important, struct iv_use *use)
3220{
3221 basic_block use_bb = gimple_bb (use->stmt);
3222 machine_mode mem_mode;
3223 unsigned HOST_WIDE_INTlong cstepi;
3224
3225 /* If we insert the increment in any position other than the standard
3226 ones, we must ensure that it is incremented once per iteration.
3227 It must not be in an inner nested loop, or one side of an if
3228 statement. */
3229 if (use_bb->loop_father != data->current_loop
3230 || !dominated_by_p (CDI_DOMINATORS, data->current_loop->latch, use_bb)
3231 || stmt_can_throw_internal (cfun(cfun + 0), use->stmt)
3232 || !cst_and_fits_in_hwi (step))
3233 return;
3234
3235 cstepi = int_cst_value (step);
3236
3237 mem_mode = TYPE_MODE (use->mem_type)((((enum tree_code) ((tree_class_check ((use->mem_type), (
tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 3237, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(use->mem_type) : (use->mem_type)->type_common.mode
)
;
 /* Pre-increment/decrement case: the candidate's base is shifted one
    step back so that the modification applied before the use yields
    BASE again.  */
3238 if (((USE_LOAD_PRE_INCREMENT (mem_mode)0
3239 || USE_STORE_PRE_INCREMENT (mem_mode)0)
3240 && known_eq (GET_MODE_SIZE (mem_mode), cstepi)(!maybe_ne (GET_MODE_SIZE (mem_mode), cstepi)))
3241 || ((USE_LOAD_PRE_DECREMENT (mem_mode)0
3242 || USE_STORE_PRE_DECREMENT (mem_mode)0)
3243 && known_eq (GET_MODE_SIZE (mem_mode), -cstepi)(!maybe_ne (GET_MODE_SIZE (mem_mode), -cstepi))))
3244 {
3245 enum tree_code code = MINUS_EXPR;
3246 tree new_base;
3247 tree new_step = step;
3248
3249 if (POINTER_TYPE_P (TREE_TYPE (base))(((enum tree_code) (((contains_struct_check ((base), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 3249, __FUNCTION__))->typed.type))->base.code) == POINTER_TYPE
|| ((enum tree_code) (((contains_struct_check ((base), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 3249, __FUNCTION__))->typed.type))->base.code) == REFERENCE_TYPE
)
)
3250 {
 /* Pointers have no MINUS_EXPR; use POINTER_PLUS with a negated step.  */
3251 new_step = fold_build1 (NEGATE_EXPR, TREE_TYPE (step), step)fold_build1_loc (((location_t) 0), NEGATE_EXPR, ((contains_struct_check
((step), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 3251, __FUNCTION__))->typed.type), step )
;
3252 code = POINTER_PLUS_EXPR;
3253 }
3254 else
3255 new_step = fold_convert (TREE_TYPE (base), new_step)fold_convert_loc (((location_t) 0), ((contains_struct_check (
(base), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 3255, __FUNCTION__))->typed.type), new_step)
;
3256 new_base = fold_build2 (code, TREE_TYPE (base), base, new_step)fold_build2_loc (((location_t) 0), code, ((contains_struct_check
((base), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 3256, __FUNCTION__))->typed.type), base, new_step )
;
3257 add_candidate_1 (data, new_base, step, important, IP_BEFORE_USE, use,
3258 use->stmt);
3259 }
 /* Post-increment/decrement case: BASE itself is usable directly.  */
3260 if (((USE_LOAD_POST_INCREMENT (mem_mode)0
3261 || USE_STORE_POST_INCREMENT (mem_mode)0)
3262 && known_eq (GET_MODE_SIZE (mem_mode), cstepi)(!maybe_ne (GET_MODE_SIZE (mem_mode), cstepi)))
3263 || ((USE_LOAD_POST_DECREMENT (mem_mode)0
3264 || USE_STORE_POST_DECREMENT (mem_mode)0)
3265 && known_eq (GET_MODE_SIZE (mem_mode), -cstepi)(!maybe_ne (GET_MODE_SIZE (mem_mode), -cstepi))))
3266 {
3267 add_candidate_1 (data, base, step, important, IP_AFTER_USE, use,
3268 use->stmt);
3269 }
3270}
3271
3272/* Adds a candidate BASE + STEP * i. Important field is set to IMPORTANT and
3273 position to POS. If USE is not NULL, the candidate is set as related to
3274 it. The candidate computation is scheduled before exit condition and at
3275 the end of loop. */
3276
3277static void
3278add_candidate (struct ivopts_data *data, tree base, tree step, bool important,
3279 struct iv_use *use, struct iv *orig_iv = NULLnullptr,
3280 bool doloop = false)
3281{
 /* Register the candidate at both standard positions when available:
    before the exit condition (IP_NORMAL) and at the end of the loop
    (IP_END).  */
3282 if (ip_normal_pos (data->current_loop))
3283 add_candidate_1 (data, base, step, important, IP_NORMAL, use, NULLnullptr, orig_iv,
3284 doloop);
3285 /* Exclude doloop candidate here since it requires decrement then comparison
3286 and jump, the IP_END position doesn't match. */
3287 if (!doloop && ip_end_pos (data->current_loop)
3288 && allow_ip_end_pos_p (data->current_loop))
3289 add_candidate_1 (data, base, step, important, IP_END, use, NULLnullptr, orig_iv);
3290}
3291
3292/* Adds standard iv candidates. */
3293
3294static void
3295add_standard_iv_candidates (struct ivopts_data *data)
3296{
 /* The canonical candidate 0 + 1 * i in plain int.  */
3297 add_candidate (data, integer_zero_nodeglobal_trees[TI_INTEGER_ZERO], integer_one_nodeglobal_trees[TI_INTEGER_ONE], true, NULLnullptr);
3298
3299 /* The same for a double-integer type if it is still fast enough. */
3300 if (TYPE_PRECISION((tree_class_check ((integer_types[itk_long]), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 3301, __FUNCTION__))->type_common.precision)
3301 (long_integer_type_node)((tree_class_check ((integer_types[itk_long]), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 3301, __FUNCTION__))->type_common.precision)
> TYPE_PRECISION (integer_type_node)((tree_class_check ((integer_types[itk_int]), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 3301, __FUNCTION__))->type_common.precision)
3302 && TYPE_PRECISION (long_integer_type_node)((tree_class_check ((integer_types[itk_long]), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 3302, __FUNCTION__))->type_common.precision)
<= BITS_PER_WORD((8) * (((global_options.x_ix86_isa_flags & (1UL <<
1)) != 0) ? 8 : 4))
)
3303 add_candidate (data, build_int_cst (long_integer_type_nodeinteger_types[itk_long], 0),
3304 build_int_cst (long_integer_type_nodeinteger_types[itk_long], 1), true, NULLnullptr);
3305
3306 /* The same for a double-integer type if it is still fast enough. */
3307 if (TYPE_PRECISION((tree_class_check ((integer_types[itk_long_long]), (tcc_type
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 3308, __FUNCTION__))->type_common.precision)
3308 (long_long_integer_type_node)((tree_class_check ((integer_types[itk_long_long]), (tcc_type
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 3308, __FUNCTION__))->type_common.precision)
> TYPE_PRECISION (long_integer_type_node)((tree_class_check ((integer_types[itk_long]), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 3308, __FUNCTION__))->type_common.precision)
3309 && TYPE_PRECISION (long_long_integer_type_node)((tree_class_check ((integer_types[itk_long_long]), (tcc_type
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 3309, __FUNCTION__))->type_common.precision)
<= BITS_PER_WORD((8) * (((global_options.x_ix86_isa_flags & (1UL <<
1)) != 0) ? 8 : 4))
)
3310 add_candidate (data, build_int_cst (long_long_integer_type_nodeinteger_types[itk_long_long], 0),
3311 build_int_cst (long_long_integer_type_nodeinteger_types[itk_long_long], 1), true, NULLnullptr);
3312}
3313
3314
3315/* Adds candidates bases on the old induction variable IV. */
3316
3317static void
3318add_iv_candidate_for_biv (struct ivopts_data *data, struct iv *iv)
3319{
3320 gimple *phi;
3321 tree def;
3322 struct iv_cand *cand;
3323
3324 /* Check if this biv is used in address type use. */
3325 if (iv->no_overflow && iv->have_address_use
3326 && INTEGRAL_TYPE_P (TREE_TYPE (iv->base))(((enum tree_code) (((contains_struct_check ((iv->base), (
TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 3326, __FUNCTION__))->typed.type))->base.code) == ENUMERAL_TYPE
|| ((enum tree_code) (((contains_struct_check ((iv->base)
, (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 3326, __FUNCTION__))->typed.type))->base.code) == BOOLEAN_TYPE
|| ((enum tree_code) (((contains_struct_check ((iv->base)
, (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 3326, __FUNCTION__))->typed.type))->base.code) == INTEGER_TYPE
)
3327 && TYPE_PRECISION (TREE_TYPE (iv->base))((tree_class_check ((((contains_struct_check ((iv->base), (
TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 3327, __FUNCTION__))->typed.type)), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 3327, __FUNCTION__))->type_common.precision)
< TYPE_PRECISION (sizetype)((tree_class_check ((sizetype_tab[(int) stk_sizetype]), (tcc_type
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 3327, __FUNCTION__))->type_common.precision)
)
3328 {
 /* Narrow non-wrapping biv with address uses: also provide a sizetype
    version of the candidate.  */
3329 tree base = fold_convert (sizetype, iv->base)fold_convert_loc (((location_t) 0), sizetype_tab[(int) stk_sizetype
], iv->base)
;
3330 tree step = fold_convert (sizetype, iv->step)fold_convert_loc (((location_t) 0), sizetype_tab[(int) stk_sizetype
], iv->step)
;
3331
3332 /* Add iv cand of same precision as index part in TARGET_MEM_REF. */
3333 add_candidate (data, base, step, true, NULLnullptr, iv);
3334 /* Add iv cand of the original type only if it has nonlinear use. */
3335 if (iv->nonlin_use)
3336 add_candidate (data, iv->base, iv->step, true, NULLnullptr);
3337 }
3338 else
3339 add_candidate (data, iv->base, iv->step, true, NULLnullptr);
3340
3341 /* The same, but with initial value zero. */
3342 if (POINTER_TYPE_P (TREE_TYPE (iv->base))(((enum tree_code) (((contains_struct_check ((iv->base), (
TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 3342, __FUNCTION__))->typed.type))->base.code) == POINTER_TYPE
|| ((enum tree_code) (((contains_struct_check ((iv->base)
, (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 3342, __FUNCTION__))->typed.type))->base.code) == REFERENCE_TYPE
)
)
3343 add_candidate (data, size_int (0)size_int_kind (0, stk_sizetype), iv->step, true, NULLnullptr);
3344 else
3345 add_candidate (data, build_int_cst (TREE_TYPE (iv->base)((contains_struct_check ((iv->base), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 3345, __FUNCTION__))->typed.type)
, 0),
3346 iv->step, true, NULLnullptr);
3347
 /* If the biv is defined by a PHI node, additionally consider keeping
    the original iv untouched (IP_ORIGINAL).  */
3348 phi = SSA_NAME_DEF_STMT (iv->ssa_name)(tree_check ((iv->ssa_name), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 3348, __FUNCTION__, (SSA_NAME)))->ssa_name.def_stmt
;
3349 if (gimple_code (phi) == GIMPLE_PHI)
3350 {
3351 /* Additionally record the possibility of leaving the original iv
3352 untouched. */
3353 def = PHI_ARG_DEF_FROM_EDGE (phi, loop_latch_edge (data->current_loop))gimple_phi_arg_def (((phi)), ((loop_latch_edge (data->current_loop
))->dest_idx))
;
3354 /* Don't add candidate if it's from another PHI node because
3355 it's an affine iv appearing in the form of PEELED_CHREC. */
3356 phi = SSA_NAME_DEF_STMT (def)(tree_check ((def), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 3356, __FUNCTION__, (SSA_NAME)))->ssa_name.def_stmt
;
3357 if (gimple_code (phi) != GIMPLE_PHI)
3358 {
3359 cand = add_candidate_1 (data,
3360 iv->base, iv->step, true, IP_ORIGINAL, NULLnullptr,
3361 SSA_NAME_DEF_STMT (def)(tree_check ((def), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 3361, __FUNCTION__, (SSA_NAME)))->ssa_name.def_stmt
);
 /* add_candidate_1 can return NULL (e.g. with -fkeep-gc-roots-live
    and a pointer base), hence the check.  */
3362 if (cand)
3363 {
3364 cand->var_before = iv->ssa_name;
3365 cand->var_after = def;
3366 }
3367 }
3368 else
3369 gcc_assert (gimple_bb (phi) == data->current_loop->header)((void)(!(gimple_bb (phi) == data->current_loop->header
) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 3369, __FUNCTION__), 0 : 0))
;
3370 }
3371}
3372
3373/* Adds candidates based on the old induction variables. */
3374
3375static void
3376add_iv_candidate_for_bivs (struct ivopts_data *data)
3377{
3378 unsigned i;
3379 struct iv *iv;
3380 bitmap_iterator bi;
3381
 /* Walk all relevant SSA versions; only bivs with a nonzero step get
    candidates.  */
3382 EXECUTE_IF_SET_IN_BITMAP (data->relevant, 0, i, bi)for (bmp_iter_set_init (&(bi), (data->relevant), (0), &
(i)); bmp_iter_set (&(bi), &(i)); bmp_iter_next (&
(bi), &(i)))
3383 {
3384 iv = ver_info (data, i)->iv;
3385 if (iv && iv->biv_p && !integer_zerop (iv->step))
3386 add_iv_candidate_for_biv (data, iv);
3387 }
3388}
3389
3390/* Record common candidate {BASE, STEP} derived from USE in hashtable. */
3391
3392static void
3393record_common_cand (struct ivopts_data *data, tree base,
3394 tree step, struct iv_use *use)
3395{
3396 class iv_common_cand ent;
3397 class iv_common_cand **slot;
3398
 /* The hash covers both the base and the step expressions.  */
3399 ent.base = base;
3400 ent.step = step;
3401 ent.hash = iterative_hash_expr (base, 0);
3402 ent.hash = iterative_hash_expr (step, ent.hash);
3403
 /* On the first occurrence of this {base, step} pair, allocate the
    entry and register it in the common-candidate list.  */
3404 slot = data->iv_common_cand_tab->find_slot (&ent, INSERT);
3405 if (*slot == NULLnullptr)
3406 {
3407 *slot = new iv_common_cand ();
3408 (*slot)->base = base;
3409 (*slot)->step = step;
3410 (*slot)->uses.create (8);
3411 (*slot)->hash = ent.hash;
3412 data->iv_common_cands.safe_push ((*slot));
3413 }
3414
3415 gcc_assert (use != NULL)((void)(!(use != nullptr) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 3415, __FUNCTION__), 0 : 0))
;
3416 (*slot)->uses.safe_push (use);
3417 return;
3418}
3419
3420/* Comparison function used to sort common candidates. */
3421
3422static int
3423common_cand_cmp (const void *p1, const void *p2)
3424{
3425 unsigned n1, n2;
3426 const class iv_common_cand *const *const ccand1
3427 = (const class iv_common_cand *const *)p1;
3428 const class iv_common_cand *const *const ccand2
3429 = (const class iv_common_cand *const *)p2;
3430
3431 n1 = (*ccand1)->uses.length ();
3432 n2 = (*ccand2)->uses.length ();
3433 return n2 - n1;
3434}
3435
3436/* Adds IV candidates based on common candidated recorded. */
3437
3438static void
3439add_iv_candidate_derived_from_uses (struct ivopts_data *data)
3440{
3441 unsigned i, j;
3442 struct iv_cand *cand_1, *cand_2;
3443
 /* Sort so that the candidates shared by most uses come first; the
    loop below can then stop at the first single-use entry.  */
3444 data->iv_common_cands.qsort (common_cand_cmp)qsort (common_cand_cmp);
3445 for (i = 0; i < data->iv_common_cands.length (); i++)
3446 {
3447 class iv_common_cand *ptr = data->iv_common_cands[i];
3448
3449 /* Only add IV candidate if it's derived from multiple uses. */
3450 if (ptr->uses.length () <= 1)
3451 break;
3452
3453 cand_1 = NULLnullptr;
3454 cand_2 = NULLnullptr;
3455 if (ip_normal_pos (data->current_loop))
3456 cand_1 = add_candidate_1 (data, ptr->base, ptr->step,
3457 false, IP_NORMAL, NULLnullptr, NULLnullptr);
3458
3459 if (ip_end_pos (data->current_loop)
3460 && allow_ip_end_pos_p (data->current_loop))
3461 cand_2 = add_candidate_1 (data, ptr->base, ptr->step,
3462 false, IP_END, NULLnullptr, NULLnullptr);
3463
3464 /* Bind deriving uses and the new candidates. */
3465 for (j = 0; j < ptr->uses.length (); j++)
3466 {
3467 struct iv_group *group = data->vgroups[ptr->uses[j]->group_id];
3468 if (cand_1)
3469 bitmap_set_bit (group->related_cands, cand_1->id);
3470 if (cand_2)
3471 bitmap_set_bit (group->related_cands, cand_2->id);
3472 }
3473 }
3474
3475 /* Release data since it is useless from this point. */
3476 data->iv_common_cand_tab->empty ();
3477 data->iv_common_cands.truncate (0);
3478}
3479
3480/* Adds candidates based on the value of USE's iv. */
3481
3482static void
3483add_iv_candidate_for_use (struct ivopts_data *data, struct iv_use *use)
3484{
3485 poly_uint64 offset;
3486 tree base;
3487 struct iv *iv = use->iv;
3488 tree basetype = TREE_TYPE (iv->base)((contains_struct_check ((iv->base), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 3488, __FUNCTION__))->typed.type)
;
3489
3490 /* Don't add candidate for iv_use with non integer, pointer or non-mode
3491 precision types, instead, add candidate for the corresponding scev in
3492 unsigned type with the same precision. See PR93674 for more info. */
3493 if ((TREE_CODE (basetype)((enum tree_code) (basetype)->base.code) != INTEGER_TYPE && !POINTER_TYPE_P (basetype)(((enum tree_code) (basetype)->base.code) == POINTER_TYPE ||
((enum tree_code) (basetype)->base.code) == REFERENCE_TYPE
)
)
3494 || !type_has_mode_precision_p (basetype))
3495 {
3496 basetype = lang_hooks.types.type_for_mode (TYPE_MODE (basetype)((((enum tree_code) ((tree_class_check ((basetype), (tcc_type
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 3496, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(basetype) : (basetype)->type_common.mode)
,
3497 TYPE_UNSIGNED (basetype)((tree_class_check ((basetype), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 3497, __FUNCTION__))->base.u.bits.unsigned_flag)
);
3498 add_candidate (data, fold_convert (basetype, iv->base)fold_convert_loc (((location_t) 0), basetype, iv->base),
3499 fold_convert (basetype, iv->step)fold_convert_loc (((location_t) 0), basetype, iv->step), false, NULLnullptr);
3500 return;
3501 }
3502
3503 add_candidate (data, iv->base, iv->step, false, use);
3504
3505 /* Record common candidate for use in case it can be shared by others. */
3506 record_common_cand (data, iv->base, iv->step, use);
3507
3508 /* Record common candidate with initial value zero. */
3509 basetype = TREE_TYPE (iv->base)((contains_struct_check ((iv->base), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 3509, __FUNCTION__))->typed.type)
;
3510 if (POINTER_TYPE_P (basetype)(((enum tree_code) (basetype)->base.code) == POINTER_TYPE ||
((enum tree_code) (basetype)->base.code) == REFERENCE_TYPE
)
)
3511 basetype = sizetypesizetype_tab[(int) stk_sizetype];
3512 record_common_cand (data, build_int_cst (basetype, 0), iv->step, use);
3513
3514 /* Compare the cost of an address with an unscaled index with the cost of
3515 an address with a scaled index and add candidate if useful. */
3516 poly_int64 step;
3517 if (use != NULLnullptr
3518 && poly_int_tree_p (iv->step, &step)
3519 && address_p (use->type))
3520 {
3521 poly_int64 new_step;
3522 unsigned int fact = preferred_mem_scale_factor
3523 (use->iv->base,
3524 TYPE_MODE (use->mem_type)((((enum tree_code) ((tree_class_check ((use->mem_type), (
tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 3524, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(use->mem_type) : (use->mem_type)->type_common.mode
)
,
3525 optimize_loop_for_speed_p (data->current_loop));
3526
3527 if (fact != 1
3528 && multiple_p (step, fact, &new_step))
3529 add_candidate (data, size_int (0)size_int_kind (0, stk_sizetype),
3530 wide_int_to_tree (sizetypesizetype_tab[(int) stk_sizetype], new_step),
3531 true, NULLnullptr);
3532 }
3533
3534 /* Record common candidate with constant offset stripped in base.
3535 Like the use itself, we also add candidate directly for it. */
3536 base = strip_offset (iv->base, &offset);
3537 if (maybe_ne (offset, 0U) || base != iv->base)
3538 {
3539 record_common_cand (data, base, iv->step, use);
3540 add_candidate (data, base, iv->step, false, use);
3541 }
3542
3543 /* Record common candidate with base_object removed in base. */
3544 base = iv->base;
3545 STRIP_NOPS (base)(base) = tree_strip_nop_conversions ((const_cast<union tree_node
*> (((base)))))
;
3546 if (iv->base_object != NULLnullptr && TREE_CODE (base)((enum tree_code) (base)->base.code) == POINTER_PLUS_EXPR)
3547 {
3548 tree step = iv->step;
3549
3550 STRIP_NOPS (step)(step) = tree_strip_nop_conversions ((const_cast<union tree_node
*> (((step)))))
;
3551 base = TREE_OPERAND (base, 1)(*((const_cast<tree*> (tree_operand_check ((base), (1),
"/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 3551, __FUNCTION__)))))
;
3552 step = fold_convert (sizetype, step)fold_convert_loc (((location_t) 0), sizetype_tab[(int) stk_sizetype
], step)
;
3553 record_common_cand (data, base, step, use);
3554 /* Also record common candidate with offset stripped. */
3555 base = strip_offset (base, &offset);
3556 if (maybe_ne (offset, 0U))
3557 record_common_cand (data, base, step, use);
3558 }
3559
3560 /* At last, add auto-incremental candidates. Make such variables
3561 important since other iv uses with same base object may be based
3562 on it. */
3563 if (use != NULLnullptr && address_p (use->type))
3564 add_autoinc_candidates (data, iv->base, iv->step, true, use);
3565}
3566
3567/* Adds candidates based on the uses. */
3568
3569static void
3570add_iv_candidate_for_groups (struct ivopts_data *data)
3571{
3572 unsigned i;
3573
3574 /* Only add candidate for the first use in group. */
3575 for (i = 0; i < data->vgroups.length (); i++)
3576 {
3577 struct iv_group *group = data->vgroups[i];
3578
3579 gcc_assert (group->vuses[0] != NULL)((void)(!(group->vuses[0] != nullptr) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 3579, __FUNCTION__), 0 : 0))
;
3580 add_iv_candidate_for_use (data, group->vuses[0]);
3581 }
3582 add_iv_candidate_derived_from_uses (data);
3583}
3584
3585/* Record important candidates and add them to related_cands bitmaps. */
3586
3587static void
3588record_important_candidates (struct ivopts_data *data)
3589{
3590 unsigned i;
3591 struct iv_group *group;
3592
3593 for (i = 0; i < data->vcands.length (); i++)
3594 {
3595 struct iv_cand *cand = data->vcands[i];
3596
3597 if (cand->important)
3598 bitmap_set_bit (data->important_candidates, i);
3599 }
3600
3601 data->consider_all_candidates = (data->vcands.length ()
3602 <= CONSIDER_ALL_CANDIDATES_BOUND((unsigned) global_options.x_param_iv_consider_all_candidates_bound
)
);
3603
3604 /* Add important candidates to groups' related_cands bitmaps. */
3605 for (i = 0; i < data->vgroups.length (); i++)
3606 {
3607 group = data->vgroups[i];
3608 bitmap_ior_into (group->related_cands, data->important_candidates);
3609 }
3610}
3611
3612/* Allocates the data structure mapping the (use, candidate) pairs to costs.
3613 If consider_all_candidates is true, we use a two-dimensional array, otherwise
3614 we allocate a simple list to every use. */
3615
3616static void
3617alloc_use_cost_map (struct ivopts_data *data)
3618{
3619 unsigned i, size, s;
3620
3621 for (i = 0; i < data->vgroups.length (); i++)
3622 {
3623 struct iv_group *group = data->vgroups[i];
3624
3625 if (data->consider_all_candidates)
3626 size = data->vcands.length ();
3627 else
3628 {
3629 s = bitmap_count_bits (group->related_cands);
3630
3631 /* Round up to the power of two, so that moduling by it is fast. */
3632 size = s ? (1 << ceil_log2 (s)) : 1;
3633 }
3634
3635 group->n_map_members = size;
3636 group->cost_map = XCNEWVEC (class cost_pair, size)((class cost_pair *) xcalloc ((size), sizeof (class cost_pair
)))
;
3637 }
3638}
3639
3640/* Sets cost of (GROUP, CAND) pair to COST and record that it depends
3641 on invariants INV_VARS and that the value used in expressing it is
3642 VALUE, and in case of iv elimination the comparison operator is COMP. */
3643
3644static void
3645set_group_iv_cost (struct ivopts_data *data,
3646 struct iv_group *group, struct iv_cand *cand,
3647 comp_cost cost, bitmap inv_vars, tree value,
3648 enum tree_code comp, bitmap inv_exprs)
3649{
3650 unsigned i, s;
3651
3652 if (cost.infinite_cost_p ())
3653 {
3654 BITMAP_FREE (inv_vars)((void) (bitmap_obstack_free ((bitmap) inv_vars), (inv_vars) =
(bitmap) nullptr))
;
3655 BITMAP_FREE (inv_exprs)((void) (bitmap_obstack_free ((bitmap) inv_exprs), (inv_exprs
) = (bitmap) nullptr))
;
3656 return;
3657 }
3658
3659 if (data->consider_all_candidates)
3660 {
3661 group->cost_map[cand->id].cand = cand;
3662 group->cost_map[cand->id].cost = cost;
3663 group->cost_map[cand->id].inv_vars = inv_vars;
3664 group->cost_map[cand->id].inv_exprs = inv_exprs;
3665 group->cost_map[cand->id].value = value;
3666 group->cost_map[cand->id].comp = comp;
3667 return;
3668 }
3669
3670 /* n_map_members is a power of two, so this computes modulo. */
3671 s = cand->id & (group->n_map_members - 1);
3672 for (i = s; i < group->n_map_members; i++)
3673 if (!group->cost_map[i].cand)
3674 goto found;
3675 for (i = 0; i < s; i++)
3676 if (!group->cost_map[i].cand)
3677 goto found;
3678
3679 gcc_unreachable ()(fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 3679, __FUNCTION__))
;
3680
3681found:
3682 group->cost_map[i].cand = cand;
3683 group->cost_map[i].cost = cost;
3684 group->cost_map[i].inv_vars = inv_vars;
3685 group->cost_map[i].inv_exprs = inv_exprs;
3686 group->cost_map[i].value = value;
3687 group->cost_map[i].comp = comp;
3688}
3689
3690/* Gets cost of (GROUP, CAND) pair. */
3691
3692static class cost_pair *
3693get_group_iv_cost (struct ivopts_data *data, struct iv_group *group,
3694 struct iv_cand *cand)
3695{
3696 unsigned i, s;
3697 class cost_pair *ret;
3698
3699 if (!cand)
3700 return NULLnullptr;
3701
3702 if (data->consider_all_candidates)
3703 {
3704 ret = group->cost_map + cand->id;
3705 if (!ret->cand)
3706 return NULLnullptr;
3707
3708 return ret;
3709 }
3710
3711 /* n_map_members is a power of two, so this computes modulo. */
3712 s = cand->id & (group->n_map_members - 1);
3713 for (i = s; i < group->n_map_members; i++)
3714 if (group->cost_map[i].cand == cand)
3715 return group->cost_map + i;
3716 else if (group->cost_map[i].cand == NULLnullptr)
3717 return NULLnullptr;
3718 for (i = 0; i < s; i++)
3719 if (group->cost_map[i].cand == cand)
3720 return group->cost_map + i;
3721 else if (group->cost_map[i].cand == NULLnullptr)
3722 return NULLnullptr;
3723
3724 return NULLnullptr;
3725}
3726
3727/* Produce DECL_RTL for object obj so it looks like it is stored in memory. */
3728static rtx
3729produce_memory_decl_rtl (tree obj, int *regno)
3730{
3731 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (obj))((tree_class_check ((((contains_struct_check ((obj), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 3731, __FUNCTION__))->typed.type)), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 3731, __FUNCTION__))->base.u.bits.address_space)
;
3732 machine_mode address_mode = targetm.addr_space.address_mode (as);
3733 rtx x;
3734
3735 gcc_assert (obj)((void)(!(obj) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 3735, __FUNCTION__), 0 : 0))
;
3736 if (TREE_STATIC (obj)((obj)->base.static_flag) || DECL_EXTERNAL (obj)((contains_struct_check ((obj), (TS_DECL_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 3736, __FUNCTION__))->decl_common.decl_flag_1)
)
3737 {
3738 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (obj))((const char *) (tree_check ((decl_assembler_name (obj)), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 3738, __FUNCTION__, (IDENTIFIER_NODE)))->identifier.id.str
)
;
3739 x = gen_rtx_SYMBOL_REF (address_mode, name)gen_rtx_fmt_s0_stat ((SYMBOL_REF), ((address_mode)), ((name))
)
;
3740 SET_SYMBOL_REF_DECL (x, obj)(((void)(!(!(__extension__ ({ __typeof ((x)) const _rtx = ((x
)); if (((enum rtx_code) (_rtx)->code) != SYMBOL_REF) rtl_check_failed_flag
("CONSTANT_POOL_ADDRESS_P", _rtx, "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 3740, __FUNCTION__); _rtx; })->unchanging)) ? fancy_abort
("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 3740, __FUNCTION__), 0 : 0)), ((((x))->u.fld[1]).rt_tree
) = (obj))
;
3741 x = gen_rtx_MEM (DECL_MODE (obj)((contains_struct_check ((obj), (TS_DECL_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 3741, __FUNCTION__))->decl_common.mode)
, x);
3742 set_mem_addr_space (x, as);
3743 targetm.encode_section_info (obj, x, true);
3744 }
3745 else
3746 {
3747 x = gen_raw_REG (address_mode, (*regno)++);
3748 x = gen_rtx_MEM (DECL_MODE (obj)((contains_struct_check ((obj), (TS_DECL_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 3748, __FUNCTION__))->decl_common.mode)
, x);
3749 set_mem_addr_space (x, as);
3750 }
3751
3752 return x;
3753}
3754
3755/* Prepares decl_rtl for variables referred in *EXPR_P. Callback for
3756 walk_tree. DATA contains the actual fake register number. */
3757
3758static tree
3759prepare_decl_rtl (tree *expr_p, int *ws, void *data)
3760{
3761 tree obj = NULL_TREE(tree) nullptr;
3762 rtx x = NULL_RTX(rtx) 0;
3763 int *regno = (int *) data;
3764
3765 switch (TREE_CODE (*expr_p)((enum tree_code) (*expr_p)->base.code))
3766 {
3767 case ADDR_EXPR:
3768 for (expr_p = &TREE_OPERAND (*expr_p, 0)(*((const_cast<tree*> (tree_operand_check ((*expr_p), (
0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 3768, __FUNCTION__)))))
;
3769 handled_component_p (*expr_p);
3770 expr_p = &TREE_OPERAND (*expr_p, 0)(*((const_cast<tree*> (tree_operand_check ((*expr_p), (
0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 3770, __FUNCTION__)))))
)
3771 continue;
3772 obj = *expr_p;
3773 if (DECL_P (obj)(tree_code_type[(int) (((enum tree_code) (obj)->base.code)
)] == tcc_declaration)
&& HAS_RTL_P (obj)((tree_contains_struct[(((enum tree_code) (obj)->base.code
))][(TS_DECL_WRTL)]))
&& !DECL_RTL_SET_P (obj)(((tree_contains_struct[(((enum tree_code) (obj)->base.code
))][(TS_DECL_WRTL)])) && (contains_struct_check ((obj
), (TS_DECL_WRTL), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 3773, __FUNCTION__))->decl_with_rtl.rtl != nullptr)
)
3774 x = produce_memory_decl_rtl (obj, regno);
3775 break;
3776
3777 case SSA_NAME:
3778 *ws = 0;
3779 obj = SSA_NAME_VAR (*expr_p)((tree_check ((*expr_p), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 3779, __FUNCTION__, (SSA_NAME)))->ssa_name.var == (tree)
nullptr || ((enum tree_code) ((*expr_p)->ssa_name.var)->
base.code) == IDENTIFIER_NODE ? (tree) nullptr : (*expr_p)->
ssa_name.var)
;
3780 /* Defer handling of anonymous SSA_NAMEs to the expander. */
3781 if (!obj)
3782 return NULL_TREE(tree) nullptr;
3783 if (!DECL_RTL_SET_P (obj)(((tree_contains_struct[(((enum tree_code) (obj)->base.code
))][(TS_DECL_WRTL)])) && (contains_struct_check ((obj
), (TS_DECL_WRTL), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 3783, __FUNCTION__))->decl_with_rtl.rtl != nullptr)
)
3784 x = gen_raw_REG (DECL_MODE (obj)((contains_struct_check ((obj), (TS_DECL_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 3784, __FUNCTION__))->decl_common.mode)
, (*regno)++);
3785 break;
3786
3787 case VAR_DECL:
3788 case PARM_DECL:
3789 case RESULT_DECL:
3790 *ws = 0;
3791 obj = *expr_p;
3792
3793 if (DECL_RTL_SET_P (obj)(((tree_contains_struct[(((enum tree_code) (obj)->base.code
))][(TS_DECL_WRTL)])) && (contains_struct_check ((obj
), (TS_DECL_WRTL), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 3793, __FUNCTION__))->decl_with_rtl.rtl != nullptr)
)
3794 break;
3795
3796 if (DECL_MODE (obj)((contains_struct_check ((obj), (TS_DECL_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 3796, __FUNCTION__))->decl_common.mode)
== BLKmode((void) 0, E_BLKmode))
3797 x = produce_memory_decl_rtl (obj, regno);
3798 else
3799 x = gen_raw_REG (DECL_MODE (obj)((contains_struct_check ((obj), (TS_DECL_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 3799, __FUNCTION__))->decl_common.mode)
, (*regno)++);
3800
3801 break;
3802
3803 default:
3804 break;
3805 }
3806
3807 if (x)
3808 {
3809 decl_rtl_to_reset.safe_push (obj);
3810 SET_DECL_RTL (obj, x)set_decl_rtl (obj, x);
3811 }
3812
3813 return NULL_TREE(tree) nullptr;
3814}
3815
3816/* Predict whether the given loop will be transformed in the RTL
3817 doloop_optimize pass. Attempt to duplicate some doloop_optimize checks.
3818 This is only for target independent checks, see targetm.predict_doloop_p
3819 for the target dependent ones.
3820
3821 Note that according to some initial investigation, some checks like costly
3822 niter check and invalid stmt scanning don't have much gains among general
3823 cases, so keep this as simple as possible first.
3824
3825 Some RTL specific checks seems unable to be checked in gimple, if any new
3826 checks or easy checks _are_ missing here, please add them. */
3827
3828static bool
3829generic_predict_doloop_p (struct ivopts_data *data)
3830{
3831 class loop *loop = data->current_loop;
3832
3833 /* Call target hook for target dependent checks. */
3834 if (!targetm.predict_doloop_p (loop))
3835 {
3836 if (dump_file && (dump_flags & TDF_DETAILS))
3837 fprintf (dump_file, "Predict doloop failure due to"
3838 " target specific checks.\n");
3839 return false;
3840 }
3841
3842 /* Similar to doloop_optimize, check iteration description to know it's
3843 suitable or not. Keep it as simple as possible, feel free to extend it
3844 if you find any multiple exits cases matter. */
3845 edge exit = single_dom_exit (loop);
3846 class tree_niter_desc *niter_desc;
3847 if (!exit || !(niter_desc = niter_for_exit (data, exit)))
3848 {
3849 if (dump_file && (dump_flags & TDF_DETAILS))
3850 fprintf (dump_file, "Predict doloop failure due to"
3851 " unexpected niters.\n");
3852 return false;
3853 }
3854
3855 /* Similar to doloop_optimize, check whether iteration count too small
3856 and not profitable. */
3857 HOST_WIDE_INTlong est_niter = get_estimated_loop_iterations_int (loop);
3858 if (est_niter == -1)
3859 est_niter = get_likely_max_loop_iterations_int (loop);
3860 if (est_niter >= 0 && est_niter < 3)
3861 {
3862 if (dump_file && (dump_flags & TDF_DETAILS))
3863 fprintf (dump_file,
3864 "Predict doloop failure due to"
3865 " too few iterations (%u).\n",
3866 (unsigned int) est_niter);
3867 return false;
3868 }
3869
3870 return true;
3871}
3872
3873/* Determines cost of the computation of EXPR. */
3874
3875static unsigned
3876computation_cost (tree expr, bool speed)
3877{
3878 rtx_insn *seq;
3879 rtx rslt;
3880 tree type = TREE_TYPE (expr)((contains_struct_check ((expr), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 3880, __FUNCTION__))->typed.type)
;
3881 unsigned cost;
3882 /* Avoid using hard regs in ways which may be unsupported. */
3883 int regno = LAST_VIRTUAL_REGISTER(((76)) + 5) + 1;
3884 struct cgraph_node *node = cgraph_node::get (current_function_decl);
3885 enum node_frequency real_frequency = node->frequency;
3886
3887 node->frequency = NODE_FREQUENCY_NORMAL;
3888 crtl(&x_rtl)->maybe_hot_insn_p = speed;
3889 walk_tree (&expr, prepare_decl_rtl, &regno, NULL)walk_tree_1 (&expr, prepare_decl_rtl, &regno, nullptr
, nullptr)
;
3890 start_sequence ();
3891 rslt = expand_expr (expr, NULL_RTX(rtx) 0, TYPE_MODE (type)((((enum tree_code) ((tree_class_check ((type), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 3891, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(type) : (type)->type_common.mode)
, EXPAND_NORMAL);
3892 seq = get_insns ();
3893 end_sequence ();
3894 default_rtl_profile ();
3895 node->frequency = real_frequency;
3896
3897 cost = seq_cost (seq, speed);
3898 if (MEM_P (rslt)(((enum rtx_code) (rslt)->code) == MEM))
3899 cost += address_cost (XEXP (rslt, 0)(((rslt)->u.fld[0]).rt_rtx), TYPE_MODE (type)((((enum tree_code) ((tree_class_check ((type), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 3899, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(type) : (type)->type_common.mode)
,
3900 TYPE_ADDR_SPACE (type)((tree_class_check ((type), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 3900, __FUNCTION__))->base.u.bits.address_space)
, speed);
3901 else if (!REG_P (rslt)(((enum rtx_code) (rslt)->code) == REG))
3902 cost += set_src_cost (rslt, TYPE_MODE (type)((((enum tree_code) ((tree_class_check ((type), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 3902, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(type) : (type)->type_common.mode)
, speed);
3903
3904 return cost;
3905}
3906
3907/* Returns variable containing the value of candidate CAND at statement AT. */
3908
3909static tree
3910var_at_stmt (class loop *loop, struct iv_cand *cand, gimple *stmt)
3911{
3912 if (stmt_after_increment (loop, cand, stmt))
3913 return cand->var_after;
3914 else
3915 return cand->var_before;
3916}
3917
3918/* If A is (TYPE) BA and B is (TYPE) BB, and the types of BA and BB have the
3919 same precision that is at least as wide as the precision of TYPE, stores
3920 BA to A and BB to B, and returns the type of BA. Otherwise, returns the
3921 type of A and B. */
3922
3923static tree
3924determine_common_wider_type (tree *a, tree *b)
3925{
3926 tree wider_type = NULLnullptr;
3927 tree suba, subb;
3928 tree atype = TREE_TYPE (*a)((contains_struct_check ((*a), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 3928, __FUNCTION__))->typed.type)
;
3929
3930 if (CONVERT_EXPR_P (*a)((((enum tree_code) (*a)->base.code)) == NOP_EXPR || (((enum
tree_code) (*a)->base.code)) == CONVERT_EXPR)
)
3931 {
3932 suba = TREE_OPERAND (*a, 0)(*((const_cast<tree*> (tree_operand_check ((*a), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 3932, __FUNCTION__)))))
;
3933 wider_type = TREE_TYPE (suba)((contains_struct_check ((suba), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 3933, __FUNCTION__))->typed.type)
;
3934 if (TYPE_PRECISION (wider_type)((tree_class_check ((wider_type), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 3934, __FUNCTION__))->type_common.precision)
< TYPE_PRECISION (atype)((tree_class_check ((atype), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 3934, __FUNCTION__))->type_common.precision)
)
3935 return atype;
3936 }
3937 else
3938 return atype;
3939
3940 if (CONVERT_EXPR_P (*b)((((enum tree_code) (*b)->base.code)) == NOP_EXPR || (((enum
tree_code) (*b)->base.code)) == CONVERT_EXPR)
)
3941 {
3942 subb = TREE_OPERAND (*b, 0)(*((const_cast<tree*> (tree_operand_check ((*b), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 3942, __FUNCTION__)))))
;
3943 if (TYPE_PRECISION (wider_type)((tree_class_check ((wider_type), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 3943, __FUNCTION__))->type_common.precision)
!= TYPE_PRECISION (TREE_TYPE (subb))((tree_class_check ((((contains_struct_check ((subb), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 3943, __FUNCTION__))->typed.type)), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 3943, __FUNCTION__))->type_common.precision)
)
3944 return atype;
3945 }
3946 else
3947 return atype;
3948
3949 *a = suba;
3950 *b = subb;
3951 return wider_type;
3952}
3953
3954/* Determines the expression by that USE is expressed from induction variable
3955 CAND at statement AT in LOOP. The expression is stored in two parts in a
3956 decomposed form. The invariant part is stored in AFF_INV; while variant
3957 part in AFF_VAR. Store ratio of CAND.step over USE.step in PRAT if it's
3958 non-null. Returns false if USE cannot be expressed using CAND. */
3959
3960static bool
3961get_computation_aff_1 (class loop *loop, gimple *at, struct iv_use *use,
3962 struct iv_cand *cand, class aff_tree *aff_inv,
3963 class aff_tree *aff_var, widest_int *prat = NULLnullptr)
3964{
3965 tree ubase = use->iv->base, ustep = use->iv->step;
3966 tree cbase = cand->iv->base, cstep = cand->iv->step;
3967 tree common_type, uutype, var, cstep_common;
3968 tree utype = TREE_TYPE (ubase)((contains_struct_check ((ubase), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 3968, __FUNCTION__))->typed.type)
, ctype = TREE_TYPE (cbase)((contains_struct_check ((cbase), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 3968, __FUNCTION__))->typed.type)
;
3969 aff_tree aff_cbase;
3970 widest_int rat;
3971
3972 /* We must have a precision to express the values of use. */
3973 if (TYPE_PRECISION (utype)((tree_class_check ((utype), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 3973, __FUNCTION__))->type_common.precision)
> TYPE_PRECISION (ctype)((tree_class_check ((ctype), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 3973, __FUNCTION__))->type_common.precision)
)
3974 return false;
3975
3976 var = var_at_stmt (loop, cand, at);
3977 uutype = unsigned_type_for (utype);
3978
3979 /* If the conversion is not noop, perform it. */
3980 if (TYPE_PRECISION (utype)((tree_class_check ((utype), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 3980, __FUNCTION__))->type_common.precision)
< TYPE_PRECISION (ctype)((tree_class_check ((ctype), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 3980, __FUNCTION__))->type_common.precision)
)
3981 {
3982 if (cand->orig_iv != NULLnullptr && CONVERT_EXPR_P (cbase)((((enum tree_code) (cbase)->base.code)) == NOP_EXPR || ((
(enum tree_code) (cbase)->base.code)) == CONVERT_EXPR)
3983 && (CONVERT_EXPR_P (cstep)((((enum tree_code) (cstep)->base.code)) == NOP_EXPR || ((
(enum tree_code) (cstep)->base.code)) == CONVERT_EXPR)
|| poly_int_tree_p (cstep)))
3984 {
3985 tree inner_base, inner_step, inner_type;
3986 inner_base = TREE_OPERAND (cbase, 0)(*((const_cast<tree*> (tree_operand_check ((cbase), (0)
, "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 3986, __FUNCTION__)))))
;
3987 if (CONVERT_EXPR_P (cstep)((((enum tree_code) (cstep)->base.code)) == NOP_EXPR || ((
(enum tree_code) (cstep)->base.code)) == CONVERT_EXPR)
)
3988 inner_step = TREE_OPERAND (cstep, 0)(*((const_cast<tree*> (tree_operand_check ((cstep), (0)
, "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 3988, __FUNCTION__)))))
;
3989 else
3990 inner_step = cstep;
3991
3992 inner_type = TREE_TYPE (inner_base)((contains_struct_check ((inner_base), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 3992, __FUNCTION__))->typed.type)
;
3993 /* If candidate is added from a biv whose type is smaller than
3994 ctype, we know both candidate and the biv won't overflow.
3995 In this case, it's safe to skip the convertion in candidate.
3996 As an example, (unsigned short)((unsigned long)A) equals to
3997 (unsigned short)A, if A has a type no larger than short. */
3998 if (TYPE_PRECISION (inner_type)((tree_class_check ((inner_type), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 3998, __FUNCTION__))->type_common.precision)
<= TYPE_PRECISION (uutype)((tree_class_check ((uutype), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 3998, __FUNCTION__))->type_common.precision)
)
3999 {
4000 cbase = inner_base;
4001 cstep = inner_step;
4002 }
4003 }
4004 cbase = fold_convert (uutype, cbase)fold_convert_loc (((location_t) 0), uutype, cbase);
4005 cstep = fold_convert (uutype, cstep)fold_convert_loc (((location_t) 0), uutype, cstep);
4006 var = fold_convert (uutype, var)fold_convert_loc (((location_t) 0), uutype, var);
4007 }
4008
4009 /* Ratio is 1 when computing the value of biv cand by itself.
4010 We can't rely on constant_multiple_of in this case because the
4011 use is created after the original biv is selected. The call
4012 could fail because of inconsistent fold behavior. See PR68021
4013 for more information. */
4014 if (cand->pos == IP_ORIGINAL && cand->incremented_at == use->stmt)
4015 {
4016 gcc_assert (is_gimple_assign (use->stmt))((void)(!(is_gimple_assign (use->stmt)) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 4016, __FUNCTION__), 0 : 0))
;
4017 gcc_assert (use->iv->ssa_name == cand->var_after)((void)(!(use->iv->ssa_name == cand->var_after) ? fancy_abort
("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 4017, __FUNCTION__), 0 : 0))
;
4018 gcc_assert (gimple_assign_lhs (use->stmt) == cand->var_after)((void)(!(gimple_assign_lhs (use->stmt) == cand->var_after
) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 4018, __FUNCTION__), 0 : 0))
;
4019 rat = 1;
4020 }
4021 else if (!constant_multiple_of (ustep, cstep, &rat))
4022 return false;
4023
4024 if (prat)
4025 *prat = rat;
4026
4027 /* In case both UBASE and CBASE are shortened to UUTYPE from some common
4028 type, we achieve better folding by computing their difference in this
4029 wider type, and cast the result to UUTYPE. We do not need to worry about
4030 overflows, as all the arithmetics will in the end be performed in UUTYPE
4031 anyway. */
4032 common_type = determine_common_wider_type (&ubase, &cbase);
4033
4034 /* use = ubase - ratio * cbase + ratio * var. */
4035 tree_to_aff_combination (ubase, common_type, aff_inv);
4036 tree_to_aff_combination (cbase, common_type, &aff_cbase);
4037 tree_to_aff_combination (var, uutype, aff_var);
4038
4039 /* We need to shift the value if we are after the increment. */
4040 if (stmt_after_increment (loop, cand, at))
4041 {
4042 aff_tree cstep_aff;
4043
4044 if (common_type != uutype)
4045 cstep_common = fold_convert (common_type, cstep)fold_convert_loc (((location_t) 0), common_type, cstep);
4046 else
4047 cstep_common = cstep;
4048
4049 tree_to_aff_combination (cstep_common, common_type, &cstep_aff);
4050 aff_combination_add (&aff_cbase, &cstep_aff);
4051 }
4052
4053 aff_combination_scale (&aff_cbase, -rat);
4054 aff_combination_add (aff_inv, &aff_cbase);
4055 if (common_type != uutype)
4056 aff_combination_convert (aff_inv, uutype);
4057
4058 aff_combination_scale (aff_var, rat);
4059 return true;
4060}
4061
4062/* Determines the expression by that USE is expressed from induction variable
4063 CAND at statement AT in LOOP. The expression is stored in a decomposed
4064 form into AFF. Returns false if USE cannot be expressed using CAND. */
4065
4066static bool
4067get_computation_aff (class loop *loop, gimple *at, struct iv_use *use,
4068 struct iv_cand *cand, class aff_tree *aff)
4069{
4070 aff_tree aff_var;
4071
4072 if (!get_computation_aff_1 (loop, at, use, cand, aff, &aff_var))
4073 return false;
4074
4075 aff_combination_add (aff, &aff_var);
4076 return true;
4077}
4078
4079/* Return the type of USE. */
4080
4081static tree
4082get_use_type (struct iv_use *use)
4083{
4084 tree base_type = TREE_TYPE (use->iv->base)((contains_struct_check ((use->iv->base), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 4084, __FUNCTION__))->typed.type)
;
4085 tree type;
4086
4087 if (use->type == USE_REF_ADDRESS)
4088 {
4089 /* The base_type may be a void pointer. Create a pointer type based on
4090 the mem_ref instead. */
4091 type = build_pointer_type (TREE_TYPE (*use->op_p)((contains_struct_check ((*use->op_p), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 4091, __FUNCTION__))->typed.type)
);
4092 gcc_assert (TYPE_ADDR_SPACE (TREE_TYPE (type))((void)(!(((tree_class_check ((((contains_struct_check ((type
), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 4092, __FUNCTION__))->typed.type)), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 4092, __FUNCTION__))->base.u.bits.address_space) == ((tree_class_check
((((contains_struct_check ((base_type), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 4093, __FUNCTION__))->typed.type)), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 4093, __FUNCTION__))->base.u.bits.address_space)) ? fancy_abort
("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 4093, __FUNCTION__), 0 : 0))
4093 == TYPE_ADDR_SPACE (TREE_TYPE (base_type)))((void)(!(((tree_class_check ((((contains_struct_check ((type
), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 4092, __FUNCTION__))->typed.type)), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 4092, __FUNCTION__))->base.u.bits.address_space) == ((tree_class_check
((((contains_struct_check ((base_type), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 4093, __FUNCTION__))->typed.type)), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 4093, __FUNCTION__))->base.u.bits.address_space)) ? fancy_abort
("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 4093, __FUNCTION__), 0 : 0))
;
4094 }
4095 else
4096 type = base_type;
4097
4098 return type;
4099}
4100
4101/* Determines the expression by that USE is expressed from induction variable
4102 CAND at statement AT in LOOP. The computation is unshared. */
4103
4104static tree
4105get_computation_at (class loop *loop, gimple *at,
4106 struct iv_use *use, struct iv_cand *cand)
4107{
4108 aff_tree aff;
4109 tree type = get_use_type (use);
4110
4111 if (!get_computation_aff (loop, at, use, cand, &aff))
4112 return NULL_TREE(tree) nullptr;
4113 unshare_aff_combination (&aff);
4114 return fold_convert (type, aff_combination_to_tree (&aff))fold_convert_loc (((location_t) 0), type, aff_combination_to_tree
(&aff))
;
4115}
4116
4117/* Like get_computation_at, but try harder, even if the computation
4118 is more expensive. Intended for debug stmts. */
4119
4120static tree
4121get_debug_computation_at (class loop *loop, gimple *at,
4122 struct iv_use *use, struct iv_cand *cand)
4123{
4124 if (tree ret = get_computation_at (loop, at, use, cand))
4125 return ret;
4126
4127 tree ubase = use->iv->base, ustep = use->iv->step;
4128 tree cbase = cand->iv->base, cstep = cand->iv->step;
4129 tree var;
4130 tree utype = TREE_TYPE (ubase)((contains_struct_check ((ubase), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 4130, __FUNCTION__))->typed.type)
, ctype = TREE_TYPE (cbase)((contains_struct_check ((cbase), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 4130, __FUNCTION__))->typed.type)
;
4131 widest_int rat;
4132
4133 /* We must have a precision to express the values of use. */
4134 if (TYPE_PRECISION (utype)((tree_class_check ((utype), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 4134, __FUNCTION__))->type_common.precision)
>= TYPE_PRECISION (ctype)((tree_class_check ((ctype), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 4134, __FUNCTION__))->type_common.precision)
)
4135 return NULL_TREE(tree) nullptr;
4136
4137 /* Try to handle the case that get_computation_at doesn't,
4138 try to express
4139 use = ubase + (var - cbase) / ratio. */
4140 if (!constant_multiple_of (cstep, fold_convert (TREE_TYPE (cstep), ustep)fold_convert_loc (((location_t) 0), ((contains_struct_check (
(cstep), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 4140, __FUNCTION__))->typed.type), ustep)
,
4141 &rat))
4142 return NULL_TREE(tree) nullptr;
4143
4144 bool neg_p = false;
4145 if (wi::neg_p (rat))
4146 {
4147 if (TYPE_UNSIGNED (ctype)((tree_class_check ((ctype), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 4147, __FUNCTION__))->base.u.bits.unsigned_flag)
)
4148 return NULL_TREE(tree) nullptr;
4149 neg_p = true;
4150 rat = wi::neg (rat);
4151 }
4152
4153 /* If both IVs can wrap around and CAND doesn't have a power of two step,
4154 it is unsafe. Consider uint16_t CAND with step 9, when wrapping around,
4155 the values will be ... 0xfff0, 0xfff9, 2, 11 ... and when use is say
4156 uint8_t with step 3, those values divided by 3 cast to uint8_t will be
4157 ... 0x50, 0x53, 0, 3 ... rather than expected 0x50, 0x53, 0x56, 0x59. */
4158 if (!use->iv->no_overflow
4159 && !cand->iv->no_overflow
4160 && !integer_pow2p (cstep))
4161 return NULL_TREE(tree) nullptr;
4162
4163 int bits = wi::exact_log2 (rat);
4164 if (bits == -1)
4165 bits = wi::floor_log2 (rat) + 1;
4166 if (!cand->iv->no_overflow
4167 && TYPE_PRECISION (utype)((tree_class_check ((utype), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 4167, __FUNCTION__))->type_common.precision)
+ bits > TYPE_PRECISION (ctype)((tree_class_check ((ctype), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 4167, __FUNCTION__))->type_common.precision)
)
4168 return NULL_TREE(tree) nullptr;
4169
4170 var = var_at_stmt (loop, cand, at);
4171
4172 if (POINTER_TYPE_P (ctype)(((enum tree_code) (ctype)->base.code) == POINTER_TYPE || (
(enum tree_code) (ctype)->base.code) == REFERENCE_TYPE)
)
4173 {
4174 ctype = unsigned_type_for (ctype);
4175 cbase = fold_convert (ctype, cbase)fold_convert_loc (((location_t) 0), ctype, cbase);
4176 cstep = fold_convert (ctype, cstep)fold_convert_loc (((location_t) 0), ctype, cstep);
4177 var = fold_convert (ctype, var)fold_convert_loc (((location_t) 0), ctype, var);
4178 }
4179
4180 if (stmt_after_increment (loop, cand, at))
4181 var = fold_build2 (MINUS_EXPR, TREE_TYPE (var), var,fold_build2_loc (((location_t) 0), MINUS_EXPR, ((contains_struct_check
((var), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 4181, __FUNCTION__))->typed.type), var, unshare_expr (cstep
) )
4182 unshare_expr (cstep))fold_build2_loc (((location_t) 0), MINUS_EXPR, ((contains_struct_check
((var), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 4181, __FUNCTION__))->typed.type), var, unshare_expr (cstep
) )
;
4183
4184 var = fold_build2 (MINUS_EXPR, TREE_TYPE (var), var, cbase)fold_build2_loc (((location_t) 0), MINUS_EXPR, ((contains_struct_check
((var), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 4184, __FUNCTION__))->typed.type), var, cbase )
;
4185 var = fold_build2 (EXACT_DIV_EXPR, TREE_TYPE (var), var,fold_build2_loc (((location_t) 0), EXACT_DIV_EXPR, ((contains_struct_check
((var), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 4185, __FUNCTION__))->typed.type), var, wide_int_to_tree
(((contains_struct_check ((var), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 4186, __FUNCTION__))->typed.type), rat) )
4186 wide_int_to_tree (TREE_TYPE (var), rat))fold_build2_loc (((location_t) 0), EXACT_DIV_EXPR, ((contains_struct_check
((var), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 4185, __FUNCTION__))->typed.type), var, wide_int_to_tree
(((contains_struct_check ((var), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 4186, __FUNCTION__))->typed.type), rat) )
;
4187 if (POINTER_TYPE_P (utype)(((enum tree_code) (utype)->base.code) == POINTER_TYPE || (
(enum tree_code) (utype)->base.code) == REFERENCE_TYPE)
)
4188 {
4189 var = fold_convert (sizetype, var)fold_convert_loc (((location_t) 0), sizetype_tab[(int) stk_sizetype
], var)
;
4190 if (neg_p)
4191 var = fold_build1 (NEGATE_EXPR, sizetype, var)fold_build1_loc (((location_t) 0), NEGATE_EXPR, sizetype_tab[
(int) stk_sizetype], var )
;
4192 var = fold_build2 (POINTER_PLUS_EXPR, utype, ubase, var)fold_build2_loc (((location_t) 0), POINTER_PLUS_EXPR, utype, ubase
, var )
;
4193 }
4194 else
4195 {
4196 var = fold_convert (utype, var)fold_convert_loc (((location_t) 0), utype, var);
4197 var = fold_build2 (neg_p ? MINUS_EXPR : PLUS_EXPR, utype,fold_build2_loc (((location_t) 0), neg_p ? MINUS_EXPR : PLUS_EXPR
, utype, ubase, var )
4198 ubase, var)fold_build2_loc (((location_t) 0), neg_p ? MINUS_EXPR : PLUS_EXPR
, utype, ubase, var )
;
4199 }
4200 return var;
4201}
4202
4203/* Adjust the cost COST for being in loop setup rather than loop body.
4204 If we're optimizing for space, the loop setup overhead is constant;
4205 if we're optimizing for speed, amortize it over the per-iteration cost.
4206 If ROUND_UP_P is true, the result is round up rather than to zero when
4207 optimizing for speed. */
4208static int64_t
4209adjust_setup_cost (struct ivopts_data *data, int64_t cost,
4210 bool round_up_p = false)
4211{
4212 if (cost == INFTY1000000000)
4213 return cost;
4214 else if (optimize_loop_for_speed_p (data->current_loop))
4215 {
4216 int64_t niters = (int64_t) avg_loop_niter (data->current_loop);
4217 return (cost + (round_up_p ? niters - 1 : 0)) / niters;
4218 }
4219 else
4220 return cost;
4221}
4222
4223/* Calculate the SPEED or size cost of shiftadd EXPR in MODE. MULT is the
4224 EXPR operand holding the shift. COST0 and COST1 are the costs for
4225 calculating the operands of EXPR. Returns true if successful, and returns
4226 the cost in COST. */
4227
4228static bool
4229get_shiftadd_cost (tree expr, scalar_int_mode mode, comp_cost cost0,
4230 comp_cost cost1, tree mult, bool speed, comp_cost *cost)
4231{
4232 comp_cost res;
4233 tree op1 = TREE_OPERAND (expr, 1)(*((const_cast<tree*> (tree_operand_check ((expr), (1),
"/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 4233, __FUNCTION__)))))
;
4234 tree cst = TREE_OPERAND (mult, 1)(*((const_cast<tree*> (tree_operand_check ((mult), (1),
"/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 4234, __FUNCTION__)))))
;
4235 tree multop = TREE_OPERAND (mult, 0)(*((const_cast<tree*> (tree_operand_check ((mult), (0),
"/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 4235, __FUNCTION__)))))
;
4236 int m = exact_log2 (int_cst_value (cst));
4237 int maxm = MIN (BITS_PER_WORD, GET_MODE_BITSIZE (mode))((((8) * (((global_options.x_ix86_isa_flags & (1UL <<
1)) != 0) ? 8 : 4))) < (GET_MODE_BITSIZE (mode)) ? (((8) *
(((global_options.x_ix86_isa_flags & (1UL << 1)) !=
0) ? 8 : 4))) : (GET_MODE_BITSIZE (mode)))
;
4238 int as_cost, sa_cost;
4239 bool mult_in_op1;
4240
4241 if (!(m >= 0 && m < maxm))
4242 return false;
4243
4244 STRIP_NOPS (op1)(op1) = tree_strip_nop_conversions ((const_cast<union tree_node
*> (((op1)))))
;
4245 mult_in_op1 = operand_equal_p (op1, mult, 0);
4246
4247 as_cost = add_cost (speed, mode) + shift_cost (speed, mode, m);
4248
4249 /* If the target has a cheap shift-and-add or shift-and-sub instruction,
4250 use that in preference to a shift insn followed by an add insn. */
4251 sa_cost = (TREE_CODE (expr)((enum tree_code) (expr)->base.code) != MINUS_EXPR
4252 ? shiftadd_cost (speed, mode, m)
4253 : (mult_in_op1
4254 ? shiftsub1_cost (speed, mode, m)
4255 : shiftsub0_cost (speed, mode, m)));
4256
4257 res = comp_cost (MIN (as_cost, sa_cost)((as_cost) < (sa_cost) ? (as_cost) : (sa_cost)), 0);
4258 res += (mult_in_op1 ? cost0 : cost1);
4259
4260 STRIP_NOPS (multop)(multop) = tree_strip_nop_conversions ((const_cast<union tree_node
*> (((multop)))))
;
4261 if (!is_gimple_val (multop))
4262 res += force_expr_to_var_cost (multop, speed);
4263
4264 *cost = res;
4265 return true;
4266}
4267
4268/* Estimates cost of forcing expression EXPR into a variable. */
4269
4270static comp_cost
4271force_expr_to_var_cost (tree expr, bool speed)
4272{
4273 static bool costs_initialized = false;
4274 static unsigned integer_cost [2];
4275 static unsigned symbol_cost [2];
4276 static unsigned address_cost [2];
4277 tree op0, op1;
4278 comp_cost cost0, cost1, cost;
4279 machine_mode mode;
4280 scalar_int_mode int_mode;
4281
4282 if (!costs_initialized)
4283 {
4284 tree type = build_pointer_type (integer_type_nodeinteger_types[itk_int]);
4285 tree var, addr;
4286 rtx x;
4287 int i;
4288
4289 var = create_tmp_var_raw (integer_type_nodeinteger_types[itk_int], "test_var");
4290 TREE_STATIC (var)((var)->base.static_flag) = 1;
4291 x = produce_memory_decl_rtl (var, NULLnullptr);
4292 SET_DECL_RTL (var, x)set_decl_rtl (var, x);
4293
4294 addr = build1 (ADDR_EXPR, type, var);
4295
4296
4297 for (i = 0; i < 2; i++)
4298 {
4299 integer_cost[i] = computation_cost (build_int_cst (integer_type_nodeinteger_types[itk_int],
4300 2000), i);
4301
4302 symbol_cost[i] = computation_cost (addr, i) + 1;
4303
4304 address_cost[i]
4305 = computation_cost (fold_build_pointer_plus_hwi (addr, 2000)fold_build_pointer_plus_hwi_loc (((location_t) 0), addr, 2000
)
, i) + 1;
4306 if (dump_file && (dump_flags & TDF_DETAILS))
4307 {
4308 fprintf (dump_file, "force_expr_to_var_cost %s costs:\n", i ? "speed" : "size");
4309 fprintf (dump_file, " integer %d\n", (int) integer_cost[i]);
4310 fprintf (dump_file, " symbol %d\n", (int) symbol_cost[i]);
4311 fprintf (dump_file, " address %d\n", (int) address_cost[i]);
4312 fprintf (dump_file, " other %d\n", (int) target_spill_cost(this_target_cfgloop->x_target_spill_cost)[i]);
4313 fprintf (dump_file, "\n");
4314 }
4315 }
4316
4317 costs_initialized = true;
4318 }
4319
4320 STRIP_NOPS (expr)(expr) = tree_strip_nop_conversions ((const_cast<union tree_node
*> (((expr)))))
;
4321
4322 if (SSA_VAR_P (expr)(((enum tree_code) (expr)->base.code) == VAR_DECL || ((enum
tree_code) (expr)->base.code) == PARM_DECL || ((enum tree_code
) (expr)->base.code) == RESULT_DECL || ((enum tree_code) (
expr)->base.code) == SSA_NAME)
)
4323 return no_cost;
4324
4325 if (is_gimple_min_invariant (expr))
4326 {
4327 if (poly_int_tree_p (expr))
4328 return comp_cost (integer_cost [speed], 0);
4329
4330 if (TREE_CODE (expr)((enum tree_code) (expr)->base.code) == ADDR_EXPR)
4331 {
4332 tree obj = TREE_OPERAND (expr, 0)(*((const_cast<tree*> (tree_operand_check ((expr), (0),
"/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 4332, __FUNCTION__)))))
;
4333
4334 if (VAR_P (obj)(((enum tree_code) (obj)->base.code) == VAR_DECL)
4335 || TREE_CODE (obj)((enum tree_code) (obj)->base.code) == PARM_DECL
4336 || TREE_CODE (obj)((enum tree_code) (obj)->base.code) == RESULT_DECL)
4337 return comp_cost (symbol_cost [speed], 0);
4338 }
4339
4340 return comp_cost (address_cost [speed], 0);
4341 }
4342
4343 switch (TREE_CODE (expr)((enum tree_code) (expr)->base.code))
4344 {
4345 case POINTER_PLUS_EXPR:
4346 case PLUS_EXPR:
4347 case MINUS_EXPR:
4348 case MULT_EXPR:
4349 case TRUNC_DIV_EXPR:
4350 case BIT_AND_EXPR:
4351 case BIT_IOR_EXPR:
4352 case LSHIFT_EXPR:
4353 case RSHIFT_EXPR:
4354 op0 = TREE_OPERAND (expr, 0)(*((const_cast<tree*> (tree_operand_check ((expr), (0),
"/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 4354, __FUNCTION__)))))
;
4355 op1 = TREE_OPERAND (expr, 1)(*((const_cast<tree*> (tree_operand_check ((expr), (1),
"/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 4355, __FUNCTION__)))))
;
4356 STRIP_NOPS (op0)(op0) = tree_strip_nop_conversions ((const_cast<union tree_node
*> (((op0)))))
;
4357 STRIP_NOPS (op1)(op1) = tree_strip_nop_conversions ((const_cast<union tree_node
*> (((op1)))))
;
4358 break;
4359
4360 CASE_CONVERTcase NOP_EXPR: case CONVERT_EXPR:
4361 case NEGATE_EXPR:
4362 case BIT_NOT_EXPR:
4363 op0 = TREE_OPERAND (expr, 0)(*((const_cast<tree*> (tree_operand_check ((expr), (0),
"/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 4363, __FUNCTION__)))))
;
4364 STRIP_NOPS (op0)(op0) = tree_strip_nop_conversions ((const_cast<union tree_node
*> (((op0)))))
;
4365 op1 = NULL_TREE(tree) nullptr;
4366 break;
4367 /* See add_iv_candidate_for_doloop, for doloop may_be_zero case, we
4368 introduce COND_EXPR for IV base, need to support better cost estimation
4369 for this COND_EXPR and tcc_comparison. */
4370 case COND_EXPR:
4371 op0 = TREE_OPERAND (expr, 1)(*((const_cast<tree*> (tree_operand_check ((expr), (1),
"/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 4371, __FUNCTION__)))))
;
4372 STRIP_NOPS (op0)(op0) = tree_strip_nop_conversions ((const_cast<union tree_node
*> (((op0)))))
;
4373 op1 = TREE_OPERAND (expr, 2)(*((const_cast<tree*> (tree_operand_check ((expr), (2),
"/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 4373, __FUNCTION__)))))
;
4374 STRIP_NOPS (op1)(op1) = tree_strip_nop_conversions ((const_cast<union tree_node
*> (((op1)))))
;
4375 break;
4376 case LT_EXPR:
4377 case LE_EXPR:
4378 case GT_EXPR:
4379 case GE_EXPR:
4380 case EQ_EXPR:
4381 case NE_EXPR:
4382 case UNORDERED_EXPR:
4383 case ORDERED_EXPR:
4384 case UNLT_EXPR:
4385 case UNLE_EXPR:
4386 case UNGT_EXPR:
4387 case UNGE_EXPR:
4388 case UNEQ_EXPR:
4389 case LTGT_EXPR:
4390 case MAX_EXPR:
4391 case MIN_EXPR:
4392 op0 = TREE_OPERAND (expr, 0)(*((const_cast<tree*> (tree_operand_check ((expr), (0),
"/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 4392, __FUNCTION__)))))
;
4393 STRIP_NOPS (op0)(op0) = tree_strip_nop_conversions ((const_cast<union tree_node
*> (((op0)))))
;
4394 op1 = TREE_OPERAND (expr, 1)(*((const_cast<tree*> (tree_operand_check ((expr), (1),
"/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 4394, __FUNCTION__)))))
;
4395 STRIP_NOPS (op1)(op1) = tree_strip_nop_conversions ((const_cast<union tree_node
*> (((op1)))))
;
4396 break;
4397
4398 default:
4399 /* Just an arbitrary value, FIXME. */
4400 return comp_cost (target_spill_cost(this_target_cfgloop->x_target_spill_cost)[speed], 0);
4401 }
4402
4403 if (op0 == NULL_TREE(tree) nullptr
4404 || TREE_CODE (op0)((enum tree_code) (op0)->base.code) == SSA_NAME || CONSTANT_CLASS_P (op0)(tree_code_type[(int) (((enum tree_code) (op0)->base.code)
)] == tcc_constant)
)
4405 cost0 = no_cost;
4406 else
4407 cost0 = force_expr_to_var_cost (op0, speed);
4408
4409 if (op1 == NULL_TREE(tree) nullptr
4410 || TREE_CODE (op1)((enum tree_code) (op1)->base.code) == SSA_NAME || CONSTANT_CLASS_P (op1)(tree_code_type[(int) (((enum tree_code) (op1)->base.code)
)] == tcc_constant)
)
4411 cost1 = no_cost;
4412 else
4413 cost1 = force_expr_to_var_cost (op1, speed);
4414
4415 mode = TYPE_MODE (TREE_TYPE (expr))((((enum tree_code) ((tree_class_check ((((contains_struct_check
((expr), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 4415, __FUNCTION__))->typed.type)), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 4415, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(((contains_struct_check ((expr), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 4415, __FUNCTION__))->typed.type)) : (((contains_struct_check
((expr), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 4415, __FUNCTION__))->typed.type))->type_common.mode)
;
4416 switch (TREE_CODE (expr)((enum tree_code) (expr)->base.code))
4417 {
4418 case POINTER_PLUS_EXPR:
4419 case PLUS_EXPR:
4420 case MINUS_EXPR:
4421 case NEGATE_EXPR:
4422 cost = comp_cost (add_cost (speed, mode), 0);
4423 if (TREE_CODE (expr)((enum tree_code) (expr)->base.code) != NEGATE_EXPR)
4424 {
4425 tree mult = NULL_TREE(tree) nullptr;
4426 comp_cost sa_cost;
4427 if (TREE_CODE (op1)((enum tree_code) (op1)->base.code) == MULT_EXPR)
4428 mult = op1;
4429 else if (TREE_CODE (op0)((enum tree_code) (op0)->base.code) == MULT_EXPR)
4430 mult = op0;
4431
4432 if (mult != NULL_TREE(tree) nullptr
4433 && is_a <scalar_int_mode> (mode, &int_mode)
4434 && cst_and_fits_in_hwi (TREE_OPERAND (mult, 1)(*((const_cast<tree*> (tree_operand_check ((mult), (1),
"/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 4434, __FUNCTION__)))))
)
4435 && get_shiftadd_cost (expr, int_mode, cost0, cost1, mult,
4436 speed, &sa_cost))
4437 return sa_cost;
4438 }
4439 break;
4440
4441 CASE_CONVERTcase NOP_EXPR: case CONVERT_EXPR:
4442 {
4443 tree inner_mode, outer_mode;
4444 outer_mode = TREE_TYPE (expr)((contains_struct_check ((expr), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 4444, __FUNCTION__))->typed.type)
;
4445 inner_mode = TREE_TYPE (op0)((contains_struct_check ((op0), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 4445, __FUNCTION__))->typed.type)
;
4446 cost = comp_cost (convert_cost (TYPE_MODE (outer_mode)((((enum tree_code) ((tree_class_check ((outer_mode), (tcc_type
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 4446, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(outer_mode) : (outer_mode)->type_common.mode)
,
4447 TYPE_MODE (inner_mode)((((enum tree_code) ((tree_class_check ((inner_mode), (tcc_type
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 4447, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(inner_mode) : (inner_mode)->type_common.mode)
, speed), 0);
4448 }
4449 break;
4450
4451 case MULT_EXPR:
4452 if (cst_and_fits_in_hwi (op0))
4453 cost = comp_cost (mult_by_coeff_cost (int_cst_value (op0),
4454 mode, speed), 0);
4455 else if (cst_and_fits_in_hwi (op1))
4456 cost = comp_cost (mult_by_coeff_cost (int_cst_value (op1),
4457 mode, speed), 0);
4458 else
4459 return comp_cost (target_spill_cost(this_target_cfgloop->x_target_spill_cost) [speed], 0);
4460 break;
4461
4462 case TRUNC_DIV_EXPR:
4463 /* Division by power of two is usually cheap, so we allow it. Forbid
4464 anything else. */
4465 if (integer_pow2p (TREE_OPERAND (expr, 1)(*((const_cast<tree*> (tree_operand_check ((expr), (1),
"/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 4465, __FUNCTION__)))))
))
4466 cost = comp_cost (add_cost (speed, mode), 0);
4467 else
4468 cost = comp_cost (target_spill_cost(this_target_cfgloop->x_target_spill_cost)[speed], 0);
4469 break;
4470
4471 case BIT_AND_EXPR:
4472 case BIT_IOR_EXPR:
4473 case BIT_NOT_EXPR:
4474 case LSHIFT_EXPR:
4475 case RSHIFT_EXPR:
4476 cost = comp_cost (add_cost (speed, mode), 0);
4477 break;
4478 case COND_EXPR:
4479 op0 = TREE_OPERAND (expr, 0)(*((const_cast<tree*> (tree_operand_check ((expr), (0),
"/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 4479, __FUNCTION__)))))
;
4480 STRIP_NOPS (op0)(op0) = tree_strip_nop_conversions ((const_cast<union tree_node
*> (((op0)))))
;
4481 if (op0 == NULL_TREE(tree) nullptr || TREE_CODE (op0)((enum tree_code) (op0)->base.code) == SSA_NAME
4482 || CONSTANT_CLASS_P (op0)(tree_code_type[(int) (((enum tree_code) (op0)->base.code)
)] == tcc_constant)
)
4483 cost = no_cost;
4484 else
4485 cost = force_expr_to_var_cost (op0, speed);
4486 break;
4487 case LT_EXPR:
4488 case LE_EXPR:
4489 case GT_EXPR:
4490 case GE_EXPR:
4491 case EQ_EXPR:
4492 case NE_EXPR:
4493 case UNORDERED_EXPR:
4494 case ORDERED_EXPR:
4495 case UNLT_EXPR:
4496 case UNLE_EXPR:
4497 case UNGT_EXPR:
4498 case UNGE_EXPR:
4499 case UNEQ_EXPR:
4500 case LTGT_EXPR:
4501 case MAX_EXPR:
4502 case MIN_EXPR:
4503 /* Simply use add cost for now, FIXME if there is some more accurate cost
4504 evaluation way. */
4505 cost = comp_cost (add_cost (speed, mode), 0);
4506 break;
4507
4508 default:
4509 gcc_unreachable ()(fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 4509, __FUNCTION__))
;
4510 }
4511
4512 cost += cost0;
4513 cost += cost1;
4514 return cost;
4515}
4516
4517/* Estimates cost of forcing EXPR into a variable. INV_VARS is a set of the
4518 invariants the computation depends on. */
4519
4520static comp_cost
4521force_var_cost (struct ivopts_data *data, tree expr, bitmap *inv_vars)
4522{
4523 if (!expr)
4524 return no_cost;
4525
4526 find_inv_vars (data, &expr, inv_vars);
4527 return force_expr_to_var_cost (expr, data->speed);
4528}
4529
/* Returns cost of auto-modifying address expression in shape base + offset.
   AINC_STEP is step size of the address IV.  AINC_OFFSET is offset of the
   address expression.  The address expression has ADDR_MODE in addr space
   AS.  The memory access has MEM_MODE.  SPEED means we are optimizing for
   speed or size.  */

enum ainc_type
{
  AINC_PRE_INC,		/* Pre increment.  */
  AINC_PRE_DEC,		/* Pre decrement.  */
  AINC_POST_INC,	/* Post increment.  */
  AINC_POST_DEC,	/* Post decrement.  */
  AINC_NONE		/* Also the number of auto increment types.  */
};
4544
4545struct ainc_cost_data
4546{
4547 int64_t costs[AINC_NONE];
4548};
4549
4550static comp_cost
4551get_address_cost_ainc (poly_int64 ainc_step, poly_int64 ainc_offset,
4552 machine_mode addr_mode, machine_mode mem_mode,
4553 addr_space_t as, bool speed)
4554{
4555 if (!USE_LOAD_PRE_DECREMENT (mem_mode)0
4556 && !USE_STORE_PRE_DECREMENT (mem_mode)0
4557 && !USE_LOAD_POST_DECREMENT (mem_mode)0
4558 && !USE_STORE_POST_DECREMENT (mem_mode)0
4559 && !USE_LOAD_PRE_INCREMENT (mem_mode)0
4560 && !USE_STORE_PRE_INCREMENT (mem_mode)0
4561 && !USE_LOAD_POST_INCREMENT (mem_mode)0
4562 && !USE_STORE_POST_INCREMENT (mem_mode)0)
4563 return infinite_cost;
4564
4565 static vec<ainc_cost_data *> ainc_cost_data_list;
4566 unsigned idx = (unsigned) as * MAX_MACHINE_MODE + (unsigned) mem_mode;
4567 if (idx >= ainc_cost_data_list.length ())
4568 {
4569 unsigned nsize = ((unsigned) as + 1) *MAX_MACHINE_MODE;
4570
4571 gcc_assert (nsize > idx)((void)(!(nsize > idx) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 4571, __FUNCTION__), 0 : 0))
;
4572 ainc_cost_data_list.safe_grow_cleared (nsize, true);
4573 }
4574
4575 ainc_cost_data *data = ainc_cost_data_list[idx];
4576 if (data == NULLnullptr)
4577 {
4578 rtx reg = gen_raw_REG (addr_mode, LAST_VIRTUAL_REGISTER(((76)) + 5) + 1);
4579
4580 data = (ainc_cost_data *) xcalloc (1, sizeof (*data));
4581 data->costs[AINC_PRE_DEC] = INFTY1000000000;
4582 data->costs[AINC_POST_DEC] = INFTY1000000000;
4583 data->costs[AINC_PRE_INC] = INFTY1000000000;
4584 data->costs[AINC_POST_INC] = INFTY1000000000;
4585 if (USE_LOAD_PRE_DECREMENT (mem_mode)0
4586 || USE_STORE_PRE_DECREMENT (mem_mode)0)
4587 {
4588 rtx addr = gen_rtx_PRE_DEC (addr_mode, reg)gen_rtx_fmt_e_stat ((PRE_DEC), ((addr_mode)), ((reg)) );
4589
4590 if (memory_address_addr_space_p (mem_mode, addr, as))
4591 data->costs[AINC_PRE_DEC]
4592 = address_cost (addr, mem_mode, as, speed);
4593 }
4594 if (USE_LOAD_POST_DECREMENT (mem_mode)0
4595 || USE_STORE_POST_DECREMENT (mem_mode)0)
4596 {
4597 rtx addr = gen_rtx_POST_DEC (addr_mode, reg)gen_rtx_fmt_e_stat ((POST_DEC), ((addr_mode)), ((reg)) );
4598
4599 if (memory_address_addr_space_p (mem_mode, addr, as))
4600 data->costs[AINC_POST_DEC]
4601 = address_cost (addr, mem_mode, as, speed);
4602 }
4603 if (USE_LOAD_PRE_INCREMENT (mem_mode)0
4604 || USE_STORE_PRE_INCREMENT (mem_mode)0)
4605 {
4606 rtx addr = gen_rtx_PRE_INC (addr_mode, reg)gen_rtx_fmt_e_stat ((PRE_INC), ((addr_mode)), ((reg)) );
4607
4608 if (memory_address_addr_space_p (mem_mode, addr, as))
4609 data->costs[AINC_PRE_INC]
4610 = address_cost (addr, mem_mode, as, speed);
4611 }
4612 if (USE_LOAD_POST_INCREMENT (mem_mode)0
4613 || USE_STORE_POST_INCREMENT (mem_mode)0)
4614 {
4615 rtx addr = gen_rtx_POST_INC (addr_mode, reg)gen_rtx_fmt_e_stat ((POST_INC), ((addr_mode)), ((reg)) );
4616
4617 if (memory_address_addr_space_p (mem_mode, addr, as))
4618 data->costs[AINC_POST_INC]
4619 = address_cost (addr, mem_mode, as, speed);
4620 }
4621 ainc_cost_data_list[idx] = data;
4622 }
4623
4624 poly_int64 msize = GET_MODE_SIZE (mem_mode);
4625 if (known_eq (ainc_offset, 0)(!maybe_ne (ainc_offset, 0)) && known_eq (msize, ainc_step)(!maybe_ne (msize, ainc_step)))
4626 return comp_cost (data->costs[AINC_POST_INC], 0);
4627 if (known_eq (ainc_offset, 0)(!maybe_ne (ainc_offset, 0)) && known_eq (msize, -ainc_step)(!maybe_ne (msize, -ainc_step)))
4628 return comp_cost (data->costs[AINC_POST_DEC], 0);
4629 if (known_eq (ainc_offset, msize)(!maybe_ne (ainc_offset, msize)) && known_eq (msize, ainc_step)(!maybe_ne (msize, ainc_step)))
4630 return comp_cost (data->costs[AINC_PRE_INC], 0);
4631 if (known_eq (ainc_offset, -msize)(!maybe_ne (ainc_offset, -msize)) && known_eq (msize, -ainc_step)(!maybe_ne (msize, -ainc_step)))
4632 return comp_cost (data->costs[AINC_PRE_DEC], 0);
4633
4634 return infinite_cost;
4635}
4636
4637/* Return cost of computing USE's address expression by using CAND.
4638 AFF_INV and AFF_VAR represent invariant and variant parts of the
4639 address expression, respectively. If AFF_INV is simple, store
4640 the loop invariant variables which are depended by it in INV_VARS;
4641 if AFF_INV is complicated, handle it as a new invariant expression
4642 and record it in INV_EXPR. RATIO indicates multiple times between
4643 steps of USE and CAND. If CAN_AUTOINC is nonNULL, store boolean
4644 value to it indicating if this is an auto-increment address. */
4645
4646static comp_cost
4647get_address_cost (struct ivopts_data *data, struct iv_use *use,
4648 struct iv_cand *cand, aff_tree *aff_inv,
4649 aff_tree *aff_var, HOST_WIDE_INTlong ratio,
4650 bitmap *inv_vars, iv_inv_expr_ent **inv_expr,
4651 bool *can_autoinc, bool speed)
4652{
4653 rtx addr;
4654 bool simple_inv = true;
4655 tree comp_inv = NULL_TREE(tree) nullptr, type = aff_var->type;
4656 comp_cost var_cost = no_cost, cost = no_cost;
4657 struct mem_address parts = {NULL_TREE(tree) nullptr, integer_one_nodeglobal_trees[TI_INTEGER_ONE],
4658 NULL_TREE(tree) nullptr, NULL_TREE(tree) nullptr, NULL_TREE(tree) nullptr};
4659 machine_mode addr_mode = TYPE_MODE (type)((((enum tree_code) ((tree_class_check ((type), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 4659, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(type) : (type)->type_common.mode)
;
4660 machine_mode mem_mode = TYPE_MODE (use->mem_type)((((enum tree_code) ((tree_class_check ((use->mem_type), (
tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 4660, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(use->mem_type) : (use->mem_type)->type_common.mode
)
;
4661 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (use->iv->base))((tree_class_check ((((contains_struct_check ((use->iv->
base), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 4661, __FUNCTION__))->typed.type)), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 4661, __FUNCTION__))->base.u.bits.address_space)
;
4662 /* Only true if ratio != 1. */
4663 bool ok_with_ratio_p = false;
4664 bool ok_without_ratio_p = false;
 /* Non-constant invariant part: probe increasingly rich addressing
    modes ("base + index", "base + index << scale", "+ offset",
    "symbol + ..."), dropping each piece that the target rejects.  */
4666 if (!aff_combination_const_p (aff_inv))
4667 {
4668 parts.index = integer_one_nodeglobal_trees[TI_INTEGER_ONE];
4669 /* Addressing mode "base + index". */
4670 ok_without_ratio_p = valid_mem_ref_p (mem_mode, as, &parts);
4671 if (ratio != 1)
4672 {
4673 parts.step = wide_int_to_tree (type, ratio);
4674 /* Addressing mode "base + index << scale". */
4675 ok_with_ratio_p = valid_mem_ref_p (mem_mode, as, &parts);
4676 if (!ok_with_ratio_p)
4677 parts.step = NULL_TREE(tree) nullptr;
4678 }
4679 if (ok_with_ratio_p || ok_without_ratio_p)
4680 {
4681 if (maybe_ne (aff_inv->offset, 0))
4682 {
4683 parts.offset = wide_int_to_tree (sizetypesizetype_tab[(int) stk_sizetype], aff_inv->offset);
4684 /* Addressing mode "base + index [<< scale] + offset". */
4685 if (!valid_mem_ref_p (mem_mode, as, &parts))
4686 parts.offset = NULL_TREE(tree) nullptr;
4687 else
4688 aff_inv->offset = 0;
4689 }
4690
4691 move_fixed_address_to_symbol (&parts, aff_inv);
4692 /* Base is fixed address and is moved to symbol part. */
4693 if (parts.symbol != NULL_TREE(tree) nullptr && aff_combination_zero_p (aff_inv))
4694 parts.base = NULL_TREE(tree) nullptr;
4695
4696 /* Addressing mode "symbol + base + index [<< scale] [+ offset]". */
4697 if (parts.symbol != NULL_TREE(tree) nullptr
4698 && !valid_mem_ref_p (mem_mode, as, &parts))
4699 {
4700 aff_combination_add_elt (aff_inv, parts.symbol, 1);
4701 parts.symbol = NULL_TREE(tree) nullptr;
4702 /* Reset SIMPLE_INV since symbol address needs to be computed
4703 outside of address expression in this case. */
4704 simple_inv = false;
4705 /* Symbol part is moved back to base part, it can't be NULL. */
4706 parts.base = integer_one_nodeglobal_trees[TI_INTEGER_ONE];
4707 }
4708 }
4709 else
4710 parts.index = NULL_TREE(tree) nullptr;
4711 }
4712 else
4713 {
 /* AFF_INV is constant: first try an auto-increment addressing mode
    for this candidate, then fall back to plain "base + offset".  */
4714 poly_int64 ainc_step;
4715 if (can_autoinc
4716 && ratio == 1
4717 && ptrdiff_tree_p (cand->iv->step, &ainc_step))
4718 {
4719 poly_int64 ainc_offset = (aff_inv->offset).force_shwi ();
4720
4721 if (stmt_after_increment (data->current_loop, cand, use->stmt))
4722 ainc_offset += ainc_step;
4723 cost = get_address_cost_ainc (ainc_step, ainc_offset,
4724 addr_mode, mem_mode, as, speed);
4725 if (!cost.infinite_cost_p ())
4726 {
4727 *can_autoinc = true;
4728 return cost;
4729 }
4730 cost = no_cost;
4731 }
4732 if (!aff_combination_zero_p (aff_inv))
4733 {
4734 parts.offset = wide_int_to_tree (sizetypesizetype_tab[(int) stk_sizetype], aff_inv->offset);
4735 /* Addressing mode "base + offset". */
4736 if (!valid_mem_ref_p (mem_mode, as, &parts))
4737 parts.offset = NULL_TREE(tree) nullptr;
4738 else
4739 aff_inv->offset = 0;
4740 }
4741 }
 /* SIMPLE_INV stays true only when the remaining invariant part is
    absent, constant, or a single variable.  */
4743 if (simple_inv)
4744 simple_inv = (aff_inv == NULLnullptr
4745 || aff_combination_const_p (aff_inv)
4746 || aff_combination_singleton_var_p (aff_inv));
4747 if (!aff_combination_zero_p (aff_inv))
4748 comp_inv = aff_combination_to_tree (aff_inv);
4749 if (comp_inv != NULL_TREE(tree) nullptr)
4750 cost = force_var_cost (data, comp_inv, inv_vars);
4751 if (ratio != 1 && parts.step == NULL_TREE(tree) nullptr)
4752 var_cost += mult_by_coeff_cost (ratio, addr_mode, speed);
4753 if (comp_inv != NULL_TREE(tree) nullptr && parts.index == NULL_TREE(tree) nullptr)
4754 var_cost += add_cost (speed, addr_mode);
4755
4756 if (comp_inv && inv_expr && !simple_inv)
4757 {
4758 *inv_expr = get_loop_invariant_expr (data, comp_inv);
4759 /* Clear depends on. */
4760 if (*inv_expr != NULLnullptr && inv_vars && *inv_vars)
4761 bitmap_clear (*inv_vars);
4762
4763 /* Cost of small invariant expression adjusted against loop niters
4764 is usually zero, which makes it difficult to be differentiated
4765 from candidate based on loop invariant variables. Secondly, the
4766 generated invariant expression may not be hoisted out of loop by
4767 following pass. We penalize the cost by rounding up in order to
4768 neutralize such effects. */
4769 cost.cost = adjust_setup_cost (data, cost.cost, true);
4770 cost.scratch = cost.cost;
4771 }
 /* Final cost: invariant part + variant part + the cost of the address
    expression itself, plus complexity bumps per addressing component.  */
4773 cost += var_cost;
4774 addr = addr_for_mem_ref (&parts, as, false);
4775 gcc_assert (memory_address_addr_space_p (mem_mode, addr, as))((void)(!(memory_address_addr_space_p (mem_mode, addr, as)) ?
fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 4775, __FUNCTION__), 0 : 0))
;
4776 cost += address_cost (addr, mem_mode, as, speed);
4777
4778 if (parts.symbol != NULL_TREE(tree) nullptr)
4779 cost.complexity += 1;
4780 /* Don't increase the complexity of adding a scaled index if it's
4781 the only kind of index that the target allows. */
4782 if (parts.step != NULL_TREE(tree) nullptr && ok_without_ratio_p)
4783 cost.complexity += 1;
4784 if (parts.base != NULL_TREE(tree) nullptr && parts.index != NULL_TREE(tree) nullptr)
4785 cost.complexity += 1;
4786 if (parts.offset != NULL_TREE(tree) nullptr && !integer_zerop (parts.offset))
4787 cost.complexity += 1;
4788
4789 return cost;
4790}
4791
4792/* Scale (multiply) the computed COST (except scratch part that should be
4793 hoisted out a loop) by header->frequency / AT->frequency, which makes
4794 expected cost more accurate. */
4795
4796static comp_cost
4797get_scaled_computation_cost_at (ivopts_data *data, gimple *at, comp_cost cost)
4798{
4799 if (data->speed
4800 && data->current_loop->header->count.to_frequency (cfun(cfun + 0)) > 0)
4801 {
4802 basic_block bb = gimple_bb (at);
4803 gcc_assert (cost.scratch <= cost.cost)((void)(!(cost.scratch <= cost.cost) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 4803, __FUNCTION__), 0 : 0))
;
 /* The per-BB scale factor was precomputed elsewhere and stashed in
    the basic block's aux field.  */
4804 int scale_factor = (int)(intptr_t) bb->aux;
4805 if (scale_factor == 1)
4806 return cost;
 /* Only the non-scratch (per-iteration) part of the cost is scaled;
    the scratch part is setup cost hoisted out of the loop.  */
4808 int64_t scaled_cost
4809 = cost.scratch + (cost.cost - cost.scratch) * scale_factor;
4810
4811 if (dump_file && (dump_flags & TDF_DETAILS))
4812 fprintf (dump_file, "Scaling cost based on bb prob by %2.2f: "
4813 "%" PRId64"l" "d" " (scratch: %" PRId64"l" "d" ") -> %" PRId64"l" "d" "\n",
4814 1.0f * scale_factor, cost.cost, cost.scratch, scaled_cost);
4815
4816 cost.cost = scaled_cost;
4817 }
4818
4819 return cost;
4820}
4821
4822/* Determines the cost of the computation by that USE is expressed
4823 from induction variable CAND. If ADDRESS_P is true, we just need
4824 to create an address from it, otherwise we want to get it into
4825 register. A set of invariants we depend on is stored in INV_VARS.
4826 If CAN_AUTOINC is nonnull, use it to record whether autoinc
4827 addressing is likely. If INV_EXPR is nonnull, record invariant
4828 expr entry in it. */
4829
4830static comp_cost
4831get_computation_cost (struct ivopts_data *data, struct iv_use *use,
4832 struct iv_cand *cand, bool address_p, bitmap *inv_vars,
4833 bool *can_autoinc, iv_inv_expr_ent **inv_expr)
4834{
4835 gimple *at = use->stmt;
4836 tree ubase = use->iv->base, cbase = cand->iv->base;
4837 tree utype = TREE_TYPE (ubase)((contains_struct_check ((ubase), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 4837, __FUNCTION__))->typed.type)
, ctype = TREE_TYPE (cbase)((contains_struct_check ((cbase), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 4837, __FUNCTION__))->typed.type)
;
4838 tree comp_inv = NULL_TREE(tree) nullptr;
4839 HOST_WIDE_INTlong ratio, aratio;
4840 comp_cost cost;
4841 widest_int rat;
4842 aff_tree aff_inv, aff_var;
4843 bool speed = optimize_bb_for_speed_p (gimple_bb (at));
 /* Reset all out-parameters before any early return.  */
4845 if (inv_vars)
4846 *inv_vars = NULLnullptr;
4847 if (can_autoinc)
4848 *can_autoinc = false;
4849 if (inv_expr)
4850 *inv_expr = NULLnullptr;
4851
4852 /* Check if we have enough precision to express the values of use. */
4853 if (TYPE_PRECISION (utype)((tree_class_check ((utype), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 4853, __FUNCTION__))->type_common.precision)
> TYPE_PRECISION (ctype)((tree_class_check ((ctype), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 4853, __FUNCTION__))->type_common.precision)
)
4854 return infinite_cost;
4855
4856 if (address_p
4857 || (use->iv->base_object
4858 && cand->iv->base_object
4859 && POINTER_TYPE_P (TREE_TYPE (use->iv->base_object))(((enum tree_code) (((contains_struct_check ((use->iv->
base_object), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 4859, __FUNCTION__))->typed.type))->base.code) == POINTER_TYPE
|| ((enum tree_code) (((contains_struct_check ((use->iv->
base_object), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 4859, __FUNCTION__))->typed.type))->base.code) == REFERENCE_TYPE
)
4860 && POINTER_TYPE_P (TREE_TYPE (cand->iv->base_object))(((enum tree_code) (((contains_struct_check ((cand->iv->
base_object), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 4860, __FUNCTION__))->typed.type))->base.code) == POINTER_TYPE
|| ((enum tree_code) (((contains_struct_check ((cand->iv->
base_object), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 4860, __FUNCTION__))->typed.type))->base.code) == REFERENCE_TYPE
)
))
4861 {
4862 /* Do not try to express address of an object with computation based
4863 on address of a different object. This may cause problems in rtl
4864 level alias analysis (that does not expect this to be happening,
4865 as this is illegal in C), and would be unlikely to be useful
4866 anyway. */
4867 if (use->iv->base_object
4868 && cand->iv->base_object
4869 && !operand_equal_p (use->iv->base_object, cand->iv->base_object, 0))
4870 return infinite_cost;
4871 }
4872
4873 if (!get_computation_aff_1 (data->current_loop, at, use,
4874 cand, &aff_inv, &aff_var, &rat)
4875 || !wi::fits_shwi_p (rat))
4876 return infinite_cost;
4877
4878 ratio = rat.to_shwi ();
 /* Address uses are costed separately by get_address_cost.  */
4879 if (address_p)
4880 {
4881 cost = get_address_cost (data, use, cand, &aff_inv, &aff_var, ratio,
4882 inv_vars, inv_expr, can_autoinc, speed);
4883 cost = get_scaled_computation_cost_at (data, at, cost);
4884 /* For doloop IV cand, add on the extra cost. */
4885 cost += cand->doloop_p ? targetm.doloop_cost_for_address : 0;
4886 return cost;
4887 }
4888
4889 bool simple_inv = (aff_combination_const_p (&aff_inv)
4890 || aff_combination_singleton_var_p (&aff_inv));
4891 tree signed_type = signed_type_for (aff_combination_type (&aff_inv));
4892 aff_combination_convert (&aff_inv, signed_type);
4893 if (!aff_combination_zero_p (&aff_inv))
4894 comp_inv = aff_combination_to_tree (&aff_inv);
4895
4896 cost = force_var_cost (data, comp_inv, inv_vars);
4897 if (comp_inv && inv_expr && !simple_inv)
4898 {
4899 *inv_expr = get_loop_invariant_expr (data, comp_inv);
4900 /* Clear depends on. */
4901 if (*inv_expr != NULLnullptr && inv_vars && *inv_vars)
4902 bitmap_clear (*inv_vars);
4903
4904 cost.cost = adjust_setup_cost (data, cost.cost);
4905 /* Record setup cost in scratch field. */
4906 cost.scratch = cost.cost;
4907 }
4908 /* Cost of constant integer can be covered when adding invariant part to
4909 variant part. */
4910 else if (comp_inv && CONSTANT_CLASS_P (comp_inv)(tree_code_type[(int) (((enum tree_code) (comp_inv)->base.
code))] == tcc_constant)
)
4911 cost = no_cost;
4912
4913 /* Need type narrowing to represent use with cand. */
4914 if (TYPE_PRECISION (utype)((tree_class_check ((utype), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 4914, __FUNCTION__))->type_common.precision)
< TYPE_PRECISION (ctype)((tree_class_check ((ctype), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 4914, __FUNCTION__))->type_common.precision)
)
4915 {
4916 machine_mode outer_mode = TYPE_MODE (utype)((((enum tree_code) ((tree_class_check ((utype), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 4916, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(utype) : (utype)->type_common.mode)
;
4917 machine_mode inner_mode = TYPE_MODE (ctype)((((enum tree_code) ((tree_class_check ((ctype), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 4917, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(ctype) : (ctype)->type_common.mode)
;
4918 cost += comp_cost (convert_cost (outer_mode, inner_mode, speed), 0);
4919 }
4920
4921 /* Turn a + i * (-c) into a - i * c. */
4922 if (ratio < 0 && comp_inv && !integer_zerop (comp_inv))
4923 aratio = -ratio;
4924 else
4925 aratio = ratio;
4926
4927 if (ratio != 1)
4928 cost += mult_by_coeff_cost (aratio, TYPE_MODE (utype)((((enum tree_code) ((tree_class_check ((utype), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 4928, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(utype) : (utype)->type_common.mode)
, speed);
4929
4930 /* TODO: We may also need to check if we can compute a + i * 4 in one
4931 instruction. */
4932 /* Need to add up the invariant and variant parts. */
4933 if (comp_inv && !integer_zerop (comp_inv))
4934 cost += add_cost (speed, TYPE_MODE (utype)((((enum tree_code) ((tree_class_check ((utype), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 4934, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(utype) : (utype)->type_common.mode)
);
4935
4936 cost = get_scaled_computation_cost_at (data, at, cost);
4937
4938 /* For doloop IV cand, add on the extra cost. */
4939 if (cand->doloop_p && use->type == USE_NONLINEAR_EXPR)
4940 cost += targetm.doloop_cost_for_generic;
4941
4942 return cost;
4943}
4944
4945/* Determines cost of computing the use in GROUP with CAND in a generic
4946 expression. */
4947
4948static bool
4949determine_group_iv_cost_generic (struct ivopts_data *data,
4950 struct iv_group *group, struct iv_cand *cand)
4951{
4952 comp_cost cost;
4953 iv_inv_expr_ent *inv_expr = NULLnullptr;
4954 bitmap inv_vars = NULLnullptr, inv_exprs = NULLnullptr;
4955 struct iv_use *use = group->vuses[0];
4956
4957 /* The simple case first -- if we need to express value of the preserved
4958 original biv, the cost is 0. This also prevents us from counting the
4959 cost of increment twice -- once at this use and once in the cost of
4960 the candidate. */
4961 if (cand->pos == IP_ORIGINAL && cand->incremented_at == use->stmt)
4962 cost = no_cost;
4963 else
4964 cost = get_computation_cost (data, use, cand, false,
4965 &inv_vars, NULLnullptr, &inv_expr);
 /* Record the id of the invariant expression, if any, in a fresh
    bitmap passed on to set_group_iv_cost.  */
4967 if (inv_expr)
4968 {
4969 inv_exprs = BITMAP_ALLOCbitmap_alloc (NULLnullptr);
4970 bitmap_set_bit (inv_exprs, inv_expr->id);
4971 }
4972 set_group_iv_cost (data, group, cand, cost, inv_vars,
4973 NULL_TREE(tree) nullptr, ERROR_MARK, inv_exprs);
4974 return !cost.infinite_cost_p ();
4975}
4976
4977/* Determines cost of computing uses in GROUP with CAND in addresses. */
4978
4979static bool
4980determine_group_iv_cost_address (struct ivopts_data *data,
4981 struct iv_group *group, struct iv_cand *cand)
4982{
4983 unsigned i;
4984 bitmap inv_vars = NULLnullptr, inv_exprs = NULLnullptr;
4985 bool can_autoinc;
4986 iv_inv_expr_ent *inv_expr = NULLnullptr;
4987 struct iv_use *use = group->vuses[0];
4988 comp_cost sum_cost = no_cost, cost;
 /* Cost of the first use; the remaining uses of the group are added
    in the loop below.  */
4990 cost = get_computation_cost (data, use, cand, true,
4991 &inv_vars, &can_autoinc, &inv_expr);
4992
4993 if (inv_expr)
4994 {
4995 inv_exprs = BITMAP_ALLOCbitmap_alloc (NULLnullptr);
4996 bitmap_set_bit (inv_exprs, inv_expr->id);
4997 }
4998 sum_cost = cost;
4999 if (!sum_cost.infinite_cost_p () && cand->ainc_use == use)
5000 {
5001 if (can_autoinc)
5002 sum_cost -= cand->cost_step;
5003 /* If we generated the candidate solely for exploiting autoincrement
5004 opportunities, and it turns out it can't be used, set the cost to
5005 infinity to make sure we ignore it. */
5006 else if (cand->pos == IP_AFTER_USE || cand->pos == IP_BEFORE_USE)
5007 sum_cost = infinite_cost;
5008 }
5009
5010 /* Uses in a group can share setup code, so only add setup cost once. */
5011 cost -= cost.scratch;
5012 /* Compute and add costs for rest uses of this group. */
5013 for (i = 1; i < group->vuses.length () && !sum_cost.infinite_cost_p (); i++)
5014 {
5015 struct iv_use *next = group->vuses[i];
5016
5017 /* TODO: We could skip computing cost for sub iv_use when it has the
5018 same cost as the first iv_use, but the cost really depends on the
5019 offset and where the iv_use is. */
5020 cost = get_computation_cost (data, next, cand, true,
5021 NULLnullptr, &can_autoinc, &inv_expr);
5022 if (inv_expr)
5023 {
5024 if (!inv_exprs)
5025 inv_exprs = BITMAP_ALLOCbitmap_alloc (NULLnullptr);
5026
5027 bitmap_set_bit (inv_exprs, inv_expr->id);
5028 }
5029 sum_cost += cost;
5030 }
5031 set_group_iv_cost (data, group, cand, sum_cost, inv_vars,
5032 NULL_TREE(tree) nullptr, ERROR_MARK, inv_exprs);
5033
5034 return !sum_cost.infinite_cost_p ();
5035}
5036
5037/* Computes value of candidate CAND at position AT in iteration NITER, and
5038 stores it to VAL. */
5039
5040static void
5041cand_value_at (class loop *loop, struct iv_cand *cand, gimple *at, tree niter,
5042 aff_tree *val)
5043{
5044 aff_tree step, delta, nit;
5045 struct iv *iv = cand->iv;
5046 tree type = TREE_TYPE (iv->base)((contains_struct_check ((iv->base), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 5046, __FUNCTION__))->typed.type)
;
5047 tree steptype;
 /* Perform the arithmetic in an unsigned (or sizetype for pointers)
    type to avoid signed overflow issues.  */
5048 if (POINTER_TYPE_P (type)(((enum tree_code) (type)->base.code) == POINTER_TYPE || (
(enum tree_code) (type)->base.code) == REFERENCE_TYPE)
)
5049 steptype = sizetypesizetype_tab[(int) stk_sizetype];
5050 else
5051 steptype = unsigned_type_for (type);
 /* VAL = BASE + NITER * STEP, plus one extra STEP when AT is after the
    increment of the candidate.  */
5053 tree_to_aff_combination (iv->step, TREE_TYPE (iv->step)((contains_struct_check ((iv->step), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 5053, __FUNCTION__))->typed.type)
, &step);
5054 aff_combination_convert (&step, steptype);
5055 tree_to_aff_combination (niter, TREE_TYPE (niter)((contains_struct_check ((niter), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 5055, __FUNCTION__))->typed.type)
, &nit);
5056 aff_combination_convert (&nit, steptype);
5057 aff_combination_mult (&nit, &step, &delta);
5058 if (stmt_after_increment (loop, cand, at))
5059 aff_combination_add (&delta, &step);
5060
5061 tree_to_aff_combination (iv->base, type, val);
5062 if (!POINTER_TYPE_P (type)(((enum tree_code) (type)->base.code) == POINTER_TYPE || (
(enum tree_code) (type)->base.code) == REFERENCE_TYPE)
)
5063 aff_combination_convert (val, steptype);
5064 aff_combination_add (val, &delta);
5065}
5066
5067/* Returns period of induction variable iv. */
5068
5069static tree
5070iv_period (struct iv *iv)
5071{
5072 tree step = iv->step, period, type;
5073 tree pow2div;
 /* Only constant steps are supported here.  */
5075 gcc_assert (step && TREE_CODE (step) == INTEGER_CST)((void)(!(step && ((enum tree_code) (step)->base.code
) == INTEGER_CST) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 5075, __FUNCTION__), 0 : 0))
;
5076
5077 type = unsigned_type_for (TREE_TYPE (step)((contains_struct_check ((step), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 5077, __FUNCTION__))->typed.type)
);
5078 /* Period of the iv is lcm (step, type_range)/step -1,
5079 i.e., N*type_range/step - 1. Since type range is power
5080 of two, N == (step >> num_of_ending_zeros_binary (step),
5081 so the final result is
5082
5083 (type_range >> num_of_ending_zeros_binary (step)) - 1
5084
5085 */
5086 pow2div = num_ending_zeros (step);
5087
5088 period = build_low_bits_mask (type,
5089 (TYPE_PRECISION (type)((tree_class_check ((type), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 5089, __FUNCTION__))->type_common.precision)
5090 - tree_to_uhwi (pow2div)));
5091
5092 return period;
5093}
5094
5095/* Returns the comparison operator used when eliminating the iv USE. */
5096
5097static enum tree_code
5098iv_elimination_compare (struct ivopts_data *data, struct iv_use *use)
5099{
5100 class loop *loop = data->current_loop;
5101 basic_block ex_bb;
5102 edge exit;
 /* Pick the successor edge of the exit test's block that actually
    leaves the loop.  */
5104 ex_bb = gimple_bb (use->stmt);
5105 exit = EDGE_SUCC (ex_bb, 0)(*(ex_bb)->succs)[(0)];
5106 if (flow_bb_inside_loop_p (loop, exit->dest))
5107 exit = EDGE_SUCC (ex_bb, 1)(*(ex_bb)->succs)[(1)];
 /* If the loop is left when the condition is true, the elimination
    test uses EQ_EXPR, otherwise NE_EXPR.  */
5109 return (exit->flags & EDGE_TRUE_VALUE ? EQ_EXPR : NE_EXPR);
5110}
5111
5112/* Returns true if we can prove that BASE - OFFSET does not overflow. For now,
5113 we only detect the situation that BASE = SOMETHING + OFFSET, where the
5114 calculation is performed in non-wrapping type.
5115
5116 TODO: More generally, we could test for the situation that
5117 BASE = SOMETHING + OFFSET' and OFFSET is between OFFSET' and zero.
5118 This would require knowing the sign of OFFSET. */
5119
5120static bool
5121difference_cannot_overflow_p (struct ivopts_data *data, tree base, tree offset)
5122{
5123 enum tree_code code;
5124 tree e1, e2;
5125 aff_tree aff_e1, aff_e2, aff_offset;
 /* The analysis below is only valid when the arithmetic on BASE's type
    cannot wrap.  */
5127 if (!nowrap_type_p (TREE_TYPE (base)((contains_struct_check ((base), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 5127, __FUNCTION__))->typed.type)
))
5128 return false;
 /* NOTE(review): presumably looks through trivial SSA operations to
    expose BASE's defining expression -- confirm against
    expand_simple_operations' definition.  */
5130 base = expand_simple_operations (base);
 /* Decompose BASE into a binary operation CODE (e1, e2), either from
    its SSA definition statement or from the expression itself.  */
5132 if (TREE_CODE (base)((enum tree_code) (base)->base.code) == SSA_NAME)
5133 {
5134 gimple *stmt = SSA_NAME_DEF_STMT (base)(tree_check ((base), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 5134, __FUNCTION__, (SSA_NAME)))->ssa_name.def_stmt
;
5135
5136 if (gimple_code (stmt) != GIMPLE_ASSIGN)
5137 return false;
5138
5139 code = gimple_assign_rhs_code (stmt);
5140 if (get_gimple_rhs_class (code) != GIMPLE_BINARY_RHS)
5141 return false;
5142
5143 e1 = gimple_assign_rhs1 (stmt);
5144 e2 = gimple_assign_rhs2 (stmt);
5145 }
5146 else
5147 {
5148 code = TREE_CODE (base)((enum tree_code) (base)->base.code);
5149 if (get_gimple_rhs_class (code) != GIMPLE_BINARY_RHS)
5150 return false;
5151 e1 = TREE_OPERAND (base, 0)(*((const_cast<tree*> (tree_operand_check ((base), (0),
"/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 5151, __FUNCTION__)))))
;
5152 e2 = TREE_OPERAND (base, 1)(*((const_cast<tree*> (tree_operand_check ((base), (1),
"/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 5152, __FUNCTION__)))))
;
5153 }
5154
5155 /* Use affine expansion as deeper inspection to prove the equality. */
5156 tree_to_aff_combination_expand (e2, TREE_TYPE (e2)((contains_struct_check ((e2), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 5156, __FUNCTION__))->typed.type)
,
5157 &aff_e2, &data->name_expansion_cache);
5158 tree_to_aff_combination_expand (offset, TREE_TYPE (offset)((contains_struct_check ((offset), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 5158, __FUNCTION__))->typed.type)
,
5159 &aff_offset, &data->name_expansion_cache);
 /* OFFSET is negated; an addend equals OFFSET iff the sum with the
    negated combination is zero.  */
5160 aff_combination_scale (&aff_offset, -1);
5161 switch (code)
5162 {
5163 case PLUS_EXPR:
5164 aff_combination_add (&aff_e2, &aff_offset);
5165 if (aff_combination_zero_p (&aff_e2))
5166 return true;
5167
5168 tree_to_aff_combination_expand (e1, TREE_TYPE (e1)((contains_struct_check ((e1), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 5168, __FUNCTION__))->typed.type)
,
5169 &aff_e1, &data->name_expansion_cache);
5170 aff_combination_add (&aff_e1, &aff_offset);
5171 return aff_combination_zero_p (&aff_e1);
5172
5173 case POINTER_PLUS_EXPR:
5174 aff_combination_add (&aff_e2, &aff_offset);
5175 return aff_combination_zero_p (&aff_e2);
5176
5177 default:
5178 return false;
5179 }
5180}
5181
5182/* Tries to replace loop exit by one formulated in terms of a LT_EXPR
5183 comparison with CAND. NITER describes the number of iterations of
5184 the loops. If successful, the comparison in COMP_P is altered accordingly.
5185
5186 We aim to handle the following situation:
5187
5188 sometype *base, *p;
5189 int a, b, i;
5190
5191 i = a;
5192 p = p_0 = base + a;
5193
5194 do
5195 {
5196 bla (*p);
5197 p++;
5198 i++;
5199 }
5200 while (i < b);
5201
5202 Here, the number of iterations of the loop is (a + 1 > b) ? 0 : b - a - 1.
5203 We aim to optimize this to
5204
5205 p = p_0 = base + a;
5206 do
5207 {
5208 bla (*p);
5209 p++;
5210 }
5211 while (p < p_0 - a + b);
5212
5213 This preserves the correctness, since the pointer arithmetics does not
5214 overflow. More precisely:
5215
5216 1) if a + 1 <= b, then p_0 - a + b is the final value of p, hence there is no
5217 overflow in computing it or the values of p.
5218 2) if a + 1 > b, then we need to verify that the expression p_0 - a does not
5219 overflow. To prove this, we use the fact that p_0 = base + a. */
5220
5221static bool
5222iv_elimination_compare_lt (struct ivopts_data *data,
5223 struct iv_cand *cand, enum tree_code *comp_p,
5224 class tree_niter_desc *niter)
5225{
5226 tree cand_type, a, b, mbz, nit_type = TREE_TYPE (niter->niter)((contains_struct_check ((niter->niter), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 5226, __FUNCTION__))->typed.type)
, offset;
5227 class aff_tree nit, tmpa, tmpb;
5228 enum tree_code comp;
5229 HOST_WIDE_INTlong step;
5230
5231 /* We need to know that the candidate induction variable does not overflow.
5232 While more complex analysis may be used to prove this, for now just
5233 check that the variable appears in the original program and that it
5234 is computed in a type that guarantees no overflows. */
5235 cand_type = TREE_TYPE (cand->iv->base)((contains_struct_check ((cand->iv->base), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 5235, __FUNCTION__))->typed.type)
;
5236 if (cand->pos != IP_ORIGINAL || !nowrap_type_p (cand_type))
5237 return false;
5238
5239 /* Make sure that the loop iterates till the loop bound is hit, as otherwise
5240 the calculation of the BOUND could overflow, making the comparison
5241 invalid. */
5242 if (!data->loop_single_exit_p)
5243 return false;
5244
5245 /* We need to be able to decide whether candidate is increasing or decreasing
5246 in order to choose the right comparison operator. */
5247 if (!cst_and_fits_in_hwi (cand->iv->step))
5248 return false;
5249 step = int_cst_value (cand->iv->step);
5250
5251 /* Check that the number of iterations matches the expected pattern:
5252 a + 1 > b ? 0 : b - a - 1. */
5253 mbz = niter->may_be_zero;
5254 if (TREE_CODE (mbz)((enum tree_code) (mbz)->base.code) == GT_EXPR)
5255 {
5256 /* Handle a + 1 > b. */
5257 tree op0 = TREE_OPERAND (mbz, 0)(*((const_cast<tree*> (tree_operand_check ((mbz), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 5257, __FUNCTION__)))))
;
5258 if (TREE_CODE (op0)((enum tree_code) (op0)->base.code) == PLUS_EXPR && integer_onep (TREE_OPERAND (op0, 1)(*((const_cast<tree*> (tree_operand_check ((op0), (1), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 5258, __FUNCTION__)))))
))
5259 {
5260 a = TREE_OPERAND (op0, 0)(*((const_cast<tree*> (tree_operand_check ((op0), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 5260, __FUNCTION__)))))
;
5261 b = TREE_OPERAND (mbz, 1)(*((const_cast<tree*> (tree_operand_check ((mbz), (1), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 5261, __FUNCTION__)))))
;
5262 }
5263 else
5264 return false;
5265 }
5266 else if (TREE_CODE (mbz)((enum tree_code) (mbz)->base.code) == LT_EXPR)
5267 {
5268 tree op1 = TREE_OPERAND (mbz, 1)(*((const_cast<tree*> (tree_operand_check ((mbz), (1), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 5268, __FUNCTION__)))))
;
5269
5270 /* Handle b < a + 1. */
5271 if (TREE_CODE (op1)((enum tree_code) (op1)->base.code) == PLUS_EXPR && integer_onep (TREE_OPERAND (op1, 1)(*((const_cast<tree*> (tree_operand_check ((op1), (1), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 5271, __FUNCTION__)))))
))
5272 {
5273 a = TREE_OPERAND (op1, 0)(*((const_cast<tree*> (tree_operand_check ((op1), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 5273, __FUNCTION__)))))
;
5274 b = TREE_OPERAND (mbz, 0)(*((const_cast<tree*> (tree_operand_check ((mbz), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 5274, __FUNCTION__)))))
;
5275 }
5276 else
5277 return false;
5278 }
5279 else
5280 return false;
5281
5282 /* Expected number of iterations is B - A - 1. Check that it matches
5283 the actual number, i.e., that B - A - NITER = 1. */
5284 tree_to_aff_combination (niter->niter, nit_type, &nit);
5285 tree_to_aff_combination (fold_convert (nit_type, a)fold_convert_loc (((location_t) 0), nit_type, a), nit_type, &tmpa);
5286 tree_to_aff_combination (fold_convert (nit_type, b)fold_convert_loc (((location_t) 0), nit_type, b), nit_type, &tmpb);
5287 aff_combination_scale (&nit, -1);
5288 aff_combination_scale (&tmpa, -1);
5289 aff_combination_add (&tmpb, &tmpa);
5290 aff_combination_add (&tmpb, &nit);
5291 if (tmpb.n != 0 || maybe_ne (tmpb.offset, 1))
5292 return false;
5293
5294 /* Finally, check that CAND->IV->BASE - CAND->IV->STEP * A does not
5295 overflow. */
5296 offset = fold_build2 (MULT_EXPR, TREE_TYPE (cand->iv->step),fold_build2_loc (((location_t) 0), MULT_EXPR, ((contains_struct_check
((cand->iv->step), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 5296, __FUNCTION__))->typed.type), cand->iv->step,
fold_convert_loc (((location_t) 0), ((contains_struct_check (
(cand->iv->step), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 5298, __FUNCTION__))->typed.type), a) )
5297 cand->iv->step,fold_build2_loc (((location_t) 0), MULT_EXPR, ((contains_struct_check
((cand->iv->step), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 5296, __FUNCTION__))->typed.type), cand->iv->step,
fold_convert_loc (((location_t) 0), ((contains_struct_check (
(cand->iv->step), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 5298, __FUNCTION__))->typed.type), a) )
5298 fold_convert (TREE_TYPE (cand->iv->step), a))fold_build2_loc (((location_t) 0), MULT_EXPR, ((contains_struct_check
((cand->iv->step), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 5296, __FUNCTION__))->typed.type), cand->iv->step,
fold_convert_loc (((location_t) 0), ((contains_struct_check (
(cand->iv->step), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 5298, __FUNCTION__))->typed.type), a) )
;
5299 if (!difference_cannot_overflow_p (data, cand->iv->base, offset))
5300 return false;
5301
5302 /* Determine the new comparison operator. */
5303 comp = step < 0 ? GT_EXPR : LT_EXPR;
5304 if (*comp_p == NE_EXPR)
5305 *comp_p = comp;
5306 else if (*comp_p == EQ_EXPR)
5307 *comp_p = invert_tree_comparison (comp, false);
5308 else
5309 gcc_unreachable ()(fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 5309, __FUNCTION__))
;
5310
5311 return true;
5312}
5313
5314/* Check whether it is possible to express the condition in USE by comparison
5315 of candidate CAND. If so, store the value compared with to BOUND, and the
5316 comparison operator to COMP. */
5317
5318static bool
5319may_eliminate_iv (struct ivopts_data *data,
5320 struct iv_use *use, struct iv_cand *cand, tree *bound,
5321 enum tree_code *comp)
5322{
5323 basic_block ex_bb;
5324 edge exit;
5325 tree period;
5326 class loop *loop = data->current_loop;
5327 aff_tree bnd;
5328 class tree_niter_desc *desc = NULLnullptr;
5329
5330 if (TREE_CODE (cand->iv->step)((enum tree_code) (cand->iv->step)->base.code) != INTEGER_CST)
5331 return false;
5332
5333 /* For now works only for exits that dominate the loop latch.
5334 TODO: extend to other conditions inside loop body. */
5335 ex_bb = gimple_bb (use->stmt);
5336 if (use->stmt != last_stmt (ex_bb)
5337 || gimple_code (use->stmt) != GIMPLE_COND
5338 || !dominated_by_p (CDI_DOMINATORS, loop->latch, ex_bb))
5339 return false;
5340
5341 exit = EDGE_SUCC (ex_bb, 0)(*(ex_bb)->succs)[(0)];
5342 if (flow_bb_inside_loop_p (loop, exit->dest))
5343 exit = EDGE_SUCC (ex_bb, 1)(*(ex_bb)->succs)[(1)];
5344 if (flow_bb_inside_loop_p (loop, exit->dest))
5345 return false;
5346
5347 desc = niter_for_exit (data, exit);
5348 if (!desc)
5349 return false;
5350
5351 /* Determine whether we can use the variable to test the exit condition.
5352 This is the case iff the period of the induction variable is greater
5353 than the number of iterations for which the exit condition is true. */
5354 period = iv_period (cand->iv);
5355
5356 /* If the number of iterations is constant, compare against it directly. */
5357 if (TREE_CODE (desc->niter)((enum tree_code) (desc->niter)->base.code) == INTEGER_CST)
5358 {
5359 /* See cand_value_at. */
5360 if (stmt_after_increment (loop, cand, use->stmt))
5361 {
5362 if (!tree_int_cst_lt (desc->niter, period))
5363 return false;
5364 }
5365 else
5366 {
5367 if (tree_int_cst_lt (period, desc->niter))
5368 return false;
5369 }
5370 }
5371
5372 /* If not, and if this is the only possible exit of the loop, see whether
5373 we can get a conservative estimate on the number of iterations of the
5374 entire loop and compare against that instead. */
5375 else
5376 {
5377 widest_int period_value, max_niter;
5378
5379 max_niter = desc->max;
5380 if (stmt_after_increment (loop, cand, use->stmt))
5381 max_niter += 1;
5382 period_value = wi::to_widest (period);
5383 if (wi::gtu_p (max_niter, period_value))
5384 {
5385 /* See if we can take advantage of inferred loop bound
5386 information. */
5387 if (data->loop_single_exit_p)
5388 {
5389 if (!max_loop_iterations (loop, &max_niter))
5390 return false;
5391 /* The loop bound is already adjusted by adding 1. */
5392 if (wi::gtu_p (max_niter, period_value))
5393 return false;
5394 }
5395 else
5396 return false;
5397 }
5398 }
5399
5400 /* For doloop IV cand, the bound would be zero. It's safe whether
5401 may_be_zero set or not. */
5402 if (cand->doloop_p)
5403 {
5404 *bound = build_int_cst (TREE_TYPE (cand->iv->base)((contains_struct_check ((cand->iv->base), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 5404, __FUNCTION__))->typed.type)
, 0);
5405 *comp = iv_elimination_compare (data, use);
5406 return true;
5407 }
5408
5409 cand_value_at (loop, cand, use->stmt, desc->niter, &bnd);
5410
5411 *bound = fold_convert (TREE_TYPE (cand->iv->base),fold_convert_loc (((location_t) 0), ((contains_struct_check (
(cand->iv->base), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 5411, __FUNCTION__))->typed.type), aff_combination_to_tree
(&bnd))
5412 aff_combination_to_tree (&bnd))fold_convert_loc (((location_t) 0), ((contains_struct_check (
(cand->iv->base), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 5411, __FUNCTION__))->typed.type), aff_combination_to_tree
(&bnd))
;
5413 *comp = iv_elimination_compare (data, use);
5414
5415 /* It is unlikely that computing the number of iterations using division
5416 would be more profitable than keeping the original induction variable. */
5417 if (expression_expensive_p (*bound))
5418 return false;
5419
5420 /* Sometimes, it is possible to handle the situation that the number of
5421 iterations may be zero unless additional assumptions by using <
5422 instead of != in the exit condition.
5423
5424 TODO: we could also calculate the value MAY_BE_ZERO ? 0 : NITER and
5425 base the exit condition on it. However, that is often too
5426 expensive. */
5427 if (!integer_zerop (desc->may_be_zero))
5428 return iv_elimination_compare_lt (data, cand, comp, desc);
5429
5430 return true;
5431}
5432
5433 /* Calculates the cost of BOUND, if it is a PARM_DECL. A PARM_DECL must
5434 be copied, if it is used in the loop body and DATA->body_includes_call. */
5435
5436static int
5437parm_decl_cost (struct ivopts_data *data, tree bound)
5438{
5439 tree sbound = bound;
5440 STRIP_NOPS (sbound)(sbound) = tree_strip_nop_conversions ((const_cast<union tree_node
*> (((sbound)))))
;
5441
5442 if (TREE_CODE (sbound)((enum tree_code) (sbound)->base.code) == SSA_NAME
5443 && SSA_NAME_IS_DEFAULT_DEF (sbound)(tree_check ((sbound), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 5443, __FUNCTION__, (SSA_NAME)))->base.default_def_flag
5444 && TREE_CODE (SSA_NAME_VAR (sbound))((enum tree_code) (((tree_check ((sbound), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 5444, __FUNCTION__, (SSA_NAME)))->ssa_name.var == (tree)
nullptr || ((enum tree_code) ((sbound)->ssa_name.var)->
base.code) == IDENTIFIER_NODE ? (tree) nullptr : (sbound)->
ssa_name.var))->base.code)
== PARM_DECL
5445 && data->body_includes_call)
5446 return COSTS_N_INSNS (1)((1) * 4);
5447
5448 return 0;
5449}
5450
5451/* Determines cost of computing the use in GROUP with CAND in a condition. */
5452
5453static bool
5454determine_group_iv_cost_cond (struct ivopts_data *data,
5455 struct iv_group *group, struct iv_cand *cand)
5456{
5457 tree bound = NULL_TREE(tree) nullptr;
5458 struct iv *cmp_iv;
5459 bitmap inv_exprs = NULLnullptr;
5460 bitmap inv_vars_elim = NULLnullptr, inv_vars_express = NULLnullptr, inv_vars;
5461 comp_cost elim_cost = infinite_cost, express_cost, cost, bound_cost;
5462 enum comp_iv_rewrite rewrite_type;
5463 iv_inv_expr_ent *inv_expr_elim = NULLnullptr, *inv_expr_express = NULLnullptr, *inv_expr;
5464 tree *control_var, *bound_cst;
5465 enum tree_code comp = ERROR_MARK;
5466 struct iv_use *use = group->vuses[0];
5467
5468 /* Extract condition operands. */
5469 rewrite_type = extract_cond_operands (data, use->stmt, &control_var,
5470 &bound_cst, NULLnullptr, &cmp_iv);
5471 gcc_assert (rewrite_type != COMP_IV_NA)((void)(!(rewrite_type != COMP_IV_NA) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 5471, __FUNCTION__), 0 : 0))
;
5472
5473 /* Try iv elimination. */
5474 if (rewrite_type == COMP_IV_ELIM
5475 && may_eliminate_iv (data, use, cand, &bound, &comp))
5476 {
5477 elim_cost = force_var_cost (data, bound, &inv_vars_elim);
5478 if (elim_cost.cost == 0)
5479 elim_cost.cost = parm_decl_cost (data, bound);
5480 else if (TREE_CODE (bound)((enum tree_code) (bound)->base.code) == INTEGER_CST)
5481 elim_cost.cost = 0;
5482 /* If we replace a loop condition 'i < n' with 'p < base + n',
5483 inv_vars_elim will have 'base' and 'n' set, which implies that both
5484 'base' and 'n' will be live during the loop. More likely,
5485 'base + n' will be loop invariant, resulting in only one live value
5486 during the loop. So in that case we clear inv_vars_elim and set
5487 inv_expr_elim instead. */
5488 if (inv_vars_elim && bitmap_count_bits (inv_vars_elim) > 1)
5489 {
5490 inv_expr_elim = get_loop_invariant_expr (data, bound);
5491 bitmap_clear (inv_vars_elim);
5492 }
5493 /* The bound is a loop invariant, so it will be only computed
5494 once. */
5495 elim_cost.cost = adjust_setup_cost (data, elim_cost.cost);
5496 }
5497
5498 /* When the condition is a comparison of the candidate IV against
5499 zero, prefer this IV.
5500
5501 TODO: The constant that we're subtracting from the cost should
5502 be target-dependent. This information should be added to the
5503 target costs for each backend. */
5504 if (!elim_cost.infinite_cost_p () /* Do not try to decrease infinite! */
5505 && integer_zerop (*bound_cst)
5506 && (operand_equal_p (*control_var, cand->var_after, 0)
5507 || operand_equal_p (*control_var, cand->var_before, 0)))
5508 elim_cost -= 1;
5509
5510 express_cost = get_computation_cost (data, use, cand, false,
5511 &inv_vars_express, NULLnullptr,
5512 &inv_expr_express);
5513 if (cmp_iv != NULLnullptr)
5514 find_inv_vars (data, &cmp_iv->base, &inv_vars_express);
5515
5516 /* Count the cost of the original bound as well. */
5517 bound_cost = force_var_cost (data, *bound_cst, NULLnullptr);
5518 if (bound_cost.cost == 0)
5519 bound_cost.cost = parm_decl_cost (data, *bound_cst);
5520 else if (TREE_CODE (*bound_cst)((enum tree_code) (*bound_cst)->base.code) == INTEGER_CST)
5521 bound_cost.cost = 0;
5522 express_cost += bound_cost;
5523
5524 /* Choose the better approach, preferring the eliminated IV. */
5525 if (elim_cost <= express_cost)
5526 {
5527 cost = elim_cost;
5528 inv_vars = inv_vars_elim;
5529 inv_vars_elim = NULLnullptr;
5530 inv_expr = inv_expr_elim;
5531 /* For doloop candidate/use pair, adjust to zero cost. */
5532 if (group->doloop_p && cand->doloop_p && elim_cost.cost > no_cost.cost)
5533 cost = no_cost;
5534 }
5535 else
5536 {
5537 cost = express_cost;
5538 inv_vars = inv_vars_express;
5539 inv_vars_express = NULLnullptr;
5540 bound = NULL_TREE(tree) nullptr;
5541 comp = ERROR_MARK;
5542 inv_expr = inv_expr_express;
5543 }
5544
5545 if (inv_expr)
5546 {
5547 inv_exprs = BITMAP_ALLOCbitmap_alloc (NULLnullptr);
5548 bitmap_set_bit (inv_exprs, inv_expr->id);
5549 }
5550 set_group_iv_cost (data, group, cand, cost,
5551 inv_vars, bound, comp, inv_exprs);
5552
5553 if (inv_vars_elim)
5554 BITMAP_FREE (inv_vars_elim)((void) (bitmap_obstack_free ((bitmap) inv_vars_elim), (inv_vars_elim
) = (bitmap) nullptr))
;
5555 if (inv_vars_express)
5556 BITMAP_FREE (inv_vars_express)((void) (bitmap_obstack_free ((bitmap) inv_vars_express), (inv_vars_express
) = (bitmap) nullptr))
;
5557
5558 return !cost.infinite_cost_p ();
5559}
5560
5561/* Determines cost of computing uses in GROUP with CAND. Returns false
5562 if USE cannot be represented with CAND. */
5563
5564static bool
5565determine_group_iv_cost (struct ivopts_data *data,
5566 struct iv_group *group, struct iv_cand *cand)
5567{
5568 switch (group->type)
5569 {
5570 case USE_NONLINEAR_EXPR:
5571 return determine_group_iv_cost_generic (data, group, cand);
5572
5573 case USE_REF_ADDRESS:
5574 case USE_PTR_ADDRESS:
5575 return determine_group_iv_cost_address (data, group, cand);
5576
5577 case USE_COMPARE:
5578 return determine_group_iv_cost_cond (data, group, cand);
5579
5580 default:
5581 gcc_unreachable ()(fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 5581, __FUNCTION__))
;
5582 }
5583}
5584
5585/* Return true if get_computation_cost indicates that autoincrement is
5586 a possibility for the pair of USE and CAND, false otherwise. */
5587
5588static bool
5589autoinc_possible_for_pair (struct ivopts_data *data, struct iv_use *use,
5590 struct iv_cand *cand)
5591{
5592 if (!address_p (use->type))
5593 return false;
5594
5595 bool can_autoinc = false;
5596 get_computation_cost (data, use, cand, true, NULLnullptr, &can_autoinc, NULLnullptr);
5597 return can_autoinc;
5598}
5599
5600/* Examine IP_ORIGINAL candidates to see if they are incremented next to a
5601 use that allows autoincrement, and set their AINC_USE if possible. */
5602
5603static void
5604set_autoinc_for_original_candidates (struct ivopts_data *data)
5605{
5606 unsigned i, j;
5607
5608 for (i = 0; i < data->vcands.length (); i++)
5609 {
5610 struct iv_cand *cand = data->vcands[i];
5611 struct iv_use *closest_before = NULLnullptr;
5612 struct iv_use *closest_after = NULLnullptr;
5613 if (cand->pos != IP_ORIGINAL)
5614 continue;
5615
5616 for (j = 0; j < data->vgroups.length (); j++)
5617 {
5618 struct iv_group *group = data->vgroups[j];
5619 struct iv_use *use = group->vuses[0];
5620 unsigned uid = gimple_uid (use->stmt);
5621
5622 if (gimple_bb (use->stmt) != gimple_bb (cand->incremented_at))
5623 continue;
5624
5625 if (uid < gimple_uid (cand->incremented_at)
5626 && (closest_before == NULLnullptr
5627 || uid > gimple_uid (closest_before->stmt)))
5628 closest_before = use;
5629
5630 if (uid > gimple_uid (cand->incremented_at)
5631 && (closest_after == NULLnullptr
5632 || uid < gimple_uid (closest_after->stmt)))
5633 closest_after = use;
5634 }
5635
5636 if (closest_before != NULLnullptr
5637 && autoinc_possible_for_pair (data, closest_before, cand))
5638 cand->ainc_use = closest_before;
5639 else if (closest_after != NULLnullptr
5640 && autoinc_possible_for_pair (data, closest_after, cand))
5641 cand->ainc_use = closest_after;
5642 }
5643}
5644
5645/* Relate compare use with all candidates. */
5646
5647static void
5648relate_compare_use_with_all_cands (struct ivopts_data *data)
5649{
5650 unsigned i, count = data->vcands.length ();
5651 for (i = 0; i < data->vgroups.length (); i++)
5652 {
5653 struct iv_group *group = data->vgroups[i];
5654
5655 if (group->type == USE_COMPARE)
5656 bitmap_set_range (group->related_cands, 0, count);
5657 }
5658}
5659
5660/* If PREFERRED_MODE is suitable and profitable, use the preferred
5661 PREFERRED_MODE to compute doloop iv base from niter: base = niter + 1. */
5662
5663static tree
5664compute_doloop_base_on_mode (machine_mode preferred_mode, tree niter,
5665 const widest_int &iterations_max)
5666{
5667 tree ntype = TREE_TYPE (niter)((contains_struct_check ((niter), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 5667, __FUNCTION__))->typed.type)
;
5668 tree pref_type = lang_hooks.types.type_for_mode (preferred_mode, 1);
5669 if (!pref_type)
5670 return fold_build2 (PLUS_EXPR, ntype, unshare_expr (niter),fold_build2_loc (((location_t) 0), PLUS_EXPR, ntype, unshare_expr
(niter), build_int_cst (ntype, 1) )
5671 build_int_cst (ntype, 1))fold_build2_loc (((location_t) 0), PLUS_EXPR, ntype, unshare_expr
(niter), build_int_cst (ntype, 1) )
;
5672
5673 gcc_assert (TREE_CODE (pref_type) == INTEGER_TYPE)((void)(!(((enum tree_code) (pref_type)->base.code) == INTEGER_TYPE
) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 5673, __FUNCTION__), 0 : 0))
;
5674
5675 int prec = TYPE_PRECISION (ntype)((tree_class_check ((ntype), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 5675, __FUNCTION__))->type_common.precision)
;
5676 int pref_prec = TYPE_PRECISION (pref_type)((tree_class_check ((pref_type), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-loop-ivopts.c"
, 5676, __FUNCTION__))->type_common.precision)
;
5677
5678 tree base;
5679
5680 /* Check if the PREFERRED_MODED is able to present niter. */
5681 if (pref_prec > prec
5682 || wi::ltu_p (iterations_max,
5683 widest_int::from (wi::max_value (pref_prec, UNSIGNED),
5684 UNSIGNED)))
5685 {
5686 /* No wrap, it is safe to use preferred type after niter + 1. */
5687 if (wi::ltu_p (iterations_max,
5688 widest_int::from (wi::max_value (prec, UNSIGNED),
5689 UNSIGNED)))
5690 {
5691 /* This could help to optimize "-1 +1" pair when niter looks
5692 like "n-1": n is in original mode. "base = (n - 1) + 1"
5693 in PREFERRED_MODED: it could be base = (PREFERRED_TYPE)n. */
5694 base = fold_build2 (PLUS_EXPR, ntype, unshare_expr (niter),fold_build2_loc (((location_t) 0), PLUS_EXPR, ntype, unshare_expr
(niter), build_int_cst (ntype, 1) )
5695 build_int_cst (ntype, 1))fold_build2_loc (((location_t) 0), PLUS_EXPR, ntype, unshare_expr
(niter), build_int_cst (ntype, 1) )
;
5696 base = fold_convert (pref_type, base)fold_convert_loc (((location_t) 0), pref_type, base);
5697 }
5698
5699 /* To avoid wrap, convert niter to preferred type before plus 1. */
5700 else
5701 {
5702 niter = fold_convert (pref_type, niter)fold_convert_loc (((location_t) 0), pref_type, niter);
5703 base = fold_build2 (PLUS_EXPR, pref_type, unshare_expr (niter),fold_build2_loc (((location_t) 0), PLUS_EXPR, pref_type, unshare_expr
(niter), build_int_cst (pref_type, 1) )
5704 build_int_cst (pref_type, 1))fold_build2_loc (((location_t) 0), PLUS_EXPR, pref_type, unshare_expr
(niter), build_int_cst (pref_type, 1) )
;
5705 }
5706 }
5707 else
5708 base = fold_build2 (PLUS_EXPR, ntype, unshare_expr (niter),fold_build2_loc (((location_t) 0), PLUS_EXPR, ntype, unshare_expr
(niter), build_int_cst (ntype, 1) )
5709 build_int_cst (ntype, 1))fold_build2_loc (((loca