File: build/gcc/tree-ssa-pre.cc
Warning: line 4243, column 4: Value stored to 'set_bb_may_notreturn' is never read
1 | /* Full and partial redundancy elimination and code hoisting on SSA GIMPLE. |
2 | Copyright (C) 2001-2023 Free Software Foundation, Inc. |
3 | Contributed by Daniel Berlin <dan@dberlin.org> and Steven Bosscher |
4 | <stevenb@suse.de> |
5 | |
6 | This file is part of GCC. |
7 | |
8 | GCC is free software; you can redistribute it and/or modify |
9 | it under the terms of the GNU General Public License as published by |
10 | the Free Software Foundation; either version 3, or (at your option) |
11 | any later version. |
12 | |
13 | GCC is distributed in the hope that it will be useful, |
14 | but WITHOUT ANY WARRANTY; without even the implied warranty of |
15 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
16 | GNU General Public License for more details. |
17 | |
18 | You should have received a copy of the GNU General Public License |
19 | along with GCC; see the file COPYING3. If not see |
20 | <http://www.gnu.org/licenses/>. */ |
21 | |
22 | #include "config.h" |
23 | #include "system.h" |
24 | #include "coretypes.h" |
25 | #include "backend.h" |
26 | #include "rtl.h" |
27 | #include "tree.h" |
28 | #include "gimple.h" |
29 | #include "predict.h" |
30 | #include "alloc-pool.h" |
31 | #include "tree-pass.h" |
32 | #include "ssa.h" |
33 | #include "cgraph.h" |
34 | #include "gimple-pretty-print.h" |
35 | #include "fold-const.h" |
36 | #include "cfganal.h" |
37 | #include "gimple-iterator.h" |
38 | #include "gimple-fold.h" |
39 | #include "tree-eh.h" |
40 | #include "gimplify.h" |
41 | #include "tree-cfg.h" |
42 | #include "tree-into-ssa.h" |
43 | #include "tree-dfa.h" |
44 | #include "tree-ssa.h" |
45 | #include "cfgloop.h" |
46 | #include "tree-ssa-sccvn.h" |
47 | #include "tree-scalar-evolution.h" |
48 | #include "dbgcnt.h" |
49 | #include "domwalk.h" |
50 | #include "tree-ssa-propagate.h" |
51 | #include "tree-ssa-dce.h" |
52 | #include "tree-cfgcleanup.h" |
53 | #include "alias.h" |
54 | #include "gimple-range.h" |
55 | |
56 | /* Even though this file is called tree-ssa-pre.cc, we actually |
57 | implement a bit more than just PRE here. All of them piggy-back |
58 | on GVN which is implemented in tree-ssa-sccvn.cc. |
59 | |
60 | 1. Full Redundancy Elimination (FRE) |
61 | This is the elimination phase of GVN. |
62 | |
63 | 2. Partial Redundancy Elimination (PRE) |
64 | This adds computation of AVAIL_OUT and ANTIC_IN and |
65 | expression insertion to form GVN-PRE. |
66 | |
67 | 3. Code hoisting |
68 | This optimization uses the ANTIC_IN sets computed for PRE |
69 | to move expressions further up than PRE would do, to make |
70 | multiple computations of the same value fully redundant. |
71 | This pass is explained below (after the explanation of the |
72 | basic algorithm for PRE). |
73 | */ |
74 | |
75 | /* TODO: |
76 | |
77 | 1. Avail sets can be shared by making an avail_find_leader that |
78 | walks up the dominator tree and looks in those avail sets. |
79 | This might affect code optimality; it's unclear right now. |
80 | Currently the AVAIL_OUT sets are the remaining quadraticness in |
81 | memory of GVN-PRE. |
82 | 2. Strength reduction can be performed by anticipating expressions |
83 | we can repair later on. |
84 | 3. We can do back-substitution or smarter value numbering to catch |
85 | commutative expressions split up over multiple statements. |
86 | */ |
87 | |
88 | /* For ease of terminology, "expression node" in the below refers to |
89 | every expression node but GIMPLE_ASSIGN, because GIMPLE_ASSIGNs |
90 | represent the actual statement containing the expressions we care about, |
91 | and we cache the value number by putting it in the expression. */ |
92 | |
93 | /* Basic algorithm for Partial Redundancy Elimination: |
94 | |
95 | First we walk the statements to generate the AVAIL sets, the |
96 | EXP_GEN sets, and the tmp_gen sets. EXP_GEN sets represent the |
97 | generation of values/expressions by a given block. We use them |
98 | when computing the ANTIC sets. The AVAIL sets consist of |
99 | SSA_NAME's that represent values, so we know what values are |
100 | available in what blocks. AVAIL is a forward dataflow problem. In |
101 | SSA, values are never killed, so we don't need a kill set, or a |
102 | fixpoint iteration, in order to calculate the AVAIL sets. In |
103 | traditional parlance, AVAIL sets tell us the downsafety of the |
104 | expressions/values. |
105 | |
106 | Next, we generate the ANTIC sets. These sets represent the |
107 | anticipatable expressions. ANTIC is a backwards dataflow |
108 | problem. An expression is anticipatable in a given block if it could |
109 | be generated in that block. This means that if we had to perform |
110 | an insertion in that block, of the value of that expression, we |
111 | could. Calculating the ANTIC sets requires phi translation of |
112 | expressions, because the flow goes backwards through phis. We must |
113 | iterate to a fixpoint of the ANTIC sets, because we have a kill |
114 | set. Even in SSA form, values are not live over the entire |
115 | function, only from their definition point onwards. So we have to |
116 | remove values from the ANTIC set once we go past the definition |
117 | point of the leaders that make them up. |
118 | compute_antic/compute_antic_aux performs this computation. |
119 | |
120 | Third, we perform insertions to make partially redundant |
121 | expressions fully redundant. |
122 | |
123 | An expression is partially redundant (excluding partial |
124 | anticipation) if: |
125 | |
126 | 1. It is AVAIL in some, but not all, of the predecessors of a |
127 | given block. |
128 | 2. It is ANTIC in all the predecessors. |
129 | |
130 | In order to make it fully redundant, we insert the expression into |
131 | the predecessors where it is not available, but is ANTIC. |
132 | |
133 | When optimizing for size, we only eliminate the partial redundancy |
134 | if we need to insert in only one predecessor. This almost |
135 | completely avoids the code size increase that PRE usually causes. |
136 | |
137 | For the partial anticipation case, we only perform insertion if it |
138 | is partially anticipated in some block, and fully available in all |
139 | of the predecessors. |
140 | |
141 | do_pre_regular_insertion/do_pre_partial_partial_insertion |
142 | performs these steps, driven by insert/insert_aux. |
143 | |
144 | Fourth, we eliminate fully redundant expressions. |
145 | This is a simple statement walk that replaces redundant |
146 | calculations with the now available values. */ |
147 | |
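/* Worked example (editor's illustration, not from the original
   sources): for input like

     if (cond)
       x = a + b;
     y = a + b;

   a + b is AVAIL in only one predecessor of the block computing y
   (condition 1) but ANTIC in both (condition 2).  Insertion adds a
   computation of a + b on the edge where it was missing, after
   which elimination replaces the computation of y with a PHI of
   the two available values.  */
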
148 | /* Basic algorithm for Code Hoisting: |
149 | |
150 | Code hoisting is moving value computations up in the control flow |
151 | graph to make multiple copies redundant. Typically this is a size |
152 | optimization, but there are cases where it is also helpful for speed. |
153 | |
154 | A simple code hoisting algorithm is implemented that piggy-backs on |
155 | the PRE infrastructure. For code hoisting, we have to know ANTIC_OUT |
156 | which is effectively ANTIC_IN - AVAIL_OUT. The latter two have to be |
157 | computed for PRE, and we can use them to perform a limited version of |
158 | code hoisting, too. |
159 | |
160 | For the purpose of this implementation, a value is hoistable to a basic |
161 | block B if the following properties are met: |
162 | |
163 | 1. The value is in ANTIC_IN(B) -- the value will be computed on all |
164 | paths from B to function exit and it can be computed in B; |
165 | |
166 | 2. The value is not in AVAIL_OUT(B) -- there would be no need to |
167 | compute the value again and make it available twice; |
168 | |
169 | 3. All successors of B are dominated by B -- makes sure that inserting |
170 | a computation of the value in B will make the remaining |
171 | computations fully redundant; |
172 | |
173 | 4. At least one successor has the value in AVAIL_OUT -- to avoid |
174 | hoisting values up too far; |
175 | |
176 | 5. There are at least two successors of B -- hoisting in straight |
177 | line code is pointless. |
178 | |
179 | The third condition is not strictly necessary, but it would complicate |
180 | the hoisting pass a lot. In fact, I don't know of any code hoisting |
181 | algorithm that does not have this requirement. Fortunately, experiments |
182 | have shown that most candidate hoistable values are in regions that meet |
183 | this condition (e.g. diamond-shape regions). |
184 | |
185 | The fourth condition is necessary to avoid hoisting things up too far |
186 | away from the uses of the value. Nothing else limits the algorithm |
187 | from hoisting everything up as far as ANTIC_IN allows. Experiments |
188 | with SPEC and CSiBE have shown that hoisting up too far results in more |
189 | spilling, less benefit for code size, and worse benchmark scores. |
190 | Fortunately, in practice most of the interesting hoisting opportunities |
191 | are caught despite this limitation. |
192 | |
193 | For hoistable values that meet all conditions, expressions are inserted |
194 | to make the calculation of the hoistable value fully redundant. We |
195 | perform code hoisting insertions after each round of PRE insertions, |
196 | because code hoisting never exposes new PRE opportunities, but PRE can |
197 | create new code hoisting opportunities. |
198 | |
199 | The code hoisting algorithm is implemented in do_hoist_insert, driven |
200 | by insert/insert_aux. */ |
201 | |
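/* Worked example (editor's illustration): in a diamond

     if (cond)
       x = a + b;
     else
       y = a + b;

   the value of a + b is in ANTIC_IN of the block ending in the
   condition (1), not in its AVAIL_OUT (2), that block dominates
   both successors (3), each successor has the value in its
   AVAIL_OUT (4), and there are two successors (5).  All five
   conditions hold, so a + b is hoisted into the condition block
   and both copies in the arms become fully redundant.  */
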
202 | /* Representations of value numbers: |
203 | |
204 | Value numbers are represented by a representative SSA_NAME. We |
205 | will create fake SSA_NAME's in situations where we need a |
206 | representative but do not have one (because it is a complex |
207 | expression). In order to facilitate storing the value numbers in |
208 | bitmaps, and keep the number of wasted SSA_NAME's down, we also |
209 | associate a value_id with each value number, and create full blown |
210 | ssa_name's only where we actually need them (IE in operands of |
211 | existing expressions). |
212 | |
213 | Theoretically you could replace all the value_id's with |
214 | SSA_NAME_VERSION, but this would allocate a large number of |
215 | SSA_NAME's (which are each > 30 bytes) just to get a 4 byte number. |
216 | It would also require an additional indirection at each point we |
217 | use the value id. */ |
218 | |
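/* Small example (editor's illustration): if x_1 = a_2 + b_3 and
   y_4 = a_2 + b_3, both names get the same value number, which is
   represented by one of the SSA names (say x_1) together with a
   small integer value_id that can be used directly as a bitmap
   index.  */
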
219 | /* Representation of expressions on value numbers: |
220 | |
221 | Expressions consisting of value numbers are represented the same |
222 | way as our VN internally represents them, with an additional |
223 | "pre_expr" wrapping around them in order to facilitate storing all |
224 | of the expressions in the same sets. */ |
225 | |
226 | /* Representation of sets: |
227 | |
228 | The dataflow sets do not need to be sorted in any particular order |
229 | for the majority of their lifetime, and are simply represented as two |
230 | bitmaps, one that keeps track of values present in the set, and one |
231 | that keeps track of expressions present in the set. |
232 | |
233 | When we need them in topological order, we produce it on demand by |
234 | transforming the bitmap into an array and sorting it into topo |
235 | order. */ |
236 | |
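/* Small example (editor's illustration): a set holding a_1 + b_2
   (value 0005) and c_3 (value 0007) is stored as the values bitmap
   { 5, 7 } plus the expressions bitmap of the two expression ids,
   so testing whether a value is in the set is a single bit test.  */
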
237 | /* Type of expression, used to know which member of the PRE_EXPR union |
238 | is valid. */ |
239 | |
240 | enum pre_expr_kind |
241 | { |
242 | NAME, |
243 | NARY, |
244 | REFERENCE, |
245 | CONSTANT |
246 | }; |
247 | |
248 | union pre_expr_union |
249 | { |
250 | tree name; |
251 | tree constant; |
252 | vn_nary_op_t nary; |
253 | vn_reference_t reference; |
254 | }; |
255 | |
256 | typedef struct pre_expr_d : nofree_ptr_hash <pre_expr_d> |
257 | { |
258 | enum pre_expr_kind kind; |
259 | unsigned int id; |
260 | unsigned value_id; |
261 | location_t loc; |
262 | pre_expr_union u; |
263 | |
264 | /* hash_table support. */ |
265 | static inline hashval_t hash (const pre_expr_d *); |
266 | static inline int equal (const pre_expr_d *, const pre_expr_d *); |
267 | } *pre_expr; |
268 | |
269 | #define PRE_EXPR_NAME(e) (e)->u.name |
270 | #define PRE_EXPR_NARY(e) (e)->u.nary |
271 | #define PRE_EXPR_REFERENCE(e) (e)->u.reference |
272 | #define PRE_EXPR_CONSTANT(e) (e)->u.constant |
273 | |
274 | /* Compare E1 and E2 for equality. */ |
275 | |
276 | inline int |
277 | pre_expr_d::equal (const pre_expr_d *e1, const pre_expr_d *e2) |
278 | { |
279 | if (e1->kind != e2->kind) |
280 | return false; |
281 | |
282 | switch (e1->kind) |
283 | { |
284 | case CONSTANT: |
285 | return vn_constant_eq_with_type (PRE_EXPR_CONSTANT (e1), |
286 | PRE_EXPR_CONSTANT (e2)); |
287 | case NAME: |
288 | return PRE_EXPR_NAME (e1) == PRE_EXPR_NAME (e2); |
289 | case NARY: |
290 | return vn_nary_op_eq (PRE_EXPR_NARY (e1), PRE_EXPR_NARY (e2)); |
291 | case REFERENCE: |
292 | return vn_reference_eq (PRE_EXPR_REFERENCE (e1), |
293 | PRE_EXPR_REFERENCE (e2)); |
294 | default: |
295 | gcc_unreachable (); |
296 | } |
297 | } |
298 | |
299 | /* Hash E. */ |
300 | |
301 | inline hashval_t |
302 | pre_expr_d::hash (const pre_expr_d *e) |
303 | { |
304 | switch (e->kind) |
305 | { |
306 | case CONSTANT: |
307 | return vn_hash_constant_with_type (PRE_EXPR_CONSTANT (e)); |
308 | case NAME: |
309 | return SSA_NAME_VERSION (PRE_EXPR_NAME (e)); |
310 | case NARY: |
311 | return PRE_EXPR_NARY (e)->hashcode; |
312 | case REFERENCE: |
313 | return PRE_EXPR_REFERENCE (e)->hashcode; |
314 | default: |
315 | gcc_unreachable (); |
316 | } |
317 | } |
318 | |
319 | /* Next global expression id number. */ |
320 | static unsigned int next_expression_id; |
321 | |
322 | /* Mapping from expression to id number we can use in bitmap sets. */ |
323 | static vec<pre_expr> expressions; |
324 | static hash_table<pre_expr_d> *expression_to_id; |
325 | static vec<unsigned> name_to_id; |
326 | static obstack pre_expr_obstack; |
327 | |
328 | /* Allocate an expression id for EXPR. */ |
329 | |
330 | static inline unsigned int |
331 | alloc_expression_id (pre_expr expr) |
332 | { |
333 | struct pre_expr_d **slot; |
334 | /* Make sure we won't overflow. */ |
335 | gcc_assert (next_expression_id + 1 > next_expression_id); |
336 | expr->id = next_expression_id++; |
337 | expressions.safe_push (expr); |
338 | if (expr->kind == NAME) |
339 | { |
340 | unsigned version = SSA_NAME_VERSION (PRE_EXPR_NAME (expr)); |
341 | /* vec::safe_grow_cleared allocates no headroom. Avoid frequent |
342 | re-allocations by using vec::reserve upfront. */ |
343 | unsigned old_len = name_to_id.length (); |
344 | name_to_id.reserve (num_ssa_names - old_len); |
345 | name_to_id.quick_grow_cleared (num_ssa_names); |
346 | gcc_assert (name_to_id[version] == 0); |
347 | name_to_id[version] = expr->id; |
348 | } |
349 | else |
350 | { |
351 | slot = expression_to_id->find_slot (expr, INSERT); |
352 | gcc_assert (!*slot); |
353 | *slot = expr; |
354 | } |
355 | return next_expression_id - 1; |
356 | } |
357 | |
358 | /* Return the expression id for tree EXPR. */ |
359 | |
360 | static inline unsigned int |
361 | get_expression_id (const pre_expr expr) |
362 | { |
363 | return expr->id; |
364 | } |
365 | |
366 | static inline unsigned int |
367 | lookup_expression_id (const pre_expr expr) |
368 | { |
369 | struct pre_expr_d **slot; |
370 | |
371 | if (expr->kind == NAME) |
372 | { |
373 | unsigned version = SSA_NAME_VERSION (PRE_EXPR_NAME (expr)); |
374 | if (name_to_id.length () <= version) |
375 | return 0; |
376 | return name_to_id[version]; |
377 | } |
378 | else |
379 | { |
380 | slot = expression_to_id->find_slot (expr, NO_INSERT); |
381 | if (!slot) |
382 | return 0; |
383 | return ((pre_expr)*slot)->id; |
384 | } |
385 | } |
386 | |
387 | /* Return the expression that has expression id ID. */ |
388 | |
389 | static inline pre_expr |
390 | expression_for_id (unsigned int id) |
391 | { |
392 | return expressions[id]; |
393 | } |
394 | |
395 | static object_allocator<pre_expr_d> pre_expr_pool ("pre_expr nodes"); |
396 | |
397 | /* Given an SSA_NAME NAME, get or create a pre_expr to represent it. */ |
398 | |
399 | static pre_expr |
400 | get_or_alloc_expr_for_name (tree name) |
401 | { |
402 | struct pre_expr_d expr; |
403 | pre_expr result; |
404 | unsigned int result_id; |
405 | |
406 | expr.kind = NAME; |
407 | expr.id = 0; |
408 | PRE_EXPR_NAME (&expr) = name; |
409 | result_id = lookup_expression_id (&expr); |
410 | if (result_id != 0) |
411 | return expression_for_id (result_id); |
412 | |
413 | result = pre_expr_pool.allocate (); |
414 | result->kind = NAME; |
415 | result->loc = UNKNOWN_LOCATION; |
416 | result->value_id = VN_INFO (name)->value_id; |
417 | PRE_EXPR_NAME (result) = name; |
418 | alloc_expression_id (result); |
419 | return result; |
420 | } |
421 | |
422 | /* Given a NARY, get or create a pre_expr to represent it. Assign |
423 | VALUE_ID to it or allocate a new value-id if it is zero. Record |
424 | LOC as the original location of the expression. */ |
425 | |
426 | static pre_expr |
427 | get_or_alloc_expr_for_nary (vn_nary_op_t nary, unsigned value_id, |
428 | location_t loc = UNKNOWN_LOCATION) |
429 | { |
430 | struct pre_expr_d expr; |
431 | pre_expr result; |
432 | unsigned int result_id; |
433 | |
434 | gcc_assert (value_id == 0 || !value_id_constant_p (value_id)); |
435 | |
436 | expr.kind = NARY; |
437 | expr.id = 0; |
438 | nary->hashcode = vn_nary_op_compute_hash (nary); |
439 | PRE_EXPR_NARY (&expr) = nary; |
440 | result_id = lookup_expression_id (&expr); |
441 | if (result_id != 0) |
442 | return expression_for_id (result_id); |
443 | |
444 | result = pre_expr_pool.allocate (); |
445 | result->kind = NARY; |
446 | result->loc = loc; |
447 | result->value_id = value_id ? value_id : get_next_value_id (); |
448 | PRE_EXPR_NARY (result) |
449 | = alloc_vn_nary_op_noinit (nary->length, &pre_expr_obstack); |
450 | memcpy (PRE_EXPR_NARY (result), nary, sizeof_vn_nary_op (nary->length)); |
451 | alloc_expression_id (result); |
452 | return result; |
453 | } |
454 | |
455 | /* Given a REFERENCE, get or create a pre_expr to represent it. */ |
456 | |
457 | static pre_expr |
458 | get_or_alloc_expr_for_reference (vn_reference_t reference, |
459 | location_t loc = UNKNOWN_LOCATION) |
460 | { |
461 | struct pre_expr_d expr; |
462 | pre_expr result; |
463 | unsigned int result_id; |
464 | |
465 | expr.kind = REFERENCE; |
466 | expr.id = 0; |
467 | PRE_EXPR_REFERENCE (&expr) = reference; |
468 | result_id = lookup_expression_id (&expr); |
469 | if (result_id != 0) |
470 | return expression_for_id (result_id); |
471 | |
472 | result = pre_expr_pool.allocate (); |
473 | result->kind = REFERENCE; |
474 | result->loc = loc; |
475 | result->value_id = reference->value_id; |
476 | PRE_EXPR_REFERENCE (result) = reference; |
477 | alloc_expression_id (result); |
478 | return result; |
479 | } |
480 | |
481 | |
482 | /* An unordered bitmap set. One bitmap tracks values, the other, |
483 | expressions. */ |
484 | typedef class bitmap_set |
485 | { |
486 | public: |
487 | bitmap_head expressions; |
488 | bitmap_head values; |
489 | } *bitmap_set_t; |
490 | |
491 | #define FOR_EACH_EXPR_ID_IN_SET(set, id, bi) \ |
492 | EXECUTE_IF_SET_IN_BITMAP (&(set)->expressions, 0, (id), (bi)) |
493 | |
494 | #define FOR_EACH_VALUE_ID_IN_SET(set, id, bi) \ |
495 | EXECUTE_IF_SET_IN_BITMAP (&(set)->values, 0, (id), (bi)) |
496 | |
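/* Usage sketch (editor's illustration of a hypothetical consumer;
   expression_for_id and print_pre_expr are defined elsewhere in
   this file):

     unsigned i;
     bitmap_iterator bi;
     FOR_EACH_EXPR_ID_IN_SET (set, i, bi)
       print_pre_expr (dump_file, expression_for_id (i));

   walks every expression id recorded in SET.  */
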
497 | /* Mapping from value id to expressions with that value_id. */ |
498 | static vec<bitmap> value_expressions; |
499 | /* We just record a single expression for each constant value, |
500 | one of kind CONSTANT. */ |
501 | static vec<pre_expr> constant_value_expressions; |
502 | |
503 | |
504 | /* This structure is used to keep track of statistics on what |
505 | optimization PRE was able to perform. */ |
506 | static struct |
507 | { |
508 | /* The number of new expressions/temporaries generated by PRE. */ |
509 | int insertions; |
510 | |
511 | /* The number of inserts found due to partial anticipation */ |
512 | int pa_insert; |
513 | |
514 | /* The number of inserts made for code hoisting. */ |
515 | int hoist_insert; |
516 | |
517 | /* The number of new PHI nodes added by PRE. */ |
518 | int phis; |
519 | } pre_stats; |
520 | |
521 | static bool do_partial_partial; |
522 | static pre_expr bitmap_find_leader (bitmap_set_t, unsigned int); |
523 | static void bitmap_value_insert_into_set (bitmap_set_t, pre_expr); |
524 | static bool bitmap_value_replace_in_set (bitmap_set_t, pre_expr); |
525 | static void bitmap_set_copy (bitmap_set_t, bitmap_set_t); |
526 | static bool bitmap_set_contains_value (bitmap_set_t, unsigned int); |
527 | static void bitmap_insert_into_set (bitmap_set_t, pre_expr); |
528 | static bitmap_set_t bitmap_set_new (void); |
529 | static tree create_expression_by_pieces (basic_block, pre_expr, gimple_seq *, |
530 | tree); |
531 | static tree find_or_generate_expression (basic_block, tree, gimple_seq *); |
532 | static unsigned int get_expr_value_id (pre_expr); |
533 | |
534 | /* We can add and remove elements and entries to and from sets |
535 | and hash tables, so we use alloc pools for them. */ |
536 | |
537 | static object_allocator<bitmap_set> bitmap_set_pool ("Bitmap sets"); |
538 | static bitmap_obstack grand_bitmap_obstack; |
539 | |
540 | /* A tuple {e, v} used to cache phi translations; the predecessor |
541 | block is implicit in the per-block phi_translate_table. */ |
542 | |
543 | typedef struct expr_pred_trans_d : public typed_noop_remove <expr_pred_trans_d> |
544 | { |
545 | typedef expr_pred_trans_d value_type; |
546 | typedef expr_pred_trans_d compare_type; |
547 | |
548 | /* The expression ID. */ |
549 | unsigned e; |
550 | |
551 | /* The value expression ID that resulted from the translation. */ |
552 | unsigned v; |
553 | |
554 | /* hash_table support. */ |
555 | static inline void mark_empty (expr_pred_trans_d &); |
556 | static inline bool is_empty (const expr_pred_trans_d &); |
557 | static inline void mark_deleted (expr_pred_trans_d &); |
558 | static inline bool is_deleted (const expr_pred_trans_d &); |
559 | static const bool empty_zero_p = true; |
560 | static inline hashval_t hash (const expr_pred_trans_d &); |
561 | static inline int equal (const expr_pred_trans_d &, const expr_pred_trans_d &); |
562 | } *expr_pred_trans_t; |
563 | typedef const struct expr_pred_trans_d *const_expr_pred_trans_t; |
564 | |
565 | inline bool |
566 | expr_pred_trans_d::is_empty (const expr_pred_trans_d &e) |
567 | { |
568 | return e.e == 0; |
569 | } |
570 | |
571 | inline bool |
572 | expr_pred_trans_d::is_deleted (const expr_pred_trans_d &e) |
573 | { |
574 | return e.e == -1u; |
575 | } |
576 | |
577 | inline void |
578 | expr_pred_trans_d::mark_empty (expr_pred_trans_d &e) |
579 | { |
580 | e.e = 0; |
581 | } |
582 | |
583 | inline void |
584 | expr_pred_trans_d::mark_deleted (expr_pred_trans_d &e) |
585 | { |
586 | e.e = -1u; |
587 | } |
588 | |
589 | inline hashval_t |
590 | expr_pred_trans_d::hash (const expr_pred_trans_d &e) |
591 | { |
592 | return e.e; |
593 | } |
594 | |
595 | inline int |
596 | expr_pred_trans_d::equal (const expr_pred_trans_d &ve1, |
597 | const expr_pred_trans_d &ve2) |
598 | { |
599 | return ve1.e == ve2.e; |
600 | } |
601 | |
602 | /* Sets that we need to keep track of. */ |
603 | typedef struct bb_bitmap_sets |
604 | { |
605 | /* The EXP_GEN set, which represents expressions/values generated in |
606 | a basic block. */ |
607 | bitmap_set_t exp_gen; |
608 | |
609 | /* The PHI_GEN set, which represents PHI results generated in a |
610 | basic block. */ |
611 | bitmap_set_t phi_gen; |
612 | |
613 | /* The TMP_GEN set, which represents results/temporaries generated |
614 | in a basic block. IE the LHS of an expression. */ |
615 | bitmap_set_t tmp_gen; |
616 | |
617 | /* The AVAIL_OUT set, which represents which values are available in |
618 | a given basic block. */ |
619 | bitmap_set_t avail_out; |
620 | |
621 | /* The ANTIC_IN set, which represents which values are anticipatable |
622 | in a given basic block. */ |
623 | bitmap_set_t antic_in; |
624 | |
625 | /* The PA_IN set, which represents which values are |
626 | partially anticipatable in a given basic block. */ |
627 | bitmap_set_t pa_in; |
628 | |
629 | /* The NEW_SETS set, which is used during insertion to augment the |
630 | AVAIL_OUT set of blocks with the new insertions performed during |
631 | the current iteration. */ |
632 | bitmap_set_t new_sets; |
633 | |
634 | /* A cache for value_dies_in_block_x. */ |
635 | bitmap expr_dies; |
636 | |
637 | /* The live virtual operand on successor edges. */ |
638 | tree vop_on_exit; |
639 | |
640 | /* PHI translate cache for the single successor edge. */ |
641 | hash_table<expr_pred_trans_d> *phi_translate_table; |
642 | |
643 | /* True if we have visited this block during ANTIC calculation. */ |
644 | unsigned int visited : 1; |
645 | |
646 | /* True when the block contains a call that might not return. */ |
647 | unsigned int contains_may_not_return_call : 1; |
648 | } *bb_value_sets_t; |
649 | |
650 | #define EXP_GEN(BB) ((bb_value_sets_t) ((BB)->aux))->exp_gen |
651 | #define PHI_GEN(BB) ((bb_value_sets_t) ((BB)->aux))->phi_gen |
652 | #define TMP_GEN(BB) ((bb_value_sets_t) ((BB)->aux))->tmp_gen |
653 | #define AVAIL_OUT(BB) ((bb_value_sets_t) ((BB)->aux))->avail_out |
654 | #define ANTIC_IN(BB) ((bb_value_sets_t) ((BB)->aux))->antic_in |
655 | #define PA_IN(BB) ((bb_value_sets_t) ((BB)->aux))->pa_in |
656 | #define NEW_SETS(BB) ((bb_value_sets_t) ((BB)->aux))->new_sets |
657 | #define EXPR_DIES(BB) ((bb_value_sets_t) ((BB)->aux))->expr_dies |
658 | #define PHI_TRANS_TABLE(BB) ((bb_value_sets_t) ((BB)->aux))->phi_translate_table |
659 | #define BB_VISITED(BB) ((bb_value_sets_t) ((BB)->aux))->visited |
660 | #define BB_MAY_NOTRETURN(BB) ((bb_value_sets_t) ((BB)->aux))->contains_may_not_return_call |
661 | #define BB_LIVE_VOP_ON_EXIT(BB) ((bb_value_sets_t) ((BB)->aux))->vop_on_exit |
662 | |
663 | |
664 | /* Add the tuple mapping from {expression E, basic block PRED} to |
665 | the phi translation table and return whether it pre-existed. */ |
666 | |
667 | static inline bool |
668 | phi_trans_add (expr_pred_trans_t *entry, pre_expr e, basic_block pred) |
669 | { |
670 | if (!PHI_TRANS_TABLE (pred)) |
671 | PHI_TRANS_TABLE (pred) = new hash_table<expr_pred_trans_d> (11); |
672 | |
673 | expr_pred_trans_t slot; |
674 | expr_pred_trans_d tem; |
675 | unsigned id = get_expression_id (e); |
676 | tem.e = id; |
677 | slot = PHI_TRANS_TABLE (pred)->find_slot_with_hash (tem, id, INSERT); |
678 | if (slot->e) |
679 | { |
680 | *entry = slot; |
681 | return true; |
682 | } |
683 | |
684 | *entry = slot; |
685 | slot->e = id; |
686 | return false; |
687 | } |
688 | |
689 | |
690 | /* Add expression E to the expression set of value id V. */ |
691 | |
692 | static void |
693 | add_to_value (unsigned int v, pre_expr e) |
694 | { |
695 | gcc_checking_assert (get_expr_value_id (e) == v); |
696 | |
697 | if (value_id_constant_p (v)) |
698 | { |
699 | if (e->kind != CONSTANT) |
700 | return; |
701 | |
702 | if (-v >= constant_value_expressions.length ()) |
703 | constant_value_expressions.safe_grow_cleared (-v + 1); |
704 | |
705 | pre_expr leader = constant_value_expressions[-v]; |
706 | if (!leader) |
707 | constant_value_expressions[-v] = e; |
708 | } |
709 | else |
710 | { |
711 | if (v >= value_expressions.length ()) |
712 | value_expressions.safe_grow_cleared (v + 1); |
713 | |
714 | bitmap set = value_expressions[v]; |
715 | if (!set) |
716 | { |
717 | set = BITMAP_ALLOC (&grand_bitmap_obstack); |
718 | value_expressions[v] = set; |
719 | } |
720 | bitmap_set_bit (set, get_expression_id (e)); |
721 | } |
722 | } |
723 | |
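/* Note on the indexing above (editor's gloss): constant values get
   negative value ids, so for them -v is a non-negative index; e.g.
   a constant with value id -3 is recorded in
   constant_value_expressions[3], while non-constant value ids index
   value_expressions directly.  */
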
724 | /* Create a new bitmap set and return it. */ |
725 | |
726 | static bitmap_set_t |
727 | bitmap_set_new (void) |
728 | { |
729 | bitmap_set_t ret = bitmap_set_pool.allocate (); |
730 | bitmap_initialize (&ret->expressions, &grand_bitmap_obstack); |
731 | bitmap_initialize (&ret->values, &grand_bitmap_obstack); |
732 | return ret; |
733 | } |
734 | |
735 | /* Return the value id for a PRE expression EXPR. */ |
736 | |
737 | static unsigned int |
738 | get_expr_value_id (pre_expr expr) |
739 | { |
740 | /* ??? We cannot assert that expr has a value-id (it can be 0), because |
741 | we assign value-ids only to expressions that have a result |
742 | in set_hashtable_value_ids. */ |
743 | return expr->value_id; |
744 | } |
745 | |
746 | /* Return a VN valnum (SSA name or constant) for the PRE value-id VAL. */ |
747 | |
748 | static tree |
749 | vn_valnum_from_value_id (unsigned int val) |
750 | { |
751 | if (value_id_constant_p (val)) |
752 | { |
753 | pre_expr vexpr = constant_value_expressions[-val]; |
754 | if (vexpr) |
755 | return PRE_EXPR_CONSTANT (vexpr); |
756 | return NULL_TREE; |
757 | } |
758 | |
759 | bitmap exprset = value_expressions[val]; |
760 | bitmap_iterator bi; |
761 | unsigned int i; |
762 | EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi) |
763 | { |
764 | pre_expr vexpr = expression_for_id (i); |
765 | if (vexpr->kind == NAME) |
766 | return VN_INFO (PRE_EXPR_NAME (vexpr))->valnum; |
767 | } |
768 | return NULL_TREE; |
769 | } |
770 | |
771 | /* Insert an expression EXPR into a bitmapped set. */ |
772 | |
773 | static void |
774 | bitmap_insert_into_set (bitmap_set_t set, pre_expr expr) |
775 | { |
776 | unsigned int val = get_expr_value_id (expr); |
777 | if (! value_id_constant_p (val)) |
778 | { |
779 | /* Note this is the only function causing multiple expressions |
780 | for the same value to appear in a set. This is needed for |
781 | TMP_GEN, PHI_GEN and NEW_SETs. */ |
782 | bitmap_set_bit (&set->values, val); |
783 | bitmap_set_bit (&set->expressions, get_expression_id (expr)); |
784 | } |
785 | } |
786 | |
787 | /* Copy a bitmapped set ORIG, into bitmapped set DEST. */ |
788 | |
789 | static void |
790 | bitmap_set_copy (bitmap_set_t dest, bitmap_set_t orig) |
791 | { |
792 | bitmap_copy (&dest->expressions, &orig->expressions); |
793 | bitmap_copy (&dest->values, &orig->values); |
794 | } |
795 | |
796 | |
797 | /* Free memory used up by SET. */ |
798 | static void |
799 | bitmap_set_free (bitmap_set_t set) |
800 | { |
801 | bitmap_clear (&set->expressions); |
802 | bitmap_clear (&set->values); |
803 | } |
804 | |
805 | static void |
806 | pre_expr_DFS (pre_expr expr, bitmap_set_t set, bitmap val_visited, |
807 | vec<pre_expr> &post); |
808 | |
809 | /* DFS walk leaders of VAL to their operands with leaders in SET, collecting |
810 | expressions in SET in postorder into POST. */ |
811 | |
812 | static void |
813 | pre_expr_DFS (unsigned val, bitmap_set_t set, bitmap val_visited, |
814 | vec<pre_expr> &post) |
815 | { |
816 | unsigned int i; |
817 | bitmap_iterator bi; |
818 | |
819 | /* Iterate over all leaders and DFS recurse. Borrowed from |
820 | bitmap_find_leader. */ |
821 | bitmap exprset = value_expressions[val]; |
822 | if (!exprset->first->next) |
823 | { |
824 | EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi) |
825 | if (bitmap_bit_p (&set->expressions, i)) |
826 | pre_expr_DFS (expression_for_id (i), set, val_visited, post); |
827 | return; |
828 | } |
829 | |
830 | EXECUTE_IF_AND_IN_BITMAP (exprset, &set->expressions, 0, i, bi) |
831 | pre_expr_DFS (expression_for_id (i), set, val_visited, post); |
832 | } |
833 | |
834 | /* DFS walk EXPR to its operands with leaders in SET, collecting |
835 | expressions in SET in postorder into POST. */ |
836 | |
837 | static void |
838 | pre_expr_DFS (pre_expr expr, bitmap_set_t set, bitmap val_visited, |
839 | vec<pre_expr> &post) |
840 | { |
841 | switch (expr->kind) |
842 | { |
843 | case NARY: |
844 | { |
845 | vn_nary_op_t nary = PRE_EXPR_NARY (expr); |
846 | for (unsigned i = 0; i < nary->length; i++) |
847 | { |
848 | if (TREE_CODE (nary->op[i]) != SSA_NAME) |
849 | continue; |
850 | unsigned int op_val_id = VN_INFO (nary->op[i])->value_id; |
851 | /* Recurse only for values that have a leader in SET and were |
852 | not visited yet; this avoids costly bitmap_find_leader calls. */ |
853 | if (bitmap_bit_p (&set->values, op_val_id) |
854 | && bitmap_set_bit (val_visited, op_val_id)) |
855 | pre_expr_DFS (op_val_id, set, val_visited, post); |
856 | } |
857 | break; |
858 | } |
859 | case REFERENCE: |
860 | { |
861 | vn_reference_t ref = PRE_EXPR_REFERENCE (expr); |
862 | vec<vn_reference_op_s> operands = ref->operands; |
863 | vn_reference_op_t operand; |
864 | for (unsigned i = 0; operands.iterate (i, &operand); i++) |
865 | { |
866 | tree op[3]; |
867 | op[0] = operand->op0; |
868 | op[1] = operand->op1; |
869 | op[2] = operand->op2; |
870 | for (unsigned n = 0; n < 3; ++n) |
871 | { |
872 | if (!op[n] || TREE_CODE (op[n]) != SSA_NAME) |
873 | continue; |
874 | unsigned op_val_id = VN_INFO (op[n])->value_id; |
875 | if (bitmap_bit_p (&set->values, op_val_id) |
876 | && bitmap_set_bit (val_visited, op_val_id)) |
877 | pre_expr_DFS (op_val_id, set, val_visited, post); |
878 | } |
879 | } |
880 | break; |
881 | } |
882 | default:; |
883 | } |
884 | post.quick_push (expr); |
885 | } |
886 | |
887 | /* Generate a topologically-ordered array of bitmap set SET. */ |
888 | |
889 | static vec<pre_expr> |
890 | sorted_array_from_bitmap_set (bitmap_set_t set) |
891 | { |
892 | unsigned int i; |
893 | bitmap_iterator bi; |
894 | vec<pre_expr> result; |
895 | |
896 | /* Pre-allocate enough space for the array. */ |
897 | result.create (bitmap_count_bits (&set->expressions)); |
898 | |
899 | auto_bitmap val_visited (&grand_bitmap_obstack); |
900 | bitmap_tree_view (val_visited); |
901 | FOR_EACH_VALUE_ID_IN_SET (set, i, bi) |
902 | if (bitmap_set_bit (val_visited, i)) |
903 | pre_expr_DFS (i, set, val_visited, result); |
904 | |
905 | return result; |
906 | } |
907 | |
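/* Why postorder matters (editor's gloss): for a set containing
   a_1 + b_2 and x_3 * (a_1 + b_2), the DFS above emits the leader
   of a_1 + b_2 before the multiplication using its value, so
   consumers see every expression only after its operands.  */
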
908 | /* Subtract all expressions contained in ORIG from DEST. */ |
909 | |
910 | static bitmap_set_t |
911 | bitmap_set_subtract_expressions (bitmap_set_t dest, bitmap_set_t orig) |
912 | { |
913 | bitmap_set_t result = bitmap_set_new (); |
914 | bitmap_iterator bi; |
915 | unsigned int i; |
916 | |
917 | bitmap_and_compl (&result->expressions, &dest->expressions, |
918 | &orig->expressions); |
919 | |
920 | FOR_EACH_EXPR_ID_IN_SET (result, i, bi) |
921 | { |
922 | pre_expr expr = expression_for_id (i); |
923 | unsigned int value_id = get_expr_value_id (expr); |
924 | bitmap_set_bit (&result->values, value_id); |
925 | } |
926 | |
927 | return result; |
928 | } |
929 | |
930 | /* Subtract all values in bitmap set B from bitmap set A. */ |
931 | |
932 | static void |
933 | bitmap_set_subtract_values (bitmap_set_t a, bitmap_set_t b) |
934 | { |
935 | unsigned int i; |
936 | bitmap_iterator bi; |
937 | unsigned to_remove = -1U; |
938 | bitmap_and_compl_into (&a->values, &b->values); |
939 | FOR_EACH_EXPR_ID_IN_SET (a, i, bi) |
940 | { |
941 | if (to_remove != -1U) |
942 | { |
943 | bitmap_clear_bit (&a->expressions, to_remove); |
944 | to_remove = -1U; |
945 | } |
946 | pre_expr expr = expression_for_id (i); |
947 | if (! bitmap_bit_p (&a->values, get_expr_value_id (expr))) |
948 | to_remove = i; |
949 | } |
950 | if (to_remove != -1U) |
951 | bitmap_clear_bit (&a->expressions, to_remove); |
952 | } |
953 | |
954 | |
955 | /* Return true if bitmapped set SET contains the value VALUE_ID. */ |
956 | |
957 | static bool |
958 | bitmap_set_contains_value (bitmap_set_t set, unsigned int value_id) |
959 | { |
960 | if (value_id_constant_p (value_id)) |
961 | return true; |
962 | |
963 | return bitmap_bit_p (&set->values, value_id); |
964 | } |
965 | |
966 | /* Return true if two bitmap sets are equal. */ |
967 | |
968 | static bool |
969 | bitmap_set_equal (bitmap_set_t a, bitmap_set_t b) |
970 | { |
971 | return bitmap_equal_p (&a->values, &b->values); |
972 | } |
973 | |
974 | /* Replace an instance of EXPR's VALUE with EXPR in SET if it exists, |
975 | and add it otherwise. Return true if any changes were made. */ |
976 | |
977 | static bool |
978 | bitmap_value_replace_in_set (bitmap_set_t set, pre_expr expr) |
979 | { |
980 | unsigned int val = get_expr_value_id (expr); |
981 | if (value_id_constant_p (val)) |
982 | return false; |
983 | |
984 | if (bitmap_set_contains_value (set, val)) |
985 | { |
986 | /* The number of expressions having a given value is usually |
987 | significantly less than the total number of expressions in SET. |
988 | Thus, rather than check, for each expression in SET, whether it |
989 | has the value LOOKFOR, we walk the reverse mapping that tells us |
990 | what expressions have a given value, and see if any of those |
991 | expressions are in our set. For large testcases, this is about |
992 | 5-10x faster than walking the bitmap. If this is somehow a |
993 | significant loss for some cases, we can choose which set to walk |
994 | based on the set size. */ |
995 | unsigned int i; |
996 | bitmap_iterator bi; |
997 | bitmap exprset = value_expressions[val]; |
998 | EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi) |
999 | { |
1000 | if (bitmap_clear_bit (&set->expressions, i)) |
1001 | { |
1002 | bitmap_set_bit (&set->expressions, get_expression_id (expr)); |
1003 | return i != get_expression_id (expr); |
1004 | } |
1005 | } |
1006 | gcc_unreachable (); |
1007 | } |
1008 | |
1009 | bitmap_insert_into_set (set, expr); |
1010 | return true; |
1011 | } |
1012 | |
1013 | /* Insert EXPR into SET if EXPR's value is not already present in |
1014 | SET. */ |
1015 | |
1016 | static void |
1017 | bitmap_value_insert_into_set (bitmap_set_t set, pre_expr expr) |
1018 | { |
1019 | unsigned int val = get_expr_value_id (expr); |
1020 | |
1021 | gcc_checking_assert (expr->id == get_expression_id (expr)); |
1022 | |
1023 | /* Constant values are always considered to be part of the set. */ |
1024 | if (value_id_constant_p (val)) |
1025 | return; |
1026 | |
1027 | /* If the value membership changed, add the expression. */ |
1028 | if (bitmap_set_bit (&set->values, val)) |
1029 | bitmap_set_bit (&set->expressions, expr->id); |
1030 | } |
1031 | |
1032 | /* Print out EXPR to outfile. */ |
1033 | |
1034 | static void |
1035 | print_pre_expr (FILE *outfile, const pre_expr expr) |
1036 | { |
1037 | if (! expr) |
1038 | { |
1039 | fprintf (outfile, "NULL"); |
1040 | return; |
1041 | } |
1042 | switch (expr->kind) |
1043 | { |
1044 | case CONSTANT: |
1045 | print_generic_expr (outfile, PRE_EXPR_CONSTANT (expr)); |
1046 | break; |
1047 | case NAME: |
1048 | print_generic_expr (outfile, PRE_EXPR_NAME (expr)); |
1049 | break; |
1050 | case NARY: |
1051 | { |
1052 | unsigned int i; |
1053 | vn_nary_op_t nary = PRE_EXPR_NARY (expr); |
1054 | fprintf (outfile, "{%s,", get_tree_code_name (nary->opcode)); |
1055 | for (i = 0; i < nary->length; i++) |
1056 | { |
1057 | print_generic_expr (outfile, nary->op[i]); |
1058 | if (i != (unsigned) nary->length - 1) |
1059 | fprintf (outfile, ","); |
1060 | } |
1061 | fprintf (outfile, "}"); |
1062 | } |
1063 | break; |
1064 | |
1065 | case REFERENCE: |
1066 | { |
1067 | vn_reference_t ref = PRE_EXPR_REFERENCE (expr); |
1068 | print_vn_reference_ops (outfile, ref->operands); |
1069 | if (ref->vuse) |
1070 | { |
1071 | fprintf (outfile, "@"); |
1072 | print_generic_expr (outfile, ref->vuse); |
1073 | } |
1074 | } |
1075 | break; |
1076 | } |
1077 | } |
1078 | void debug_pre_expr (pre_expr); |
1079 | |
1080 | /* Like print_pre_expr but always prints to stderr. */ |
1081 | DEBUG_FUNCTION void |
1082 | debug_pre_expr (pre_expr e) |
1083 | { |
1084 | print_pre_expr (stderr, e); |
1085 | fprintf (stderr, "\n"); |
1086 | } |
1087 | |
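/* Usage sketch (editor's illustration): from a debugger session,

     (gdb) call debug_pre_expr (expr)

   prints the expression to stderr independent of dump files.  */
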
1088 | /* Print out SET to OUTFILE. */ |
1089 | |
1090 | static void |
1091 | print_bitmap_set (FILE *outfile, bitmap_set_t set, |
1092 | const char *setname, int blockindex) |
1093 | { |
1094 | fprintf (outfile, "%s[%d] := { ", setname, blockindex); |
1095 | if (set) |
1096 | { |
1097 | bool first = true; |
1098 | unsigned i; |
1099 | bitmap_iterator bi; |
1100 | |
1101 | FOR_EACH_EXPR_ID_IN_SET (set, i, bi) |
1102 | { |
1103 | const pre_expr expr = expression_for_id (i); |
1104 | |
1105 | if (!first) |
1106 | fprintf (outfile, ", "); |
1107 | first = false; |
1108 | print_pre_expr (outfile, expr); |
1109 | |
1110 | fprintf (outfile, " (%04d)", get_expr_value_id (expr)); |
1111 | } |
1112 | } |
1113 | fprintf (outfile, " }\n"); |
1114 | } |
1115 | |
1116 | void debug_bitmap_set (bitmap_set_t); |
1117 | |
1118 | DEBUG_FUNCTION void |
1119 | debug_bitmap_set (bitmap_set_t set) |
1120 | { |
1121 | print_bitmap_set (stderr, set, "debug", 0); |
1122 | } |
1123 | |
1124 | void debug_bitmap_sets_for (basic_block); |
1125 | |
1126 | DEBUG_FUNCTION void |
1127 | debug_bitmap_sets_for (basic_block bb) |
1128 | { |
1129 | print_bitmap_set (stderr, AVAIL_OUT (bb), "avail_out", bb->index); |
1130 | print_bitmap_set (stderr, EXP_GEN (bb), "exp_gen", bb->index); |
1131 | print_bitmap_set (stderr, PHI_GEN (bb), "phi_gen", bb->index); |
1132 | print_bitmap_set (stderr, TMP_GEN (bb), "tmp_gen", bb->index); |
1133 | print_bitmap_set (stderr, ANTIC_IN (bb), "antic_in", bb->index); |
1134 | if (do_partial_partial) |
1135 | print_bitmap_set (stderr, PA_IN (bb), "pa_in", bb->index); |
1136 | print_bitmap_set (stderr, NEW_SETS (bb), "new_sets", bb->index); |
1137 | } |
1138 | |
1139 | /* Print out the expressions that have VAL to OUTFILE. */ |
1140 | |
1141 | static void |
1142 | print_value_expressions (FILE *outfile, unsigned int val) |
1143 | { |
1144 | bitmap set = value_expressions[val]; |
1145 | if (set) |
1146 | { |
1147 | bitmap_set x; |
1148 | char s[10]; |
1149 | sprintf (s, "%04d", val); |
1150 | x.expressions = *set; |
1151 | print_bitmap_set (outfile, &x, s, 0); |
1152 | } |
1153 | } |
1154 | |
1155 | |
1156 | DEBUG_FUNCTION void |
1157 | debug_value_expressions (unsigned int val) |
1158 | { |
1159 | print_value_expressions (stderr, val); |
1160 | } |
1161 | |
1162 | /* Given a CONSTANT, allocate a new CONSTANT type PRE_EXPR to |
1163 | represent it. */ |
1164 | |
1165 | static pre_expr |
1166 | get_or_alloc_expr_for_constant (tree constant) |
1167 | { |
1168 | unsigned int result_id; |
1169 | struct pre_expr_d expr; |
1170 | pre_expr newexpr; |
1171 | |
1172 | expr.kind = CONSTANT; |
1173 | PRE_EXPR_CONSTANT (&expr) = constant; |
1174 | result_id = lookup_expression_id (&expr); |
1175 | if (result_id != 0) |
1176 | return expression_for_id (result_id); |
1177 | |
1178 | newexpr = pre_expr_pool.allocate (); |
1179 | newexpr->kind = CONSTANT; |
1180 | newexpr->loc = UNKNOWN_LOCATION; |
1181 | PRE_EXPR_CONSTANT (newexpr) = constant; |
1182 | alloc_expression_id (newexpr); |
1183 | newexpr->value_id = get_or_alloc_constant_value_id (constant); |
1184 | add_to_value (newexpr->value_id, newexpr); |
1185 | return newexpr; |
1186 | } |
1187 | |
1188 | /* Return the folded version of E if E, when folded, is a gimple |
1189 | min_invariant or an SSA name. Otherwise, return E. */ |
1190 | |
1191 | static pre_expr |
1192 | fully_constant_expression (pre_expr e) |
1193 | { |
1194 | switch (e->kind) |
1195 | { |
1196 | case CONSTANT: |
1197 | return e; |
1198 | case NARY: |
1199 | { |
1200 | vn_nary_op_t nary = PRE_EXPR_NARY (e); |
1201 | tree res = vn_nary_simplify (nary); |
1202 | if (!res) |
1203 | return e; |
1204 | if (is_gimple_min_invariant (res)) |
1205 | return get_or_alloc_expr_for_constant (res); |
1206 | if (TREE_CODE (res) == SSA_NAME) |
1207 | return get_or_alloc_expr_for_name (res); |
1208 | return e; |
1209 | } |
1210 | case REFERENCE: |
1211 | { |
1212 | vn_reference_t ref = PRE_EXPR_REFERENCE (e); |
1213 | tree folded; |
1214 | if ((folded = fully_constant_vn_reference_p (ref))) |
1215 | return get_or_alloc_expr_for_constant (folded); |
1216 | return e; |
1217 | } |
1218 | default: |
1219 | return e; |
1220 | } |
1221 | } |
1222 | |
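/* Small example (editor's illustration): a NARY {plus_expr,3,4}
   simplifies via vn_nary_simplify to the constant 7 and yields the
   CONSTANT pre_expr for 7, while {plus_expr,x_1,0} may simplify to
   the SSA name x_1 and yields its NAME pre_expr.  */
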
1223 | /* Translate the VUSE backwards through phi nodes in E->dest, so that |
1224 | it has the value it would have in E->src. Set *SAME_VALID to true |
1225 | in case the new vuse doesn't change the value id of the OPERANDS. */ |
1226 | |
1227 | static tree |
1228 | translate_vuse_through_block (vec<vn_reference_op_s> operands, |
1229 | alias_set_type set, alias_set_type base_set, |
1230 | tree type, tree vuse, edge e, bool *same_valid) |
1231 | { |
1232 | basic_block phiblock = e->dest; |
1233 | gimple *phi = SSA_NAME_DEF_STMT (vuse); |
1234 | ao_ref ref; |
1235 | |
1236 | if (same_valid) |
1237 | *same_valid = true; |
1238 | |
1239 | /* If value-numbering provided a memory state for this |
1240 | that dominates PHIBLOCK we can just use that. */ |
1241 | if (gimple_nop_p (phi) |
1242 | || (gimple_bb (phi) != phiblock |
1243 | && dominated_by_p (CDI_DOMINATORS, phiblock, gimple_bb (phi)))) |
1244 | return vuse; |
1245 | |
1246 | /* We have pruned expressions that are killed in PHIBLOCK via |
1247 | prune_clobbered_mems but we have not rewritten the VUSE to the one |
1248 | live at the start of the block. If there is no virtual PHI to translate |
1249 | through return the VUSE live at entry. Otherwise the VUSE to translate |
1250 | is the def of the virtual PHI node. */ |
1251 | phi = get_virtual_phi (phiblock); |
1252 | if (!phi) |
1253 | return BB_LIVE_VOP_ON_EXIT |
1254 | (get_immediate_dominator (CDI_DOMINATORS, phiblock)); |
1255 | |
1256 | if (same_valid |
1257 | && ao_ref_init_from_vn_reference (&ref, set, base_set, type, operands)) |
1258 | { |
1259 | bitmap visited = NULL; |
1260 | /* Try to find a vuse that dominates this phi node by skipping |
1261 | non-clobbering statements. */ |
1262 | unsigned int cnt = param_sccvn_max_alias_queries_per_access; |
1263 | vuse = get_continuation_for_phi (phi, &ref, true, |
1264 | cnt, &visited, false, NULL, NULL); |
1265 | if (visited) |
1266 | BITMAP_FREE (visited); |
1267 | } |
1268 | else |
1269 | vuse = NULL_TREE; |
1270 | /* If we didn't find any, the value ID can't stay the same. */ |
1271 | if (!vuse && same_valid) |
1272 | *same_valid = false; |
1273 | |
1274 | /* ??? We would like to return vuse here as this is the canonical |
1275 | upmost vdef that this reference is associated with. But during |
1276 | insertion of the references into the hash tables we only ever |
1277 | directly insert with their direct gimple_vuse, hence returning |
1278 | something else would make us not find the other expression. */ |
1279 | return PHI_ARG_DEF (phi, e->dest_idx); |
1280 | } |
1281 | |
1282 | /* Like bitmap_find_leader, but checks for the value existing in SET1 *or* |
1283 | SET2 *or* SET3. This is used to avoid making a set consisting of the union |
1284 | of PA_IN and ANTIC_IN during insert and phi-translation. */ |
1285 | |
1286 | static inline pre_expr |
1287 | find_leader_in_sets (unsigned int val, bitmap_set_t set1, bitmap_set_t set2, |
1288 | bitmap_set_t set3 = NULL) |
1289 | { |
1290 | pre_expr result = NULL; |
1291 | |
1292 | if (set1) |
1293 | result = bitmap_find_leader (set1, val); |
1294 | if (!result && set2) |
1295 | result = bitmap_find_leader (set2, val); |
1296 | if (!result && set3) |
1297 | result = bitmap_find_leader (set3, val); |
1298 | return result; |
1299 | } |
1300 | |
1301 | /* Get the tree type for our PRE expression e. */ |
1302 | |
1303 | static tree |
1304 | get_expr_type (const pre_expr e) |
1305 | { |
1306 | switch (e->kind) |
1307 | { |
1308 | case NAME: |
1309 | return TREE_TYPE (PRE_EXPR_NAME (e)); |
1310 | case CONSTANT: |
1311 | return TREE_TYPE (PRE_EXPR_CONSTANT (e)); |
1312 | case REFERENCE: |
1313 | return PRE_EXPR_REFERENCE (e)->type; |
1314 | case NARY: |
1315 | return PRE_EXPR_NARY (e)->type; |
1316 | } |
1317 | gcc_unreachable (); |
1318 | } |
1319 | |
1320 | /* Get a representative SSA_NAME for a given expression that is available in B. |
1321 | Since all of our sub-expressions are treated as values, we require |
1322 | them to be SSA_NAME's for simplicity. |
1323 | Prior versions of GVNPRE used to use "value handles" here, so that |
1324 | an expression would be VH.11 + VH.10 instead of d_3 + e_6. In |
1325 | either case, the operands are really values (IE we do not expect |
1326 | them to be usable without finding leaders). */ |
1327 | |
1328 | static tree |
1329 | get_representative_for (const pre_expr e, basic_block b = NULL) |
1330 | { |
1331 | tree name, valnum = NULL_TREE; |
1332 | unsigned int value_id = get_expr_value_id (e); |
1333 | |
1334 | switch (e->kind) |
1335 | { |
1336 | case NAME: |
1337 | return PRE_EXPR_NAME (e); |
1338 | case CONSTANT: |
1339 | return PRE_EXPR_CONSTANT (e); |
1340 | case NARY: |
1341 | case REFERENCE: |
1342 | { |
1343 | /* Go through all of the expressions representing this value |
1344 | and pick out an SSA_NAME. */ |
1345 | unsigned int i; |
1346 | bitmap_iterator bi; |
1347 | bitmap exprs = value_expressions[value_id]; |
1348 | EXECUTE_IF_SET_IN_BITMAP (exprs, 0, i, bi) |
1349 | { |
1350 | pre_expr rep = expression_for_id (i); |
1351 | if (rep->kind == NAME) |
1352 | { |
1353 | tree name = PRE_EXPR_NAME (rep); |
1354 | valnum = VN_INFO (name)->valnum; |
1355 | gimple *def = SSA_NAME_DEF_STMT (name); |
1356 | /* We have to return either a new representative or one |
1357 | that can be used for expression simplification and thus |
1358 | is available in B. */ |
1359 | if (! b |
1360 | || gimple_nop_p (def) |
1361 | || dominated_by_p (CDI_DOMINATORS, b, gimple_bb (def))) |
1362 | return name; |
1363 | } |
1364 | else if (rep->kind == CONSTANT) |
1365 | return PRE_EXPR_CONSTANT (rep); |
1366 | } |
1367 | } |
1368 | break; |
1369 | } |
1370 | |
1371 | /* If we reached here we couldn't find an SSA_NAME. This can |
1372 | happen when we've discovered a value that has never appeared in |
1373 | the program as set to an SSA_NAME, as the result of phi translation. |
1374 | Create one here. |
1375 | ??? We should be able to re-use this when we insert the statement |
1376 | to compute it. */ |
1377 | name = make_temp_ssa_name (get_expr_type (e), gimple_build_nop (), "pretmp"); |
1378 | vn_ssa_aux_t vn_info = VN_INFO (name); |
1379 | vn_info->value_id = value_id; |
1380 | vn_info->valnum = valnum ? valnum : name; |
1381 | vn_info->visited = true; |
1382 | /* ??? For now mark this SSA name for release by VN. */ |
1383 | vn_info->needs_insertion = true; |
1384 | add_to_value (value_id, get_or_alloc_expr_for_name (name)); |
1385 | if (dump_file && (dump_flags & TDF_DETAILS)) |
1386 | { |
1387 | fprintf (dump_file, "Created SSA_NAME representative "); |
1388 | print_generic_expr (dump_file, name); |
1389 | fprintf (dump_file, " for expression:"); |
1390 | print_pre_expr (dump_file, e); |
1391 | fprintf (dump_file, " (%04d)\n", value_id); |
1392 | } |
1393 | |
1394 | return name; |
1395 | } |
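| /* Illustrative sketch (made-up names and ids): if the value of |
| a_2 + 1 was discovered only by phi translation, no SSA name in the |
| IL carries it yet, so the code above fabricates a fresh pretmp_9, |
| gives it the expression's value_id and registers it with |
| add_to_value so later simplification and insertion can use it. */ |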
1396 | |
1397 | |
1398 | static pre_expr |
1399 | phi_translate (bitmap_set_t, pre_expr, bitmap_set_t, bitmap_set_t, edge); |
1400 | |
1401 | /* Translate EXPR using phis in PHIBLOCK, so that it has the values of |
1402 | the phis in PRED. Return NULL if we can't find a leader for each part |
1403 | of the translated expression. */ |
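| /* A minimal sketch (hypothetical blocks and SSA names): given |
| |
| <bb 4>: a_1 = PHI <b_2(3), c_3(5)> |
| |
| translating the NARY {plus_expr, a_1, 1} from bb 4 along the edge |
| 3->4 substitutes the PHI argument for the PHI result, yielding |
| {plus_expr, b_2, 1}; along 5->4 it yields {plus_expr, c_3, 1}. */ |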
1404 | |
1405 | static pre_expr |
1406 | phi_translate_1 (bitmap_set_t dest, |
1407 | pre_expr expr, bitmap_set_t set1, bitmap_set_t set2, edge e) |
1408 | { |
1409 | basic_block pred = e->src; |
1410 | basic_block phiblock = e->dest; |
1411 | location_t expr_loc = expr->loc; |
1412 | switch (expr->kind) |
1413 | { |
1414 | case NARY: |
1415 | { |
1416 | unsigned int i; |
1417 | bool changed = false; |
1418 | vn_nary_op_t nary = PRE_EXPR_NARY (expr); |
1419 | vn_nary_op_t newnary = XALLOCAVAR (struct vn_nary_op_s, |
1420 | sizeof_vn_nary_op (nary->length)); |
1421 | memcpy (newnary, nary, sizeof_vn_nary_op (nary->length)); |
1422 | |
1423 | for (i = 0; i < newnary->length; i++) |
1424 | { |
1425 | if (TREE_CODE (newnary->op[i]) != SSA_NAME) |
1426 | continue; |
1427 | else |
1428 | { |
1429 | pre_expr leader, result; |
1430 | unsigned int op_val_id = VN_INFO (newnary->op[i])->value_id; |
1431 | leader = find_leader_in_sets (op_val_id, set1, set2); |
1432 | result = phi_translate (dest, leader, set1, set2, e); |
1433 | if (result && result != leader) |
1434 | /* If op has a leader in the sets we translate make |
1435 | sure to use the value of the translated expression. |
1436 | We might need a new representative for that. */ |
1437 | newnary->op[i] = get_representative_for (result, pred); |
1438 | else if (!result) |
1439 | return NULL; |
1440 | |
1441 | changed |= newnary->op[i] != nary->op[i]; |
1442 | } |
1443 | } |
1444 | if (changed) |
1445 | { |
1446 | pre_expr constant; |
1447 | unsigned int new_val_id; |
1448 | |
1449 | PRE_EXPR_NARY (expr) = newnary; |
1450 | constant = fully_constant_expression (expr); |
1451 | PRE_EXPR_NARY (expr) = nary; |
1452 | if (constant != expr) |
1453 | { |
1454 | /* For non-CONSTANTs we have to make sure we can eventually |
1455 | insert the expression. Which means we need to have a |
1456 | leader for it. */ |
1457 | if (constant->kind != CONSTANT) |
1458 | { |
1459 | /* Do not allow simplifications to non-constants over |
1460 | backedges as this will likely result in a loop PHI node |
1461 | to be inserted and increased register pressure. |
1462 | See PR77498 - this avoids doing predcoms work in |
1463 | a less efficient way. */ |
1464 | if (e->flags & EDGE_DFS_BACK) |
1465 | ; |
1466 | else |
1467 | { |
1468 | unsigned value_id = get_expr_value_id (constant); |
1469 | /* We want a leader in ANTIC_OUT or AVAIL_OUT here. |
1470 | dest has what we computed into ANTIC_OUT so far |
1471 | so pick from that - since topological sorting |
1472 | by sorted_array_from_bitmap_set isn't perfect |
1473 | we may lose some cases here. */ |
1474 | constant = find_leader_in_sets (value_id, dest, |
1475 | AVAIL_OUT (pred)); |
1476 | if (constant) |
1477 | { |
1478 | if (dump_file && (dump_flags & TDF_DETAILS)) |
1479 | { |
1480 | fprintf (dump_file, "simplifying "); |
1481 | print_pre_expr (dump_file, expr); |
1482 | fprintf (dump_file, " translated %d -> %d to ", |
1483 | phiblock->index, pred->index); |
1484 | PRE_EXPR_NARY (expr) = newnary; |
1485 | print_pre_expr (dump_file, expr); |
1486 | PRE_EXPR_NARY (expr) = nary; |
1487 | fprintf (dump_file, " to "); |
1488 | print_pre_expr (dump_file, constant); |
1489 | fprintf (dump_file, "\n"); |
1490 | } |
1491 | return constant; |
1492 | } |
1493 | } |
1494 | } |
1495 | else |
1496 | return constant; |
1497 | } |
1498 | |
1499 | tree result = vn_nary_op_lookup_pieces (newnary->length, |
1500 | newnary->opcode, |
1501 | newnary->type, |
1502 | &newnary->op[0], |
1503 | &nary); |
1504 | if (result && is_gimple_min_invariant (result)) |
1505 | return get_or_alloc_expr_for_constant (result); |
1506 | |
1507 | if (!nary || nary->predicated_values) |
1508 | new_val_id = 0; |
1509 | else |
1510 | new_val_id = nary->value_id; |
1511 | expr = get_or_alloc_expr_for_nary (newnary, new_val_id, expr_loc); |
1512 | add_to_value (get_expr_value_id (expr), expr); |
1513 | } |
1514 | return expr; |
1515 | } |
1516 | break; |
1517 | |
1518 | case REFERENCE: |
1519 | { |
1520 | vn_reference_t ref = PRE_EXPR_REFERENCE (expr); |
1521 | vec<vn_reference_op_s> operands = ref->operands; |
1522 | tree vuse = ref->vuse; |
1523 | tree newvuse = vuse; |
1524 | vec<vn_reference_op_s> newoperands = vNULL; |
1525 | bool changed = false, same_valid = true; |
1526 | unsigned int i, n; |
1527 | vn_reference_op_t operand; |
1528 | vn_reference_t newref; |
1529 | |
1530 | for (i = 0; operands.iterate (i, &operand); i++) |
1531 | { |
1532 | pre_expr opresult; |
1533 | pre_expr leader; |
1534 | tree op[3]; |
1535 | tree type = operand->type; |
1536 | vn_reference_op_s newop = *operand; |
1537 | op[0] = operand->op0; |
1538 | op[1] = operand->op1; |
1539 | op[2] = operand->op2; |
1540 | for (n = 0; n < 3; ++n) |
1541 | { |
1542 | unsigned int op_val_id; |
1543 | if (!op[n]) |
1544 | continue; |
1545 | if (TREE_CODE (op[n]) != SSA_NAME) |
1546 | { |
1547 | /* We can't possibly insert these. */ |
1548 | if (n != 0 |
1549 | && !is_gimple_min_invariant (op[n])) |
1550 | break; |
1551 | continue; |
1552 | } |
1553 | op_val_id = VN_INFO (op[n])->value_id; |
1554 | leader = find_leader_in_sets (op_val_id, set1, set2); |
1555 | opresult = phi_translate (dest, leader, set1, set2, e); |
1556 | if (opresult && opresult != leader) |
1557 | { |
1558 | tree name = get_representative_for (opresult); |
1559 | changed |= name != op[n]; |
1560 | op[n] = name; |
1561 | } |
1562 | else if (!opresult) |
1563 | break; |
1564 | } |
1565 | if (n != 3) |
1566 | { |
1567 | newoperands.release (); |
1568 | return NULL; |
1569 | } |
1570 | /* When we translate a MEM_REF across a backedge and we have |
1571 | restrict info that's not from our function's parameters |
1572 | we have to remap it since we now may deal with a different |
1573 | instance where the dependence info is no longer valid. |
1574 | See PR102970. Note instead of keeping a remapping table |
1575 | per backedge we simply throw away restrict info. */ |
1576 | if ((newop.opcode == MEM_REF |
1577 | || newop.opcode == TARGET_MEM_REF) |
1578 | && newop.clique > 1 |
1579 | && (e->flags & EDGE_DFS_BACK)) |
1580 | { |
1581 | newop.clique = 0; |
1582 | newop.base = 0; |
1583 | changed = true; |
1584 | } |
1585 | if (!changed) |
1586 | continue; |
1587 | if (!newoperands.exists ()) |
1588 | newoperands = operands.copy (); |
1589 | /* We may have changed from an SSA_NAME to a constant. */ |
1590 | if (newop.opcode == SSA_NAME && TREE_CODE (op[0]) != SSA_NAME) |
1591 | newop.opcode = TREE_CODE (op[0]); |
1592 | newop.type = type; |
1593 | newop.op0 = op[0]; |
1594 | newop.op1 = op[1]; |
1595 | newop.op2 = op[2]; |
1596 | newoperands[i] = newop; |
1597 | } |
1598 | gcc_checking_assert (i == operands.length ()); |
1599 | |
1600 | if (vuse) |
1601 | { |
1602 | newvuse = translate_vuse_through_block (newoperands.exists () |
1603 | ? newoperands : operands, |
1604 | ref->set, ref->base_set, |
1605 | ref->type, vuse, e, |
1606 | changed |
1607 | ? NULL : &same_valid); |
1608 | if (newvuse == NULL_TREE) |
1609 | { |
1610 | newoperands.release (); |
1611 | return NULL; |
1612 | } |
1613 | } |
1614 | |
1615 | if (changed || newvuse != vuse) |
1616 | { |
1617 | unsigned int new_val_id; |
1618 | |
1619 | tree result = vn_reference_lookup_pieces (newvuse, ref->set, |
1620 | ref->base_set, |
1621 | ref->type, |
1622 | newoperands.exists () |
1623 | ? newoperands : operands, |
1624 | &newref, VN_WALK); |
1625 | if (result) |
1626 | newoperands.release (); |
1627 | |
1628 | /* We can always insert constants, so if we have a partial |
1629 | redundant constant load of another type try to translate it |
1630 | to a constant of appropriate type. */ |
1631 | if (result && is_gimple_min_invariant (result)) |
1632 | { |
1633 | tree tem = result; |
1634 | if (!useless_type_conversion_p (ref->type, TREE_TYPE (result))) |
1635 | { |
1636 | tem = fold_unary (VIEW_CONVERT_EXPR, ref->type, result); |
1637 | if (tem && !is_gimple_min_invariant (tem)) |
1638 | tem = NULL_TREE; |
1639 | } |
1640 | if (tem) |
1641 | return get_or_alloc_expr_for_constant (tem); |
1642 | } |
1643 | |
1644 | /* If we'd have to convert things we would need to validate |
1645 | if we can insert the translated expression. So fail |
1646 | here for now - we cannot insert an alias with a different |
1647 | type in the VN tables either, as that would assert. */ |
1648 | if (result |
1649 | && !useless_type_conversion_p (ref->type, TREE_TYPE (result))) |
1650 | return NULL; |
1651 | else if (!result && newref |
1652 | && !useless_type_conversion_p (ref->type, newref->type)) |
1653 | { |
1654 | newoperands.release (); |
1655 | return NULL; |
1656 | } |
1657 | |
1658 | if (newref) |
1659 | new_val_id = newref->value_id; |
1660 | else |
1661 | { |
1662 | if (changed || !same_valid) |
1663 | new_val_id = get_next_value_id (); |
1664 | else |
1665 | new_val_id = ref->value_id; |
1666 | if (!newoperands.exists ()) |
1667 | newoperands = operands.copy (); |
1668 | newref = vn_reference_insert_pieces (newvuse, ref->set, |
1669 | ref->base_set, ref->type, |
1670 | newoperands, |
1671 | result, new_val_id); |
1672 | newoperands = vNULL; |
1673 | } |
1674 | expr = get_or_alloc_expr_for_reference (newref, expr_loc); |
1675 | add_to_value (new_val_id, expr); |
1676 | } |
1677 | newoperands.release (); |
1678 | return expr; |
1679 | } |
1680 | break; |
1681 | |
1682 | case NAME: |
1683 | { |
1684 | tree name = PRE_EXPR_NAME (expr); |
1685 | gimple *def_stmt = SSA_NAME_DEF_STMT (name); |
1686 | /* If the SSA name is defined by a PHI node in this block, |
1687 | translate it. */ |
1688 | if (gimple_code (def_stmt) == GIMPLE_PHI |
1689 | && gimple_bb (def_stmt) == phiblock) |
1690 | { |
1691 | tree def = PHI_ARG_DEF (def_stmt, e->dest_idx); |
1692 | |
1693 | /* Handle constant. */ |
1694 | if (is_gimple_min_invariant (def)) |
1695 | return get_or_alloc_expr_for_constant (def); |
1696 | |
1697 | return get_or_alloc_expr_for_name (def); |
1698 | } |
1699 | /* Otherwise return it unchanged - it will get removed if its |
1700 | value is not available in PRED's AVAIL_OUT set of expressions |
1701 | by the subtraction of TMP_GEN. */ |
1702 | return expr; |
1703 | } |
1704 | |
1705 | default: |
1706 | gcc_unreachable (); |
1707 | } |
1708 | } |
1709 | |
1710 | /* Wrapper around phi_translate_1 providing caching functionality. */ |
1711 | |
1712 | static pre_expr |
1713 | phi_translate (bitmap_set_t dest, pre_expr expr, |
1714 | bitmap_set_t set1, bitmap_set_t set2, edge e) |
1715 | { |
1716 | expr_pred_trans_t slot = NULL; |
1717 | pre_expr phitrans; |
1718 | |
1719 | if (!expr) |
1720 | return NULL; |
1721 | |
1722 | /* Constants contain no values that need translation. */ |
1723 | if (expr->kind == CONSTANT) |
1724 | return expr; |
1725 | |
1726 | if (value_id_constant_p (get_expr_value_id (expr))) |
1727 | return expr; |
1728 | |
1729 | /* Don't add translations of NAMEs as those are cheap to translate. */ |
1730 | if (expr->kind != NAME) |
1731 | { |
1732 | if (phi_trans_add (&slot, expr, e->src)) |
1733 | return slot->v == 0 ? NULL : expression_for_id (slot->v); |
1734 | /* Store NULL for the value we want to return in the case of |
1735 | recursing. */ |
1736 | slot->v = 0; |
1737 | } |
1738 | |
1739 | /* Translate. */ |
1740 | basic_block saved_valueize_bb = vn_context_bb; |
1741 | vn_context_bb = e->src; |
1742 | phitrans = phi_translate_1 (dest, expr, set1, set2, e); |
1743 | vn_context_bb = saved_valueize_bb; |
1744 | |
1745 | if (slot) |
1746 | { |
1747 | /* We may have reallocated. */ |
1748 | phi_trans_add (&slot, expr, e->src); |
1749 | if (phitrans) |
1750 | slot->v = get_expression_id (phitrans); |
1751 | else |
1752 | /* Remove failed translations again, they cause insert |
1753 | iteration to not pick up new opportunities reliably. */ |
1754 | PHI_TRANS_TABLE (e->src)->clear_slot (slot); |
1755 | } |
1756 | |
1757 | return phitrans; |
1758 | } |
1759 | |
1760 | |
1761 | /* For each expression in SET, translate the values through phi nodes |
1762 | in PHIBLOCK using edge PHIBLOCK->PRED, and store the resulting |
1763 | expressions in DEST. */ |
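| /* E.g., continuing the hypothetical PHI sketch above: translating |
| the set { a_1 + 1, x_7 * 2 } along the edge 3->4 produces |
| { b_2 + 1, x_7 * 2 }; expressions whose translation fails are |
| simply not copied into DEST. */ |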
1764 | |
1765 | static void |
1766 | phi_translate_set (bitmap_set_t dest, bitmap_set_t set, edge e) |
1767 | { |
1768 | bitmap_iterator bi; |
1769 | unsigned int i; |
1770 | |
1771 | if (gimple_seq_empty_p (phi_nodes (e->dest))) |
1772 | { |
1773 | bitmap_set_copy (dest, set); |
1774 | return; |
1775 | } |
1776 | |
1777 | /* Allocate the phi-translation cache where we have an idea about |
1778 | its size. hash-table implementation internals tell us that |
1779 | allocating the table to fit twice the number of elements will |
1780 | make sure we do not usually re-allocate. */ |
1781 | if (!PHI_TRANS_TABLE (e->src)) |
1782 | PHI_TRANS_TABLE (e->src) = new hash_table<expr_pred_trans_d> |
1783 | (2 * bitmap_count_bits (&set->expressions)); |
1784 | FOR_EACH_EXPR_ID_IN_SET (set, i, bi) |
1785 | { |
1786 | pre_expr expr = expression_for_id (i); |
1787 | pre_expr translated = phi_translate (dest, expr, set, NULL, e); |
1788 | if (!translated) |
1789 | continue; |
1790 | |
1791 | bitmap_insert_into_set (dest, translated); |
1792 | } |
1793 | } |
1794 | |
1795 | /* Find the leader for a value (i.e., the name representing that |
1796 | value) in a given set, and return it. Return NULL if no leader |
1797 | is found. */ |
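| /* Sketch (made-up ids): if value 23 is represented by expressions |
| {e17, e42} in value_expressions and SET contains only e42, the |
| leader returned is expression_for_id (42). */ |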
1798 | |
1799 | static pre_expr |
1800 | bitmap_find_leader (bitmap_set_t set, unsigned int val) |
1801 | { |
1802 | if (value_id_constant_p (val)) |
1803 | return constant_value_expressions[-val]; |
1804 | |
1805 | if (bitmap_set_contains_value (set, val)) |
1806 | { |
1807 | /* Rather than walk the entire bitmap of expressions, and see |
1808 | whether any of them has the value we are looking for, we look |
1809 | at the reverse mapping, which tells us the set of expressions |
1810 | that have a given value (IE value->expressions with that |
1811 | value) and see if any of those expressions are in our set. |
1812 | The number of expressions per value is usually significantly |
1813 | less than the number of expressions in the set. In fact, for |
1814 | large testcases, doing it this way is roughly 5-10x faster |
1815 | than walking the bitmap. |
1816 | If this is somehow a significant loss for some cases, we can |
1817 | choose which set to walk based on which set is smaller. */ |
1818 | unsigned int i; |
1819 | bitmap_iterator bi; |
1820 | bitmap exprset = value_expressions[val]; |
1821 | |
1822 | if (!exprset->first->next) |
1823 | EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi) |
1824 | if (bitmap_bit_p (&set->expressions, i)) |
1825 | return expression_for_id (i); |
1826 | |
1827 | EXECUTE_IF_AND_IN_BITMAP (exprset, &set->expressions, 0, i, bi) |
1828 | return expression_for_id (i); |
1829 | } |
1830 | return NULL; |
1831 | } |
1832 | |
1833 | /* Determine if EXPR, a memory expression, is ANTIC_IN at the top of |
1834 | BLOCK by seeing if it is not killed in the block. Note that we are |
1835 | only determining whether there is a store that kills it. Because |
1836 | of the order in which clean iterates over values, we are guaranteed |
1837 | that altered operands will have caused us to be eliminated from the |
1838 | ANTIC_IN set already. */ |
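| /* Sketch (hypothetical names): for EXPR = {*p_1, VUSE .MEM_5}, a |
| statement "*p_1 = x_2;" in BLOCK may clobber the reference, so the |
| value dies there; reaching another load with VUSE .MEM_5 first |
| instead proves there is no kill before that point. */ |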
1839 | |
1840 | static bool |
1841 | value_dies_in_block_x (pre_expr expr, basic_block block) |
1842 | { |
1843 | tree vuse = PRE_EXPR_REFERENCE (expr)->vuse; |
1844 | vn_reference_t refx = PRE_EXPR_REFERENCE (expr); |
1845 | gimple *def; |
1846 | gimple_stmt_iterator gsi; |
1847 | unsigned id = get_expression_id (expr); |
1848 | bool res = false; |
1849 | ao_ref ref; |
1850 | |
1851 | if (!vuse) |
1852 | return false; |
1853 | |
1854 | /* Lookup a previously calculated result. */ |
1855 | if (EXPR_DIES (block) |
1856 | && bitmap_bit_p (EXPR_DIES (block), id * 2)) |
1857 | return bitmap_bit_p (EXPR_DIES (block), id * 2 + 1); |
1858 | |
1859 | /* A memory expression {e, VUSE} dies in the block if there is a |
1860 | statement that may clobber e. If, starting statement walk from the |
1861 | top of the basic block, a statement uses VUSE there can be no kill |
1862 | in between that use and the original statement that loaded {e, VUSE}, |
1863 | so we can stop walking. */ |
1864 | ref.base = NULL_TREE; |
1865 | for (gsi = gsi_start_bb (block); !gsi_end_p (gsi); gsi_next (&gsi)) |
1866 | { |
1867 | tree def_vuse, def_vdef; |
1868 | def = gsi_stmt (gsi); |
1869 | def_vuse = gimple_vuse (def); |
1870 | def_vdef = gimple_vdef (def); |
1871 | |
1872 | /* Not a memory statement. */ |
1873 | if (!def_vuse) |
1874 | continue; |
1875 | |
1876 | /* Not a may-def. */ |
1877 | if (!def_vdef) |
1878 | { |
1879 | /* A load with the same VUSE, we're done. */ |
1880 | if (def_vuse == vuse) |
1881 | break; |
1882 | |
1883 | continue; |
1884 | } |
1885 | |
1886 | /* Init ref only if we really need it. */ |
1887 | if (ref.base == NULL_TREE |
1888 | && !ao_ref_init_from_vn_reference (&ref, refx->set, refx->base_set, |
1889 | refx->type, refx->operands)) |
1890 | { |
1891 | res = true; |
1892 | break; |
1893 | } |
1894 | /* If the statement may clobber expr, it dies. */ |
1895 | if (stmt_may_clobber_ref_p_1 (def, &ref)) |
1896 | { |
1897 | res = true; |
1898 | break; |
1899 | } |
1900 | } |
1901 | |
1902 | /* Remember the result. */ |
1903 | if (!EXPR_DIES (block)) |
1904 | EXPR_DIES (block) = BITMAP_ALLOC (&grand_bitmap_obstack); |
1905 | bitmap_set_bit (EXPR_DIES (block), id * 2); |
1906 | if (res) |
1907 | bitmap_set_bit (EXPR_DIES (block), id * 2 + 1); |
1908 | |
1909 | return res; |
1910 | } |
1911 | |
1912 | |
1913 | /* Determine if OP is valid in SET1 U SET2, which it is when the union |
1914 | contains its value-id. */ |
1915 | |
1916 | static bool |
1917 | op_valid_in_sets (bitmap_set_t set1, bitmap_set_t set2, tree op) |
1918 | { |
1919 | if (op && TREE_CODE (op) == SSA_NAME) |
1920 | { |
1921 | unsigned int value_id = VN_INFO (op)->value_id; |
1922 | if (!(bitmap_set_contains_value (set1, value_id) |
1923 | || (set2 && bitmap_set_contains_value (set2, value_id)))) |
1924 | return false; |
1925 | } |
1926 | return true; |
1927 | } |
1928 | |
1929 | /* Determine if the expression EXPR is valid in SET1 U SET2. |
1930 | ONLY SET2 CAN BE NULL. |
1931 | This means that we have a leader for each part of the expression |
1932 | (if it consists of values), or the expression is an SSA_NAME. |
1933 | For loads/calls, we also see if the vuse is killed in this block. */ |
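| /* E.g. (hypothetical): the NARY {plus_expr, a_1, b_2} is valid in |
| SET1 U SET2 only if the value-ids of a_1 and b_2 both have leaders |
| in the union; otherwise we could never materialize it later. */ |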
1934 | |
1935 | static bool |
1936 | valid_in_sets (bitmap_set_t set1, bitmap_set_t set2, pre_expr expr) |
1937 | { |
1938 | switch (expr->kind) |
1939 | { |
1940 | case NAME: |
1941 | /* By construction all NAMEs are available. Non-available |
1942 | NAMEs are removed by subtracting TMP_GEN from the sets. */ |
1943 | return true; |
1944 | case NARY: |
1945 | { |
1946 | unsigned int i; |
1947 | vn_nary_op_t nary = PRE_EXPR_NARY (expr); |
1948 | for (i = 0; i < nary->length; i++) |
1949 | if (!op_valid_in_sets (set1, set2, nary->op[i])) |
1950 | return false; |
1951 | return true; |
1952 | } |
1953 | break; |
1954 | case REFERENCE: |
1955 | { |
1956 | vn_reference_t ref = PRE_EXPR_REFERENCE (expr); |
1957 | vn_reference_op_t vro; |
1958 | unsigned int i; |
1959 | |
1960 | FOR_EACH_VEC_ELT (ref->operands, i, vro) |
1961 | { |
1962 | if (!op_valid_in_sets (set1, set2, vro->op0) |
1963 | || !op_valid_in_sets (set1, set2, vro->op1) |
1964 | || !op_valid_in_sets (set1, set2, vro->op2)) |
1965 | return false; |
1966 | } |
1967 | return true; |
1968 | } |
1969 | default: |
1970 | gcc_unreachable (); |
1971 | } |
1972 | } |
1973 | |
1974 | /* Clean the set of expressions SET1 that are no longer valid in SET1 or SET2. |
1975 | This means expressions that are made up of values we have no leaders for |
1976 | in SET1 or SET2. */ |
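| /* Sketch: if SET1 contains a_1 + b_2 but the value of b_2 no longer |
| has a leader in SET1 or SET2, the expression is removed, and its |
| value bit too when no other expression for that value remains. */ |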
1977 | |
1978 | static void |
1979 | clean (bitmap_set_t set1, bitmap_set_t set2 = NULL) |
1980 | { |
1981 | vec<pre_expr> exprs = sorted_array_from_bitmap_set (set1); |
1982 | pre_expr expr; |
1983 | int i; |
1984 | |
1985 | FOR_EACH_VEC_ELT (exprs, i, expr) |
1986 | { |
1987 | if (!valid_in_sets (set1, set2, expr)) |
1988 | { |
1989 | unsigned int val = get_expr_value_id (expr); |
1990 | bitmap_clear_bit (&set1->expressions, get_expression_id (expr)); |
1991 | /* We are entered with possibly multiple expressions for a value |
1992 | so before removing a value from the set see if there's an |
1993 | expression for it left. */ |
1994 | if (! bitmap_find_leader (set1, val)) |
1995 | bitmap_clear_bit (&set1->values, val); |
1996 | } |
1997 | } |
1998 | exprs.release (); |
1999 | |
2000 | if (flag_checking) |
2001 | { |
2002 | unsigned j; |
2003 | bitmap_iterator bi; |
2004 | FOR_EACH_EXPR_ID_IN_SET (set1, j, bi) |
2005 | gcc_assert (valid_in_sets (set1, set2, expression_for_id (j))); |
2006 | } |
2007 | } |
2008 | |
2009 | /* Clean the set of expressions that are no longer valid in SET because |
2010 | they are clobbered in BLOCK or because they trap and may not be executed. */ |
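| /* Sketch (hypothetical): a load *p_1 that a store in BLOCK may |
| clobber is pruned here, as is a potentially trapping x_3 / y_4 |
| when BLOCK contains a call that might not return. */ |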
2011 | |
2012 | static void |
2013 | prune_clobbered_mems (bitmap_set_t set, basic_block block) |
2014 | { |
2015 | bitmap_iterator bi; |
2016 | unsigned i; |
2017 | unsigned to_remove = -1U; |
2018 | bool any_removed = false; |
2019 | |
2020 | FOR_EACH_EXPR_ID_IN_SET (set, i, bi) |
2021 | { |
2022 | /* Remove queued expr. */ |
2023 | if (to_remove != -1U) |
2024 | { |
2025 | bitmap_clear_bit (&set->expressions, to_remove); |
2026 | any_removed = true; |
2027 | to_remove = -1U; |
2028 | } |
2029 | |
2030 | pre_expr expr = expression_for_id (i); |
2031 | if (expr->kind == REFERENCE) |
2032 | { |
2033 | vn_reference_t ref = PRE_EXPR_REFERENCE (expr); |
2034 | if (ref->vuse) |
2035 | { |
2036 | gimple *def_stmt = SSA_NAME_DEF_STMT (ref->vuse); |
2037 | if (!gimple_nop_p (def_stmt) |
2038 | /* If value-numbering provided a memory state for this |
2039 | that dominates BLOCK we're done, otherwise we have |
2040 | to check if the value dies in BLOCK. */ |
2041 | && !(gimple_bb (def_stmt) != block |
2042 | && dominated_by_p (CDI_DOMINATORS, |
2043 | block, gimple_bb (def_stmt))) |
2044 | && value_dies_in_block_x (expr, block)) |
2045 | to_remove = i; |
2046 | } |
2047 | /* If the REFERENCE may trap make sure the block does not contain |
2048 | a possible exit point. |
2049 | ??? This is overly conservative if we translate AVAIL_OUT |
2050 | as the available expression might be after the exit point. */ |
2051 | if (BB_MAY_NOTRETURN (block) |
2052 | && vn_reference_may_trap (ref)) |
2053 | to_remove = i; |
2054 | } |
2055 | else if (expr->kind == NARY) |
2056 | { |
2057 | vn_nary_op_t nary = PRE_EXPR_NARY (expr); |
2058 | /* If the NARY may trap make sure the block does not contain |
2059 | a possible exit point. |
2060 | ??? This is overly conservative if we translate AVAIL_OUT |
2061 | as the available expression might be after the exit point. */ |
2062 | if (BB_MAY_NOTRETURN (block) |
2063 | && vn_nary_may_trap (nary)) |
2064 | to_remove = i; |
2065 | } |
2066 | } |
2067 | |
2068 | /* Remove queued expr. */ |
2069 | if (to_remove != -1U) |
2070 | { |
2071 | bitmap_clear_bit (&set->expressions, to_remove); |
2072 | any_removed = true; |
2073 | } |
2074 | |
2075 | /* Above we only removed expressions, now clean the set of values |
2076 | which no longer have any corresponding expression. We cannot |
2077 | clear the value at the time we remove an expression since there |
2078 | may be multiple expressions per value. |
2079 | If we'd queue possibly to be removed values we could use |
2080 | the bitmap_find_leader way to see if there's still an expression |
2081 | for it. For some ratio of to be removed values and number of |
2082 | values/expressions in the set this might be faster than rebuilding |
2083 | the value-set. */ |
2084 | if (any_removed) |
2085 | { |
2086 | bitmap_clear (&set->values); |
2087 | FOR_EACH_EXPR_ID_IN_SET (set, i, bi) |
2088 | { |
2089 | pre_expr expr = expression_for_id (i); |
2090 | unsigned int value_id = get_expr_value_id (expr); |
2091 | bitmap_set_bit (&set->values, value_id); |
2092 | } |
2093 | } |
2094 | } |
2095 | |
2096 | /* Compute the ANTIC set for BLOCK. |
2097 | |
2098 | If succs(BLOCK) > 1 then |
2099 | ANTIC_OUT[BLOCK] = intersection of ANTIC_IN[b] for all succ(BLOCK) |
2100 | else if succs(BLOCK) == 1 then |
2101 | ANTIC_OUT[BLOCK] = phi_translate (ANTIC_IN[succ(BLOCK)]) |
2102 | |
2103 | ANTIC_IN[BLOCK] = clean(ANTIC_OUT[BLOCK] U EXP_GEN[BLOCK] - TMP_GEN[BLOCK]) |
2104 | |
2105 | Note that clean() is deferred until after the iteration. */ |
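| /* Worked sketch (hypothetical): for succs(B) = {S1, S2} with |
| ANTIC_IN[S1] = {a+b, c+d} and ANTIC_IN[S2] = {a+b}, the values are |
| intersected and the expressions unioned, after which expressions |
| whose value did not survive the intersection are pruned, giving |
| ANTIC_OUT[B] = {a+b}. */ |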
2106 | |
2107 | static bool |
2108 | compute_antic_aux (basic_block block, bool block_has_abnormal_pred_edge) |
2109 | { |
2110 | bitmap_set_t S, old, ANTIC_OUT; |
2111 | edge e; |
2112 | edge_iterator ei; |
2113 | |
2114 | bool was_visited = BB_VISITED (block); |
2115 | bool changed = ! BB_VISITED (block); |
2116 | BB_VISITED (block) = 1; |
2117 | old = ANTIC_OUT = S = NULL; |
2118 | |
2119 | /* If any edges from predecessors are abnormal, antic_in is empty, |
2120 | so do nothing. */ |
2121 | if (block_has_abnormal_pred_edge) |
2122 | goto maybe_dump_sets; |
2123 | |
2124 | old = ANTIC_IN (block); |
2125 | ANTIC_OUT = bitmap_set_new (); |
2126 | |
2127 | /* If the block has no successors, ANTIC_OUT is empty. */ |
2128 | if (EDGE_COUNT (block->succs) == 0) |
2129 | ; |
2130 | /* If we have one successor, we could have some phi nodes to |
2131 | translate through. */ |
2132 | else if (single_succ_p (block)) |
2133 | { |
2134 | e = single_succ_edge (block); |
2135 | gcc_assert (BB_VISITED (e->dest)); |
2136 | phi_translate_set (ANTIC_OUT, ANTIC_IN (e->dest), e); |
2137 | } |
2138 | /* If we have multiple successors, we take the intersection of all of |
2139 | them. Note that in the case of loop exit phi nodes, we may have |
2140 | phis to translate through. */ |
2141 | else |
2142 | { |
2143 | size_t i; |
2144 | edge first = NULL; |
2145 | |
2146 | auto_vec<edge> worklist (EDGE_COUNT (block->succs)); |
2147 | FOR_EACH_EDGE (e, ei, block->succs) |
2148 | { |
2149 | if (!first |
2150 | && BB_VISITED (e->dest)) |
2151 | first = e; |
2152 | else if (BB_VISITED (e->dest)) |
2153 | worklist.quick_push (e); |
2154 | else |
2155 | { |
2156 | /* Unvisited successors get their ANTIC_IN replaced by the |
2157 | maximal set to arrive at a maximum ANTIC_IN solution. |
2158 | We can ignore them in the intersection operation and thus |
2159 | need not explicitly represent that maximum solution. */ |
2160 | if (dump_file && (dump_flags & TDF_DETAILS)) |
2161 | fprintf (dump_file, "ANTIC_IN is MAX on %d->%d\n", |
2162 | e->src->index, e->dest->index); |
2163 | } |
2164 | } |
2165 | |
2166 | /* Of multiple successors we have to have visited one already |
2167 | which is guaranteed by iteration order. */ |
2168 | gcc_assert (first != NULL); |
2169 | |
2170 | phi_translate_set (ANTIC_OUT, ANTIC_IN (first->dest), first); |
2171 | |
2172 | /* If we have multiple successors we need to intersect the ANTIC_OUT |
2173 | sets. For values that's a simple intersection but for |
2174 | expressions it is a union. Given we want to have a single |
2175 | expression per value in our sets we have to canonicalize. |
2176 | Avoid randomness and running into cycles like for PR82129 and |
2177 | canonicalize the expression we choose to the one with the |
2178 | lowest id. This requires we actually compute the union first. */ |
2179 | FOR_EACH_VEC_ELT (worklist, i, e) |
2180 | { |
2181 | if (!gimple_seq_empty_p (phi_nodes (e->dest))) |
2182 | { |
2183 | bitmap_set_t tmp = bitmap_set_new (); |
2184 | phi_translate_set (tmp, ANTIC_IN (e->dest), e); |
2185 | bitmap_and_into (&ANTIC_OUT->values, &tmp->values); |
2186 | bitmap_ior_into (&ANTIC_OUT->expressions, &tmp->expressions); |
2187 | bitmap_set_free (tmp); |
2188 | } |
2189 | else |
2190 | { |
2191 | bitmap_and_into (&ANTIC_OUT->values, &ANTIC_IN (e->dest)->values); |
2192 | bitmap_ior_into (&ANTIC_OUT->expressions, |
2193 | &ANTIC_IN (e->dest)->expressions); |
2194 | } |
2195 | } |
2196 | if (! worklist.is_empty ()) |
2197 | { |
2198 | /* Prune expressions not in the value set. */ |
2199 | bitmap_iterator bi; |
2200 | unsigned int i; |
2201 | unsigned int to_clear = -1U; |
2202 | FOR_EACH_EXPR_ID_IN_SET (ANTIC_OUT, i, bi) |
2203 | { |
2204 | if (to_clear != -1U) |
2205 | { |
2206 | bitmap_clear_bit (&ANTIC_OUT->expressions, to_clear); |
2207 | to_clear = -1U; |
2208 | } |
2209 | pre_expr expr = expression_for_id (i); |
2210 | unsigned int value_id = get_expr_value_id (expr); |
2211 | if (!bitmap_bit_p (&ANTIC_OUT->values, value_id)) |
2212 | to_clear = i; |
2213 | } |
2214 | if (to_clear != -1U) |
2215 | bitmap_clear_bit (&ANTIC_OUT->expressions, to_clear); |
2216 | } |
2217 | } |
2218 | |
2219 | /* Prune expressions that are clobbered in block and thus become |
2220 | invalid if translated from ANTIC_OUT to ANTIC_IN. */ |
2221 | prune_clobbered_mems (ANTIC_OUT, block); |
2222 | |
2223 | /* Generate ANTIC_OUT - TMP_GEN. */ |
2224 | S = bitmap_set_subtract_expressions (ANTIC_OUT, TMP_GEN (block)); |
2225 | |
2226 | /* Start ANTIC_IN with EXP_GEN - TMP_GEN. */ |
2227 | ANTIC_IN (block) = bitmap_set_subtract_expressions (EXP_GEN (block), |
2228 | TMP_GEN (block)); |
2229 | |
2230 | /* Then union in the ANTIC_OUT - TMP_GEN values, |
2231 | to get ANTIC_OUT U EXP_GEN - TMP_GEN */ |
2232 | bitmap_ior_into (&ANTIC_IN (block)->values, &S->values); |
2233 | bitmap_ior_into (&ANTIC_IN (block)->expressions, &S->expressions); |
2234 | |
2235 | /* clean (ANTIC_IN (block)) is deferred until after the iteration has |
2236 | converged because it can cause non-convergence, see for example PR81181. */ |
2237 | |
2238 | /* Intersect ANTIC_IN with the old ANTIC_IN. This is required until |
2239 | we properly represent the maximum expression set, thus not prune |
2240 | values without expressions during the iteration. */ |
2241 | if (was_visited |
2242 | && bitmap_and_into (&ANTIC_IN (block)->values, &old->values)) |
2243 | { |
2244 | if (dump_file && (dump_flags & TDF_DETAILS)) |
2245 | fprintf (dump_file, "warning: intersecting with old ANTIC_IN " |
2246 | "shrinks the set\n"); |
2247 | /* Prune expressions not in the value set. */ |
2248 | bitmap_iterator bi; |
2249 | unsigned int i; |
2250 | unsigned int to_clear = -1U; |
2251 | FOR_EACH_EXPR_ID_IN_SET (ANTIC_IN (block), i, bi) |
2252 | { |
2253 | if (to_clear != -1U) |
2254 | { |
2255 | bitmap_clear_bit (&ANTIC_IN (block)->expressions, to_clear); |
2256 | to_clear = -1U; |
2257 | } |
2258 | pre_expr expr = expression_for_id (i); |
2259 | unsigned int value_id = get_expr_value_id (expr); |
2260 | if (!bitmap_bit_p (&ANTIC_IN (block)->values, value_id)) |
2261 | to_clear = i; |
2262 | } |
2263 | if (to_clear != -1U) |
2264 | bitmap_clear_bit (&ANTIC_IN (block)->expressions, to_clear); |
2265 | } |
2266 | |
2267 | if (!bitmap_set_equal (old, ANTIC_IN (block))) |
2268 | changed = true; |
2269 | |
2270 | maybe_dump_sets: |
2271 | if (dump_file && (dump_flags & TDF_DETAILS)) |
2272 | { |
2273 | if (ANTIC_OUT) |
2274 | print_bitmap_set (dump_file, ANTIC_OUT, "ANTIC_OUT", block->index); |
2275 | |
2276 | if (changed) |
2277 | fprintf (dump_file, "[changed] "); |
2278 | print_bitmap_set (dump_file, ANTIC_IN (block), "ANTIC_IN", |
2279 | block->index); |
2280 | |
2281 | if (S) |
2282 | print_bitmap_set (dump_file, S, "S", block->index); |
2283 | } |
2284 | if (old) |
2285 | bitmap_set_free (old); |
2286 | if (S) |
2287 | bitmap_set_free (S); |
2288 | if (ANTIC_OUT) |
2289 | bitmap_set_free (ANTIC_OUT); |
2290 | return changed; |
2291 | } |
2292 | |
2293 | /* Compute PARTIAL_ANTIC for BLOCK. |
2294 | |
2295 | If succs(BLOCK) > 1 then |
2296 | PA_OUT[BLOCK] = value-wise union of PA_IN[b] + all ANTIC_IN not |
2297 | in ANTIC_OUT for all succ(BLOCK) |
2298 | else if succs(BLOCK) == 1 then |
2299 | PA_OUT[BLOCK] = phi_translate (PA_IN[succ(BLOCK)]) |
2300 | |
2301 | PA_IN[BLOCK] = clean(PA_OUT[BLOCK] - TMP_GEN[BLOCK] - ANTIC_IN[BLOCK]) |
2302 | |
2303 | */ |
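| /* Sketch (hypothetical): if a+b is anticipated on the path through |
| S1 but not through S2, it is missing from ANTIC_OUT[BLOCK] yet |
| enters PA_OUT[BLOCK] through the union, making it a candidate for |
| partial-partial insertion. */ |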
2304 | static void |
2305 | compute_partial_antic_aux (basic_block block, |
2306 | bool block_has_abnormal_pred_edge) |
2307 | { |
2308 | bitmap_set_t old_PA_IN; |
2309 | bitmap_set_t PA_OUT; |
2310 | edge e; |
2311 | edge_iterator ei; |
2312 | unsigned long max_pa = param_max_partial_antic_length; |
2313 | |
2314 | old_PA_IN = PA_OUT = NULL; |
2315 | |
2316 | /* If any edges from predecessors are abnormal, antic_in is empty, |
2317 | so do nothing. */ |
2318 | if (block_has_abnormal_pred_edge) |
2319 | goto maybe_dump_sets; |
2320 | |
2321 | /* If there are too many partially anticipatable values in the |
2322 | block, phi_translate_set can take an exponential time: stop |
2323 | before the translation starts. */ |
2324 | if (max_pa |
2325 | && single_succ_p (block) |
2326 | && bitmap_count_bits (&PA_IN (single_succ (block))->values) > max_pa) |
2327 | goto maybe_dump_sets; |
2328 | |
2329 | old_PA_IN = PA_IN (block); |
2330 | PA_OUT = bitmap_set_new (); |
2331 | |
2332 | /* If the block has no successors, ANTIC_OUT is empty. */ |
2333 | if (EDGE_COUNT (block->succs) == 0) |
2334 | ; |
2335 | /* If we have one successor, we could have some phi nodes to |
2336 | translate through. Note that we can't phi translate across DFS |
2337 | back edges in partial antic, because it uses a union operation on |
2338 | the successors. For recurrences like IV's, we will end up |
2339 | generating a new value in the set on each go around (i + 3 (VH.1), |
2340 | VH.1 + 1 (VH.2), VH.2 + 1 (VH.3), etc), forever. */ |
2341 | else if (single_succ_p (block)) |
2342 | { |
2343 | e = single_succ_edge (block); |
2344 | if (!(e->flags & EDGE_DFS_BACK)) |
2345 | phi_translate_set (PA_OUT, PA_IN (e->dest), e); |
2346 | } |
2347 | /* If we have multiple successors, we take the union of all of |
2348 | them. */ |
2349 | else |
2350 | { |
2351 | size_t i; |
2352 | |
2353 | auto_vec<edge> worklist (EDGE_COUNT (block->succs)); |
2354 | FOR_EACH_EDGE (e, ei, block->succs) |
2355 | { |
2356 | if (e->flags & EDGE_DFS_BACK) |
2357 | continue; |
2358 | worklist.quick_push (e); |
2359 | } |
2360 | if (worklist.length () > 0) |
2361 | { |
2362 | FOR_EACH_VEC_ELT (worklist, i, e) |
2363 | { |
2364 | unsigned int i; |
2365 | bitmap_iterator bi; |
2366 | |
2367 | if (!gimple_seq_empty_p (phi_nodes (e->dest))) |
2368 | { |
2369 | bitmap_set_t antic_in = bitmap_set_new (); |
2370 | phi_translate_set (antic_in, ANTIC_IN (e->dest), e); |
2371 | FOR_EACH_EXPR_ID_IN_SET (antic_in, i, bi) |
2372 | bitmap_value_insert_into_set (PA_OUT, |
2373 | expression_for_id (i)); |
2374 | bitmap_set_free (antic_in); |
2375 | bitmap_set_t pa_in = bitmap_set_new (); |
2376 | phi_translate_set (pa_in, PA_IN (e->dest), e); |
2377 | FOR_EACH_EXPR_ID_IN_SET (pa_in, i, bi) |
2378 | bitmap_value_insert_into_set (PA_OUT, |
2379 | expression_for_id (i)); |
2380 | bitmap_set_free (pa_in); |
2381 | } |
2382 | else |
2383 | { |
2384 | FOR_EACH_EXPR_ID_IN_SET (ANTIC_IN (e->dest), i, bi) |
2385 | bitmap_value_insert_into_set (PA_OUT, |
2386 | expression_for_id (i)); |
2387 | FOR_EACH_EXPR_ID_IN_SET (PA_IN (e->dest), i, bi) |
2388 | bitmap_value_insert_into_set (PA_OUT, |
2389 | expression_for_id (i)); |
2390 | } |
2391 | } |
2392 | } |
2393 | } |
2394 | |
2395 | /* Prune expressions that are clobbered in block and thus become |
2396 | invalid if translated from PA_OUT to PA_IN. */ |
2397 | prune_clobbered_mems (PA_OUT, block); |
2398 | |
2399 | /* PA_IN starts with PA_OUT - TMP_GEN. |
2400 | Then we subtract things from ANTIC_IN. */ |
2401 | PA_IN (block) = bitmap_set_subtract_expressions (PA_OUT, TMP_GEN (block)); |
2402 | |
2403 | /* For partial antic, we want to put back in the phi results, since |
2404 | we will properly avoid making them partially antic over backedges. */ |
2405 | bitmap_ior_into (&PA_IN (block)->values, &PHI_GEN (block)->values); |
2406 | bitmap_ior_into (&PA_IN (block)->expressions, &PHI_GEN (block)->expressions); |
2407 | |
2408 | /* PA_IN[block] = PA_IN[block] - ANTIC_IN[block] */ |
2409 | bitmap_set_subtract_values (PA_IN (block), ANTIC_IN (block)); |
2410 | |
2411 | clean (PA_IN (block), ANTIC_IN (block)); |
2412 | |
2413 | maybe_dump_sets: |
2414 | if (dump_file && (dump_flags & TDF_DETAILS)) |
2415 | { |
2416 | if (PA_OUT) |
2417 | print_bitmap_set (dump_file, PA_OUT, "PA_OUT", block->index); |
2418 | |
2419 | print_bitmap_set (dump_file, PA_IN (block), "PA_IN", block->index); |
2420 | } |
2421 | if (old_PA_IN) |
2422 | bitmap_set_free (old_PA_IN); |
2423 | if (PA_OUT) |
2424 | bitmap_set_free (PA_OUT); |
2425 | } |
2426 | |
2427 | /* Compute ANTIC and partial ANTIC sets. */ |
2428 | |
2429 | static void |
2430 | compute_antic (void) |
2431 | { |
2432 | bool changed = true; |
2433 | int num_iterations = 0; |
2434 | basic_block block; |
2435 | int i; |
2436 | edge_iterator ei; |
2437 | edge e; |
2438 | |
2439 | /* If any predecessor edges are abnormal, we punt, so antic_in is empty. |
2440 | We pre-build the map of blocks with incoming abnormal edges here. */ |
2441 | auto_sbitmap has_abnormal_preds (last_basic_block_for_fn (cfun)); |
2442 | bitmap_clear (has_abnormal_preds); |
2443 | |
2444 | FOR_ALL_BB_FN (block, cfun) |
2445 | { |
2446 | BB_VISITED (block) = 0; |
2447 | |
2448 | FOR_EACH_EDGE (e, ei, block->preds) |
2449 | if (e->flags & EDGE_ABNORMAL) |
2450 | { |
2451 | bitmap_set_bit (has_abnormal_preds, block->index); |
2452 | break; |
2453 | } |
2454 | |
2455 | /* While we are here, give empty ANTIC_IN sets to each block. */ |
2456 | ANTIC_IN (block) = bitmap_set_new (); |
2457 | if (do_partial_partial) |
2458 | PA_IN (block) = bitmap_set_new (); |
2459 | } |
2460 | |
2461 | /* At the exit block we anticipate nothing. */ |
2462 | BB_VISITED (EXIT_BLOCK_PTR_FOR_FN (cfun)) = 1; |
2463 | |
2464 | /* For ANTIC computation we need a postorder that also guarantees that |
2465 | a block with a single successor is visited after its successor. |
2466 | RPO on the inverted CFG has this property. */ |
2467 | auto_vec<int, 20> postorder; |
2468 | inverted_post_order_compute (&postorder); |
2469 | |
2470 | auto_sbitmap worklist (last_basic_block_for_fn (cfun) + 1); |
2471 | bitmap_clear (worklist); |
2472 | FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds) |
2473 | bitmap_set_bit (worklist, e->src->index); |
2474 | while (changed) |
2475 | { |
2476 | if (dump_file && (dump_flags & TDF_DETAILS)) |
2477 | fprintf (dump_file, "Starting iteration %d\n", num_iterations); |
2478 | /* ??? We need to clear our PHI translation cache here as the |
2479 | ANTIC sets shrink and we restrict valid translations to |
2480 | those having operands with leaders in ANTIC. Same below |
2481 | for PA ANTIC computation. */ |
2482 | num_iterations++; |
2483 | changed = false; |
2484 | for (i = postorder.length () - 1; i >= 0; i--) |
2485 | { |
2486 | if (bitmap_bit_p (worklist, postorder[i])) |
2487 | { |
2488 | basic_block block = BASIC_BLOCK_FOR_FN (cfun, postorder[i]); |
2489 | bitmap_clear_bit (worklist, block->index); |
2490 | if (compute_antic_aux (block, |
2491 | bitmap_bit_p (has_abnormal_preds, |
2492 | block->index))) |
2493 | { |
2494 | FOR_EACH_EDGE (e, ei, block->preds) |
2495 | bitmap_set_bit (worklist, e->src->index); |
2496 | changed = true; |
2497 | } |
2498 | } |
2499 | } |
2500 | /* Theoretically possible, but *highly* unlikely. */ |
2501 | gcc_checking_assert (num_iterations < 500); |
2502 | } |
2503 | |
2504 | /* We have to clean after the dataflow problem converged as cleaning |
2505 | can cause non-convergence because it is based on expressions |
2506 | rather than values. */ |
2507 | FOR_EACH_BB_FN (block, cfun) |
2508 | clean (ANTIC_IN (block)); |
2509 | |
2510 | statistics_histogram_event (cfun, "compute_antic iterations", |
2511 | num_iterations); |
2512 | |
2513 | if (do_partial_partial) |
2514 | { |
2515 | /* For partial antic we ignore backedges and thus we do not need |
2516 | to perform any iteration when we process blocks in postorder. */ |
2517 | for (i = postorder.length () - 1; i >= 0; i--) |
2518 | { |
2519 | basic_block block = BASIC_BLOCK_FOR_FN (cfun, postorder[i]); |
2520 | compute_partial_antic_aux (block, |
2521 | bitmap_bit_p (has_abnormal_preds, |
2522 | block->index)); |
2523 | } |
2524 | } |
2525 | } |
2526 | |
2527 | |
2528 | /* Inserted expressions are placed onto this worklist, which is used |
2529 | for performing quick dead code elimination of insertions we made |
2530 | that didn't turn out to be necessary. */ |
2531 | static bitmap inserted_exprs; |
2532 | |
2533 | /* The actual worker for create_component_ref_by_pieces. */ |
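| /* Sketch (hypothetical reference): a.b[i_1] is recorded as the |
| operand array { ARRAY_REF<i_1>, COMPONENT_REF<b>, VAR_DECL<a> }, |
| outermost first; the worker below consumes it in that order, |
| recursing for the base and calling find_or_generate_expression |
| for SSA operands such as i_1. */ |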
2534 | |
2535 | static tree |
2536 | create_component_ref_by_pieces_1 (basic_block block, vn_reference_t ref, |
2537 | unsigned int *operand, gimple_seq *stmts) |
2538 | { |
2539 | vn_reference_op_t currop = &ref->operands[*operand]; |
2540 | tree genop; |
2541 | ++*operand; |
2542 | switch (currop->opcode) |
2543 | { |
2544 | case CALL_EXPR: |
2545 | gcc_unreachable (); |
2546 | |
2547 | case MEM_REF: |
2548 | { |
2549 | tree baseop = create_component_ref_by_pieces_1 (block, ref, operand, |
2550 | stmts); |
2551 | if (!baseop) |
2552 | return NULL_TREE; |
2553 | tree offset = currop->op0; |
2554 | if (TREE_CODE (baseop) == ADDR_EXPR |
2555 | && handled_component_p (TREE_OPERAND (baseop, 0))) |
2556 | { |
2557 | poly_int64 off; |
2558 | tree base; |
2559 | base = get_addr_base_and_unit_offset (TREE_OPERAND (baseop, 0), |
2560 | &off); |
2561 | gcc_assert (base); |
2562 | offset = int_const_binop (PLUS_EXPR, offset, |
2563 | build_int_cst (TREE_TYPE (offset), |
2564 | off)); |
2565 | baseop = build_fold_addr_expr (base); |
2566 | } |
2567 | genop = build2 (MEM_REF, currop->type, baseop, offset); |
2568 | MR_DEPENDENCE_CLIQUE (genop) = currop->clique; |
2569 | MR_DEPENDENCE_BASE (genop) = currop->base; |
2570 | REF_REVERSE_STORAGE_ORDER (genop) = currop->reverse; |
2571 | return genop; |
2572 | } |
2573 | |
2574 | case TARGET_MEM_REF: |
2575 | { |
2576 | tree genop0 = NULL_TREE, genop1 = NULL_TREE; |
2577 | vn_reference_op_t nextop = &ref->operands[(*operand)++]; |
2578 | tree baseop = create_component_ref_by_pieces_1 (block, ref, operand, |
2579 | stmts); |
2580 | if (!baseop) |
2581 | return NULL_TREE; |
2582 | if (currop->op0) |
2583 | { |
2584 | genop0 = find_or_generate_expression (block, currop->op0, stmts); |
2585 | if (!genop0) |
2586 | return NULL_TREE; |
2587 | } |
2588 | if (nextop->op0) |
2589 | { |
2590 | genop1 = find_or_generate_expression (block, nextop->op0, stmts); |
2591 | if (!genop1) |
2592 | return NULL_TREE; |
2593 | } |
2594 | genop = build5 (TARGET_MEM_REF, currop->type, |
2595 | baseop, currop->op2, genop0, currop->op1, genop1); |
2596 | |
2597 | MR_DEPENDENCE_CLIQUE (genop) = currop->clique; |
2598 | MR_DEPENDENCE_BASE (genop) = currop->base; |
2599 | return genop; |
2600 | } |
2601 | |
2602 | case ADDR_EXPR: |
2603 | if (currop->op0) |
2604 | { |
2605 | gcc_assert (is_gimple_min_invariant (currop->op0)); |
2606 | return currop->op0; |
2607 | } |
2608 | /* Fallthrough. */ |
2609 | case REALPART_EXPR: |
2610 | case IMAGPART_EXPR: |
2611 | case VIEW_CONVERT_EXPR: |
2612 | { |
2613 | tree genop0 = create_component_ref_by_pieces_1 (block, ref, operand, |
2614 | stmts); |
2615 | if (!genop0) |
2616 | return NULL_TREE; |
2617 | return fold_build1 (currop->opcode, currop->type, genop0); |
2618 | } |
2619 | |
2620 | case WITH_SIZE_EXPR: |
2621 | { |
2622 | tree genop0 = create_component_ref_by_pieces_1 (block, ref, operand, |
2623 | stmts); |
2624 | if (!genop0) |
2625 | return NULL_TREE; |
2626 | tree genop1 = find_or_generate_expression (block, currop->op0, stmts); |
2627 | if (!genop1) |
2628 | return NULL_TREE; |
2629 | return fold_build2 (currop->opcode, currop->type, genop0, genop1); |
2630 | } |
2631 | |
2632 | case BIT_FIELD_REF: |
2633 | { |
2634 | tree genop0 = create_component_ref_by_pieces_1 (block, ref, operand, |
2635 | stmts); |
2636 | if (!genop0) |
2637 | return NULL_TREE; |
2638 | tree op1 = currop->op0; |
2639 | tree op2 = currop->op1; |
2640 | tree t = build3 (BIT_FIELD_REF, currop->type, genop0, op1, op2); |
2641 | REF_REVERSE_STORAGE_ORDER (t) = currop->reverse; |
2642 | return fold (t); |
2643 | } |
2644 | |
2645 | /* For array ref vn_reference_op's, operand 1 of the array ref |
2646 | is op0 of the reference op and operand 3 of the array ref is |
2647 | op1. */ |
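| /* I.e. (sketch): for ARRAY_REF <base, index, minimum, element-size> |
| the index is the reference op's op0 (genop1 below), the minimum |
| index is op1 (genop2), and the element size is op2 (genop3). */ |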
2648 | case ARRAY_RANGE_REF: |
2649 | case ARRAY_REF: |
2650 | { |
2651 | tree genop0; |
2652 | tree genop1 = currop->op0; |
2653 | tree genop2 = currop->op1; |
2654 | tree genop3 = currop->op2; |
2655 | genop0 = create_component_ref_by_pieces_1 (block, ref, operand, |
2656 | stmts); |
2657 | if (!genop0) |
2658 | return NULL_TREE; |
2659 | genop1 = find_or_generate_expression (block, genop1, stmts); |
2660 | if (!genop1) |
2661 | return NULL_TREE; |
2662 | if (genop2) |
2663 | { |
2664 | tree domain_type = TYPE_DOMAIN (TREE_TYPE (genop0)); |
2665 | /* Drop zero minimum index if redundant. */ |
2666 | if (integer_zerop (genop2) |
2667 | && (!domain_type |
2668 | || integer_zerop (TYPE_MIN_VALUE (domain_type)))) |
2669 | genop2 = NULL_TREE; |
2670 | else |
2671 | { |
2672 | genop2 = find_or_generate_expression (block, genop2, stmts); |
2673 | if (!genop2) |
2674 | return NULL_TREE(tree) nullptr; |
2675 | } |
2676 | } |
2677 | if (genop3) |
2678 | { |
2679 | tree elmt_type = TREE_TYPE (TREE_TYPE (genop0))((contains_struct_check ((((contains_struct_check ((genop0), ( TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-pre.cc" , 2679, __FUNCTION__))->typed.type)), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-pre.cc" , 2679, __FUNCTION__))->typed.type); |
2680 | /* We can't always put a size in units of the element alignment |
2681 | here as the element alignment may be not visible. See |
2682 | PR43783. Simply drop the element size for constant |
2683 | sizes. */ |
2684 | if (TREE_CODE (genop3)((enum tree_code) (genop3)->base.code) == INTEGER_CST |
2685 | && TREE_CODE (TYPE_SIZE_UNIT (elmt_type))((enum tree_code) (((tree_class_check ((elmt_type), (tcc_type ), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-pre.cc" , 2685, __FUNCTION__))->type_common.size_unit))->base.code ) == INTEGER_CST |
2686 | && wi::eq_p (wi::to_offset (TYPE_SIZE_UNIT (elmt_type)((tree_class_check ((elmt_type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-pre.cc" , 2686, __FUNCTION__))->type_common.size_unit)), |
2687 | (wi::to_offset (genop3) |
2688 | * vn_ref_op_align_unit (currop)))) |
2689 | genop3 = NULL_TREE(tree) nullptr; |
2690 | else |
2691 | { |
2692 | genop3 = find_or_generate_expression (block, genop3, stmts); |
2693 | if (!genop3) |
2694 | return NULL_TREE(tree) nullptr; |
2695 | } |
2696 | } |
2697 | return build4 (currop->opcode, currop->type, genop0, genop1, |
2698 | genop2, genop3); |
2699 | } |
2700 | case COMPONENT_REF: |
2701 | { |
2702 | tree op0; |
2703 | tree op1; |
2704 | tree genop2 = currop->op1; |
2705 | op0 = create_component_ref_by_pieces_1 (block, ref, operand, stmts); |
2706 | if (!op0) |
2707 | return NULL_TREE; |
2708 | /* op1 should be a FIELD_DECL; FIELD_DECLs are represented by themselves. */ |
2709 | op1 = currop->op0; |
2710 | if (genop2) |
2711 | { |
2712 | genop2 = find_or_generate_expression (block, genop2, stmts); |
2713 | if (!genop2) |
2714 | return NULL_TREE; |
2715 | } |
2716 | return fold_build3 (COMPONENT_REF, TREE_TYPE (op1), op0, op1, genop2); |
2717 | } |
2718 | |
2719 | case SSA_NAME: |
2720 | { |
2721 | genop = find_or_generate_expression (block, currop->op0, stmts); |
2722 | return genop; |
2723 | } |
2724 | case STRING_CST: |
2725 | case INTEGER_CST: |
2726 | case POLY_INT_CST: |
2727 | case COMPLEX_CST: |
2728 | case VECTOR_CST: |
2729 | case REAL_CST: |
2730 | case CONSTRUCTOR: |
2731 | case VAR_DECL: |
2732 | case PARM_DECL: |
2733 | case CONST_DECL: |
2734 | case RESULT_DECL: |
2735 | case FUNCTION_DECL: |
2736 | return currop->op0; |
2737 | |
2738 | default: |
2739 | gcc_unreachable (); |
2740 | } |
2741 | } |
2742 | |
2743 | /* For COMPONENT_REFs and ARRAY_REFs, we can't have any intermediates for the |
2744 | COMPONENT_REF or MEM_REF or ARRAY_REF portion, because we'd end up |
2745 | trying to rename aggregates into ssa form directly, which is a no-no. |
2746 | |
2747 | Thus, this routine doesn't create temporaries, it just builds a |
2748 | single access expression for the array, calling |
2749 | find_or_generate_expression to build the innermost pieces. |
2750 | |
2751 | This function is a subroutine of create_expression_by_pieces, and |
2752 | should not be called on its own unless you really know what you |
2753 | are doing. */ |
2754 | |
2755 | static tree |
2756 | create_component_ref_by_pieces (basic_block block, vn_reference_t ref, |
2757 | gimple_seq *stmts) |
2758 | { |
2759 | unsigned int op = 0; |
2760 | return create_component_ref_by_pieces_1 (block, ref, &op, stmts); |
2761 | } |
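 | |
 | /* An illustration (a sketch; the operand layout and SSA names are |
 | illustrative, not taken from a real dump): for a reference like |
 | a[i_1].f |
 | the vn_reference operands describe the access piece by piece, and |
 | the routine above rebuilds the whole tree in one go, calling |
 | find_or_generate_expression only for the scalar pieces such as i_1. |
 | No SSA temporary is created for the aggregate parts themselves. */ |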
2762 | |
2763 | /* Find a simple leader for an expression, or generate one using |
2764 | create_expression_by_pieces from a NARY expression for the value. |
2765 | BLOCK is the basic_block we are looking for leaders in. |
2766 | OP is the tree expression to find a leader for or generate. |
2767 | Returns the leader or NULL_TREE on failure. */ |
2768 | |
2769 | static tree |
2770 | find_or_generate_expression (basic_block block, tree op, gimple_seq *stmts) |
2771 | { |
2772 | /* Constants are always leaders. */ |
2773 | if (is_gimple_min_invariant (op)) |
2774 | return op; |
2775 | |
2776 | gcc_assert (TREE_CODE (op) == SSA_NAME); |
2777 | vn_ssa_aux_t info = VN_INFO (op); |
2778 | unsigned int lookfor = info->value_id; |
2779 | if (value_id_constant_p (lookfor)) |
2780 | return info->valnum; |
2781 | |
2782 | pre_expr leader = bitmap_find_leader (AVAIL_OUT (block), lookfor); |
2783 | if (leader) |
2784 | { |
2785 | if (leader->kind == NAME) |
2786 | return PRE_EXPR_NAME (leader); |
2787 | else if (leader->kind == CONSTANT) |
2788 | return PRE_EXPR_CONSTANT (leader); |
2789 | |
2790 | /* Defer. */ |
2791 | return NULL_TREE; |
2792 | } |
2793 | gcc_assert (!value_id_constant_p (lookfor)); |
2794 | |
2795 | /* It must be a complex expression, so generate it recursively. Note |
2796 | that this is only necessary to handle gcc.dg/tree-ssa/ssa-pre28.c |
2797 | where the insert algorithm fails to insert a required expression. */ |
2798 | bitmap exprset = value_expressions[lookfor]; |
2799 | bitmap_iterator bi; |
2800 | unsigned int i; |
2801 | EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi) |
2802 | { |
2803 | pre_expr temp = expression_for_id (i); |
2804 | /* We cannot insert random REFERENCE expressions at arbitrary |
2805 | places. We can insert NARYs, which eventually re-materialize |
2806 | their operand values. */ |
2807 | if (temp->kind == NARY) |
2808 | return create_expression_by_pieces (block, temp, stmts, |
2809 | TREE_TYPE (op)); |
2810 | } |
2811 | |
2812 | /* Defer. */ |
2813 | return NULL_TREE; |
2814 | } |
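 | |
 | /* For example (illustrative names): asked for a leader of x_3 whose |
 | value is also computed by a_1 + b_2, this returns the AVAIL_OUT |
 | leader when one exists; otherwise it may generate |
 | pretmp_4 = a_1 + b_2; |
 | from the NARY expression of the value via |
 | create_expression_by_pieces. A NULL_TREE result tells the caller |
 | to defer this insertion. */ |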
2815 | |
2816 | /* Create an expression in pieces, so that we can handle very complex |
2817 | expressions that may be ANTIC, but not necessarily GIMPLE. |
2818 | BLOCK is the basic block the expression will be inserted into, |
2819 | EXPR is the expression to insert (in value form) |
2820 | STMTS is a statement list to append the necessary insertions into. |
2821 | |
2822 | This function will die if we hit some value that shouldn't be |
2823 | ANTIC but is (i.e. there is no leader for it, or for its components). |
2824 | The function returns NULL_TREE in case a different antic expression |
2825 | has to be inserted first. |
2826 | This function may also generate expressions that are themselves |
2827 | partially or fully redundant. Those that are will be either made |
2828 | fully redundant during the next iteration of insert (for partially |
2829 | redundant ones), or eliminated by eliminate (for fully redundant |
2830 | ones). */ |
2831 | |
2832 | static tree |
2833 | create_expression_by_pieces (basic_block block, pre_expr expr, |
2834 | gimple_seq *stmts, tree type) |
2835 | { |
2836 | tree name; |
2837 | tree folded; |
2838 | gimple_seq forced_stmts = NULL; |
2839 | unsigned int value_id; |
2840 | gimple_stmt_iterator gsi; |
2841 | tree exprtype = type ? type : get_expr_type (expr); |
2842 | pre_expr nameexpr; |
2843 | gassign *newstmt; |
2844 | |
2845 | switch (expr->kind) |
2846 | { |
2847 | /* We may hit the NAME/CONSTANT case if we have to convert types |
2848 | that value numbering saw through. */ |
2849 | case NAME: |
2850 | folded = PRE_EXPR_NAME (expr); |
2851 | if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (folded)) |
2852 | return NULL_TREE; |
2853 | if (useless_type_conversion_p (exprtype, TREE_TYPE (folded))) |
2854 | return folded; |
2855 | break; |
2856 | case CONSTANT: |
2857 | { |
2858 | folded = PRE_EXPR_CONSTANT (expr); |
2859 | tree tem = fold_convert (exprtype, folded); |
2860 | if (is_gimple_min_invariant (tem)) |
2861 | return tem; |
2862 | break; |
2863 | } |
2864 | case REFERENCE: |
2865 | if (PRE_EXPR_REFERENCE (expr)->operands[0].opcode == CALL_EXPR) |
2866 | { |
2867 | vn_reference_t ref = PRE_EXPR_REFERENCE (expr); |
2868 | unsigned int operand = 1; |
2869 | vn_reference_op_t currop = &ref->operands[0]; |
2870 | tree sc = NULL_TREE; |
2871 | tree fn = NULL_TREE; |
2872 | if (currop->op0) |
2873 | { |
2874 | fn = find_or_generate_expression (block, currop->op0, stmts); |
2875 | if (!fn) |
2876 | return NULL_TREE; |
2877 | } |
2878 | if (currop->op1) |
2879 | { |
2880 | sc = find_or_generate_expression (block, currop->op1, stmts); |
2881 | if (!sc) |
2882 | return NULL_TREE; |
2883 | } |
2884 | auto_vec<tree> args (ref->operands.length () - 1); |
2885 | while (operand < ref->operands.length ()) |
2886 | { |
2887 | tree arg = create_component_ref_by_pieces_1 (block, ref, |
2888 | &operand, stmts); |
2889 | if (!arg) |
2890 | return NULL_TREE; |
2891 | args.quick_push (arg); |
2892 | } |
2893 | gcall *call; |
2894 | if (currop->op0) |
2895 | { |
2896 | call = gimple_build_call_vec (fn, args); |
2897 | gimple_call_set_fntype (call, currop->type); |
2898 | } |
2899 | else |
2900 | call = gimple_build_call_internal_vec ((internal_fn)currop->clique, |
2901 | args); |
2902 | gimple_set_location (call, expr->loc); |
2903 | if (sc) |
2904 | gimple_call_set_chain (call, sc); |
2905 | tree forcedname = make_ssa_name (ref->type); |
2906 | gimple_call_set_lhs (call, forcedname); |
2907 | /* There's no CCP pass after PRE which would re-compute alignment |
2908 | information so make sure we re-materialize this here. */ |
2909 | if (gimple_call_builtin_p (call, BUILT_IN_ASSUME_ALIGNED) |
2910 | && args.length () - 2 <= 1 |
2911 | && tree_fits_uhwi_p (args[1]) |
2912 | && (args.length () != 3 || tree_fits_uhwi_p (args[2]))) |
2913 | { |
2914 | unsigned HOST_WIDE_INT halign = tree_to_uhwi (args[1]); |
2915 | unsigned HOST_WIDE_INT hmisalign |
2916 | = args.length () == 3 ? tree_to_uhwi (args[2]) : 0; |
2917 | if ((halign & (halign - 1)) == 0 |
2918 | && (hmisalign & ~(halign - 1)) == 0 |
2919 | && (unsigned int)halign != 0) |
2920 | set_ptr_info_alignment (get_ptr_info (forcedname), |
2921 | halign, hmisalign); |
2922 | } |
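 | /* E.g. re-materializing (names illustrative) |
 | p_2 = __builtin_assume_aligned (q_1, 16); |
 | records alignment 16 and misalignment 0 in p_2's pointer info, |
 | which no later CCP pass would otherwise recompute. */ |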
2923 | gimple_set_vuse (call, BB_LIVE_VOP_ON_EXIT (block)); |
2924 | gimple_seq_add_stmt_without_update (&forced_stmts, call); |
2925 | folded = forcedname; |
2926 | } |
2927 | else |
2928 | { |
2929 | folded = create_component_ref_by_pieces (block, |
2930 | PRE_EXPR_REFERENCE (expr), |
2931 | stmts); |
2932 | if (!folded) |
2933 | return NULL_TREE; |
2934 | name = make_temp_ssa_name (exprtype, NULL, "pretmp"); |
2935 | newstmt = gimple_build_assign (name, folded); |
2936 | gimple_set_location (newstmt, expr->loc); |
2937 | gimple_seq_add_stmt_without_update (&forced_stmts, newstmt); |
2938 | gimple_set_vuse (newstmt, BB_LIVE_VOP_ON_EXIT (block)); |
2939 | folded = name; |
2940 | } |
2941 | break; |
2942 | case NARY: |
2943 | { |
2944 | vn_nary_op_t nary = PRE_EXPR_NARY (expr); |
2945 | tree *genop = XALLOCAVEC (tree, nary->length); |
2946 | unsigned i; |
2947 | for (i = 0; i < nary->length; ++i) |
2948 | { |
2949 | genop[i] = find_or_generate_expression (block, nary->op[i], stmts); |
2950 | if (!genop[i]) |
2951 | return NULL_TREE; |
2952 | /* Ensure genop[] is properly typed for POINTER_PLUS_EXPR. It |
2953 | may have conversions stripped. */ |
2954 | if (nary->opcode == POINTER_PLUS_EXPR) |
2955 | { |
2956 | if (i == 0) |
2957 | genop[i] = gimple_convert (&forced_stmts, |
2958 | nary->type, genop[i]); |
2959 | else if (i == 1) |
2960 | genop[i] = gimple_convert (&forced_stmts, |
2961 | sizetype, genop[i]); |
2962 | } |
2963 | else |
2964 | genop[i] = gimple_convert (&forced_stmts, |
2965 | TREE_TYPE (nary->op[i]), genop[i]); |
2966 | } |
2967 | if (nary->opcode == CONSTRUCTOR) |
2968 | { |
2969 | vec<constructor_elt, va_gc> *elts = NULL; |
2970 | for (i = 0; i < nary->length; ++i) |
2971 | CONSTRUCTOR_APPEND_ELT (elts, NULL_TREE, genop[i]); |
2972 | folded = build_constructor (nary->type, elts); |
2973 | name = make_temp_ssa_name (exprtype, NULL, "pretmp"); |
2974 | newstmt = gimple_build_assign (name, folded); |
2975 | gimple_set_location (newstmt, expr->loc); |
2976 | gimple_seq_add_stmt_without_update (&forced_stmts, newstmt); |
2977 | folded = name; |
2978 | } |
2979 | else |
2980 | { |
2981 | switch (nary->length) |
2982 | { |
2983 | case 1: |
2984 | folded = gimple_build (&forced_stmts, expr->loc, |
2985 | nary->opcode, nary->type, genop[0]); |
2986 | break; |
2987 | case 2: |
2988 | folded = gimple_build (&forced_stmts, expr->loc, nary->opcode, |
2989 | nary->type, genop[0], genop[1]); |
2990 | break; |
2991 | case 3: |
2992 | folded = gimple_build (&forced_stmts, expr->loc, nary->opcode, |
2993 | nary->type, genop[0], genop[1], |
2994 | genop[2]); |
2995 | break; |
2996 | default: |
2997 | gcc_unreachable (); |
2998 | } |
2999 | } |
3000 | } |
3001 | break; |
3002 | default: |
3003 | gcc_unreachable (); |
3004 | } |
3005 | |
3006 | folded = gimple_convert (&forced_stmts, exprtype, folded); |
3007 | |
3008 | /* If there is nothing to insert, return the simplified result. */ |
3009 | if (gimple_seq_empty_p (forced_stmts)) |
3010 | return folded; |
3011 | /* If we simplified to a constant, return it and discard any stmts |
3012 | built so far. */ |
3013 | if (is_gimple_min_invariant (folded)) |
3014 | { |
3015 | gimple_seq_discard (forced_stmts); |
3016 | return folded; |
3017 | } |
3018 | /* Likewise if we simplified to something not queued for insertion. */ |
3019 | bool found = false; |
3020 | gsi = gsi_last (forced_stmts); |
3021 | for (; !gsi_end_p (gsi); gsi_prev (&gsi)) |
3022 | { |
3023 | gimple *stmt = gsi_stmt (gsi); |
3024 | tree forcedname = gimple_get_lhs (stmt); |
3025 | if (forcedname == folded) |
3026 | { |
3027 | found = true; |
3028 | break; |
3029 | } |
3030 | } |
3031 | if (! found) |
3032 | { |
3033 | gimple_seq_discard (forced_stmts); |
3034 | return folded; |
3035 | } |
3036 | gcc_assert (TREE_CODE (folded) == SSA_NAME); |
3037 | |
3038 | /* If we have any intermediate expressions to the value sets, add them |
3039 | to the value sets and chain them in the instruction stream. */ |
3040 | if (forced_stmts) |
3041 | { |
3042 | gsi = gsi_start (forced_stmts); |
3043 | for (; !gsi_end_p (gsi); gsi_next (&gsi)) |
3044 | { |
3045 | gimple *stmt = gsi_stmt (gsi); |
3046 | tree forcedname = gimple_get_lhs (stmt); |
3047 | pre_expr nameexpr; |
3048 | |
3049 | if (forcedname != folded) |
3050 | { |
3051 | vn_ssa_aux_t vn_info = VN_INFO (forcedname); |
3052 | vn_info->valnum = forcedname; |
3053 | vn_info->value_id = get_next_value_id (); |
3054 | nameexpr = get_or_alloc_expr_for_name (forcedname); |
3055 | add_to_value (vn_info->value_id, nameexpr); |
3056 | if (NEW_SETS (block)) |
3057 | bitmap_value_replace_in_set (NEW_SETS (block), nameexpr); |
3058 | bitmap_value_replace_in_set (AVAIL_OUT (block), nameexpr); |
3059 | } |
3060 | |
3061 | bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (forcedname)); |
3062 | } |
3063 | gimple_seq_add_seq (stmts, forced_stmts); |
3064 | } |
3065 | |
3066 | name = folded; |
3067 | |
3068 | /* Fold the last statement. */ |
3069 | gsi = gsi_last (*stmts); |
3070 | if (fold_stmt_inplace (&gsi)) |
3071 | update_stmt (gsi_stmt (gsi)); |
3072 | |
3073 | /* Add a value number to the temporary. |
3074 | The value may already exist in either NEW_SETS, or AVAIL_OUT, because |
3075 | we are creating the expression by pieces, and this particular piece of |
3076 | the expression may have been represented. There is no harm in replacing |
3077 | here. */ |
3078 | value_id = get_expr_value_id (expr); |
3079 | vn_ssa_aux_t vn_info = VN_INFO (name); |
3080 | vn_info->value_id = value_id; |
3081 | vn_info->valnum = vn_valnum_from_value_id (value_id); |
3082 | if (vn_info->valnum == NULL_TREE) |
3083 | vn_info->valnum = name; |
3084 | gcc_assert (vn_info->valnum != NULL_TREE); |
3085 | nameexpr = get_or_alloc_expr_for_name (name); |
3086 | add_to_value (value_id, nameexpr); |
3087 | if (NEW_SETS (block)) |
3088 | bitmap_value_replace_in_set (NEW_SETS (block), nameexpr); |
3089 | bitmap_value_replace_in_set (AVAIL_OUT (block), nameexpr); |
3090 | |
3091 | pre_stats.insertions++; |
3092 | if (dump_file && (dump_flags & TDF_DETAILS)) |
3093 | { |
3094 | fprintf (dump_file, "Inserted "); |
3095 | print_gimple_stmt (dump_file, gsi_stmt (gsi_last (*stmts)), 0); |
3096 | fprintf (dump_file, " in predecessor %d (%04d)\n", |
3097 | block->index, value_id); |
3098 | } |
3099 | |
3100 | return name; |
3101 | } |
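 | |
 | /* For example (a sketch; names illustrative): asked for the value of |
 | a_1 + b_2 * 4 in BLOCK, this may append |
 | _5 = b_2 * 4; |
 | pretmp_6 = a_1 + _5; |
 | to STMTS and return pretmp_6, registering both temporaries in |
 | AVAIL_OUT (and NEW_SETS) of BLOCK under their value-ids. */ |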
3102 | |
3103 | |
3104 | /* Insert the to-be-made-available values of expression EXPRNUM for each |
3105 | predecessor, stored in AVAIL, into the predecessors of BLOCK, and |
3106 | merge the result with a phi node, given the same value number as |
3107 | NODE. Return true if we have inserted new stuff. */ |
3108 | |
3109 | static bool |
3110 | insert_into_preds_of_block (basic_block block, unsigned int exprnum, |
3111 | vec<pre_expr> &avail) |
3112 | { |
3113 | pre_expr expr = expression_for_id (exprnum); |
3114 | pre_expr newphi; |
3115 | unsigned int val = get_expr_value_id (expr); |
3116 | edge pred; |
3117 | bool insertions = false; |
3118 | bool nophi = false; |
3119 | basic_block bprime; |
3120 | pre_expr eprime; |
3121 | edge_iterator ei; |
3122 | tree type = get_expr_type (expr); |
3123 | tree temp; |
3124 | gphi *phi; |
3125 | |
3126 | /* Make sure we aren't creating an induction variable. */ |
3127 | if (bb_loop_depth (block) > 0 && EDGE_COUNT (block->preds) == 2) |
3128 | { |
3129 | bool firstinsideloop = false; |
3130 | bool secondinsideloop = false; |
3131 | firstinsideloop = flow_bb_inside_loop_p (block->loop_father, |
3132 | EDGE_PRED (block, 0)->src); |
3133 | secondinsideloop = flow_bb_inside_loop_p (block->loop_father, |
3134 | EDGE_PRED (block, 1)->src); |
3135 | /* Induction variables only have one edge inside the loop. */ |
3136 | if ((firstinsideloop ^ secondinsideloop) |
3137 | && expr->kind != REFERENCE) |
3138 | { |
3139 | if (dump_file && (dump_flags & TDF_DETAILS)) |
3140 | fprintf (dump_file, "Skipping insertion of phi for partial " |
3141 | "redundancy: Looks like an induction variable\n"); |
3142 | nophi = true; |
3143 | } |
3144 | } |
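 | /* Example of what the check above avoids (names illustrative): for a |
 | loop header with i_2 = PHI <0 (preheader), i_3 (latch)> and |
 | i_3 = i_2 + 1 in the body, inserting a PHI for the anticipated |
 | value i_2 + 1 would materialize a second induction variable |
 | instead of removing a redundancy. */ |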
3145 | |
3146 | /* Make the necessary insertions. */ |
3147 | FOR_EACH_EDGE (pred, ei, block->preds) |
3148 | { |
3149 | /* When we are not inserting a PHI node do not bother inserting |
3150 | into places that do not dominate the anticipated computations. */ |
3151 | if (nophi && !dominated_by_p (CDI_DOMINATORS, block, pred->src)) |
3152 | continue; |
3153 | gimple_seq stmts = NULL; |
3154 | tree builtexpr; |
3155 | bprime = pred->src; |
3156 | eprime = avail[pred->dest_idx]; |
3157 | builtexpr = create_expression_by_pieces (bprime, eprime, |
3158 | &stmts, type); |
3159 | gcc_assert (!(pred->flags & EDGE_ABNORMAL)); |
3160 | if (!gimple_seq_empty_p (stmts)) |
3161 | { |
3162 | basic_block new_bb = gsi_insert_seq_on_edge_immediate (pred, stmts); |
3163 | gcc_assert (! new_bb); |
3164 | insertions = true; |
3165 | } |
3166 | if (!builtexpr) |
3167 | { |
3168 | /* We cannot insert a PHI node if we failed to insert |
3169 | on one edge. */ |
3170 | nophi = true; |
3171 | continue; |
3172 | } |
3173 | if (is_gimple_min_invariant (builtexpr)) |
3174 | avail[pred->dest_idx] = get_or_alloc_expr_for_constant (builtexpr); |
3175 | else |
3176 | avail[pred->dest_idx] = get_or_alloc_expr_for_name (builtexpr); |
3177 | } |
3178 | /* If we didn't want a phi node, and we made insertions, we still have |
3179 | inserted new stuff, and thus return true. If we didn't want a phi node, |
3180 | and didn't make insertions, we haven't added anything new, so return |
3181 | false. */ |
3182 | if (nophi && insertions) |
3183 | return true; |
3184 | else if (nophi && !insertions) |
3185 | return false; |
3186 | |
3187 | /* Now build a phi for the new variable. */ |
3188 | temp = make_temp_ssa_name (type, NULL, "prephitmp"); |
3189 | phi = create_phi_node (temp, block); |
3190 | |
3191 | vn_ssa_aux_t vn_info = VN_INFO (temp); |
3192 | vn_info->value_id = val; |
3193 | vn_info->valnum = vn_valnum_from_value_id (val); |
3194 | if (vn_info->valnum == NULL_TREE) |
3195 | vn_info->valnum = temp; |
3196 | bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (temp)); |
3197 | FOR_EACH_EDGE (pred, ei, block->preds) |
3198 | { |
3199 | pre_expr ae = avail[pred->dest_idx]; |
3200 | gcc_assert (get_expr_type (ae) == type |
3201 | || useless_type_conversion_p (type, get_expr_type (ae))); |
3202 | if (ae->kind == CONSTANT) |
3203 | add_phi_arg (phi, unshare_expr (PRE_EXPR_CONSTANT (ae)), |
3204 | pred, UNKNOWN_LOCATION); |
3205 | else |
3206 | add_phi_arg (phi, PRE_EXPR_NAME (ae), pred, UNKNOWN_LOCATION); |
3207 | } |
3208 | |
3209 | newphi = get_or_alloc_expr_for_name (temp); |
3210 | add_to_value (val, newphi); |
3211 | |
3212 | /* The value should *not* exist in PHI_GEN, or else we wouldn't be doing |
3213 | this insertion, since we test for the existence of this value in PHI_GEN |
3214 | before proceeding with the partial redundancy checks. |
3215 | |
3216 | The value may exist in AVAIL_OUT, in particular, it could be represented |
3217 | by the expression we are trying to eliminate, in which case we want the |
3218 | replacement to occur. If it does not exist in AVAIL_OUT, we want it |
3219 | inserted there. |
3220 | |
3221 | Similarly to the PHI_GEN case, the value should not exist in NEW_SETS of |
3222 | this block, because if it did, it would have existed in our dominator's |
3223 | AVAIL_OUT, and would have been skipped due to the full redundancy check. |
3224 | */ |
3225 | |
3226 | bitmap_insert_into_set (PHI_GEN (block), newphi); |
3227 | bitmap_value_replace_in_set (AVAIL_OUT (block), |
3228 | newphi); |
3229 | if (NEW_SETS (block)) |
3230 | bitmap_insert_into_set (NEW_SETS (block), newphi); |
3231 | |
3232 | /* If we insert a PHI node for a conversion of another PHI node |
3233 | in the same basic-block try to preserve range information. |
3234 | This is important so that followup loop passes receive optimal |
3235 | number of iteration analysis results. See PR61743. */ |
3236 | if (expr->kind == NARY |
3237 | && CONVERT_EXPR_CODE_P (expr->u.nary->opcode) |
3238 | && TREE_CODE (expr->u.nary->op[0]) == SSA_NAME |
3239 | && gimple_bb (SSA_NAME_DEF_STMT (expr->u.nary->op[0])) == block |
3240 | && INTEGRAL_TYPE_P (type) |
3241 | && INTEGRAL_TYPE_P (TREE_TYPE (expr->u.nary->op[0])) |
3242 | && (TYPE_PRECISION (type) |
3243 | >= TYPE_PRECISION (TREE_TYPE (expr->u.nary->op[0]))) |
3244 | && SSA_NAME_RANGE_INFO (expr->u.nary->op[0])) |
3245 | { |
3246 | value_range r; |
3247 | if (get_range_query (cfun)->range_of_expr (r, expr->u.nary->op[0]) |
3248 | && r.kind () == VR_RANGE |
3249 | && !wi::neg_p (r.lower_bound (), SIGNED) |
3250 | && !wi::neg_p (r.upper_bound (), SIGNED)) |
3251 | { |
3252 | /* Just handle extension and sign-changes of all-positive ranges. */ |
3253 | range_cast (r, type); |
3254 | set_range_info (temp, r); |
3255 | } |
3256 | } |
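 | /* E.g. (a sketch; names illustrative) for i_1 of type int known to |
 | lie in [0, 100] and an inserted conversion PHI |
 | prephitmp_2 = (long) i_1 |
 | in the same block, copying the [0, 100] range to prephitmp_2 keeps |
 | the iteration-count analysis of later loop passes as precise as |
 | before. */ |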
3257 | |
3258 | if (dump_file && (dump_flags & TDF_DETAILS)) |
3259 | { |
3260 | fprintf (dump_file, "Created phi "); |
3261 | print_gimple_stmt (dump_file, phi, 0); |
3262 | fprintf (dump_file, " in block %d (%04d)\n", block->index, val); |
3263 | } |
3264 | pre_stats.phis++; |
3265 | return true; |
3266 | } |
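 | |
 | /* A sketch of the overall effect (names illustrative): with AVAIL |
 | holding x_1 for pred1 and only the value-form expression for pred2, |
 | pretmp_4 = a_2 + b_3; |
 | is inserted on the pred2 edge and |
 | prephitmp_5 = PHI <x_1 (pred1), pretmp_4 (pred2)> |
 | is created in BLOCK, giving the value a single leader there. */ |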
3267 | |
3268 | |
3269 | |
3270 | /* Perform insertion of partially redundant or hoistable values. |
3271 | For BLOCK, do the following: |
3272 | 1. Propagate the NEW_SETS of the dominator into the current block. |
3273 | If the block has multiple predecessors, |
3274 | 2a. Iterate over the ANTIC expressions for the block to see if |
3275 | any of them are partially redundant. |
3276 | 2b. If so, insert them into the necessary predecessors to make |
3277 | the expression fully redundant. |
3278 | 2c. Insert a new PHI merging the values of the predecessors. |
3279 | 2d. Insert the new PHI, and the new expressions, into the |
3280 | NEW_SETS set. |
3281 | If the block has multiple successors, |
3282 | 3a. Iterate over the ANTIC values for the block to see if |
3283 | any of them are good candidates for hoisting. |
3284 | 3b. If so, insert expressions computing the values in BLOCK, |
3285 | and add the new expressions into the NEW_SETS set. |
3286 | 4. Continue with the next block in the reverse post-order walk. |
3287 | |
3288 | Steps 1, 2a, and 4 are done by insert. 2b, 2c and 2d are done by |
3289 | do_pre_regular_insertion and do_pre_partial_partial_insertion. |
3290 | 3a and 3b are done in do_hoist_insertion. |
3291 | */ |
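 | |
 | /* A small end-to-end example of steps 2a-2d (a sketch; SSA names |
 | illustrative): |
 | if (c_1) x_2 = a_3 + b_4; ... y_5 = a_3 + b_4; |
 | becomes, after inserting into the other predecessor and merging, |
 | if (c_1) x_2 = a_3 + b_4; else pretmp_6 = a_3 + b_4; |
 | prephitmp_7 = PHI <x_2, pretmp_6> |
 | after which the elimination phase rewrites y_5 to use prephitmp_7. */ |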
3292 | |
3293 | static bool |
3294 | do_pre_regular_insertion (basic_block block, basic_block dom, |
3295 | vec<pre_expr> exprs) |
3296 | { |
3297 | bool new_stuff = false; |
3298 | pre_expr expr; |
3299 | auto_vec<pre_expr, 2> avail; |
3300 | int i; |
3301 | |
3302 | avail.safe_grow (EDGE_COUNT (block->preds), true); |
3303 | |
3304 | FOR_EACH_VEC_ELT (exprs, i, expr) |
3305 | { |
3306 | if (expr->kind == NARY |
3307 | || expr->kind == REFERENCE) |
3308 | { |
3309 | unsigned int val; |
3310 | bool by_some = false; |
3311 | bool cant_insert = false; |
3312 | bool all_same = true; |
3313 | pre_expr first_s = NULL; |
3314 | edge pred; |
3315 | basic_block bprime; |
3316 | pre_expr eprime = NULL; |
3317 | edge_iterator ei; |
3318 | pre_expr edoubleprime = NULL; |
3319 | bool do_insertion = false; |
3320 | |
3321 | val = get_expr_value_id (expr); |
3322 | if (bitmap_set_contains_value (PHI_GEN (block), val)) |
3323 | continue; |
3324 | if (bitmap_set_contains_value (AVAIL_OUT (dom), val)) |
3325 | { |
3326 | if (dump_file && (dump_flags & TDF_DETAILS)) |
3327 | { |
3328 | fprintf (dump_file, "Found fully redundant value: "); |
3329 | print_pre_expr (dump_file, expr); |
3330 | fprintf (dump_file, "\n"); |
3331 | } |
3332 | continue; |
3333 | } |
3334 | |
3335 | FOR_EACH_EDGE (pred, ei, block->preds) |
3336 | { |
3337 | unsigned int vprime; |
3338 | |
3339 | /* We should never run insertion for the exit block |
3340 | and so not come across fake pred edges. */ |
3341 | gcc_assert (!(pred->flags & EDGE_FAKE)); |
3342 | bprime = pred->src; |
3343 | /* We are looking at ANTIC_OUT of bprime. */ |
3344 | eprime = phi_translate (NULL, expr, ANTIC_IN (block), NULL, pred); |
3345 | |
3346 | /* eprime will generally only be NULL if the |
3347 | value of the expression, translated |
3348 | through the PHI for this predecessor, is |
3349 | undefined. If that is the case, we can't |
3350 | make the expression fully redundant, |
3351 | because its value is undefined along a |
3352 | predecessor path. We can thus break out |
3353 | early because it doesn't matter what the |
3354 | rest of the results are. */ |
3355 | if (eprime == NULL) |
3356 | { |
3357 | avail[pred->dest_idx] = NULL; |
3358 | cant_insert = true; |
3359 | break; |
3360 | } |
3361 | |
3362 | vprime = get_expr_value_id (eprime); |
3363 | edoubleprime = bitmap_find_leader (AVAIL_OUT (bprime), |
3364 | vprime); |
3365 | if (edoubleprime == NULL) |
3366 | { |
3367 | avail[pred->dest_idx] = eprime; |
3368 | all_same = false; |
3369 | } |
3370 | else |
3371 | { |
3372 | avail[pred->dest_idx] = edoubleprime; |
3373 | by_some = true; |
3374 | /* We want to perform insertions to remove a redundancy on |
3375 | a path in the CFG we want to optimize for speed. */ |
3376 | if (optimize_edge_for_speed_p (pred)) |
3377 | do_insertion = true; |
3378 | if (first_s == NULL) |
3379 | first_s = edoubleprime; |
3380 | else if (!pre_expr_d::equal (first_s, edoubleprime)) |
3381 | all_same = false; |
3382 | } |
3383 | } |
3384 | /* If we can insert it, it's not the same value |
3385 | already existing along every predecessor, and |
3386 | it's defined by some predecessor, it is |
3387 | partially redundant. */ |
3388 | if (!cant_insert && !all_same && by_some) |
3389 | { |
3390 | if (!do_insertion) |
3391 | { |
3392 | if (dump_file && (dump_flags & TDF_DETAILS)) |
3393 | { |
3394 | fprintf (dump_file, "Skipping partial redundancy for " |
3395 | "expression "); |
3396 | print_pre_expr (dump_file, expr); |
3397 | fprintf (dump_file, " (%04d), no redundancy on to be " |
3398 | "optimized for speed edge\n", val); |
3399 | } |
3400 | } |
3401 | else if (dbg_cnt (treepre_insert)) |
3402 | { |
3403 | if (dump_file && (dump_flags & TDF_DETAILS)) |
3404 | { |
3405 | fprintf (dump_file, "Found partial redundancy for " |
3406 | "expression "); |
3407 | print_pre_expr (dump_file, expr); |
3408 | fprintf (dump_file, " (%04d)\n", |
3409 | get_expr_value_id (expr)); |
3410 | } |
3411 | if (insert_into_preds_of_block (block, |
3412 | get_expression_id (expr), |
3413 | avail)) |
3414 | new_stuff = true; |
3415 | } |
3416 | } |
3417 | /* If all edges produce the same value and that value is |
3418 | an invariant, then the PHI has the same value on all |
3419 | edges. Note this. */ |
3420 | else if (!cant_insert |
3421 | && all_same |
3422 | && (edoubleprime->kind != NAME |
3423 | || !SSA_NAME_OCCURS_IN_ABNORMAL_PHI |
3424 | (PRE_EXPR_NAME (edoubleprime)))) |
3425 | { |
3426 | gcc_assert (edoubleprime->kind == CONSTANT |
3427 | || edoubleprime->kind == NAME); |
3428 | |
3429 | tree temp = make_temp_ssa_name (get_expr_type (expr), |
3430 | NULL, "pretmp"); |
3431 | gassign *assign |
3432 | = gimple_build_assign (temp, |
3433 | edoubleprime->kind == CONSTANT ? |
3434 | PRE_EXPR_CONSTANT (edoubleprime) : |
3435 | PRE_EXPR_NAME (edoubleprime)); |
3436 | gimple_stmt_iterator gsi = gsi_after_labels (block); |
3437 | gsi_insert_before (&gsi, assign, GSI_NEW_STMT); |
3438 | |
3439 | vn_ssa_aux_t vn_info = VN_INFO (temp); |
3440 | vn_info->value_id = val; |
3441 | vn_info->valnum = vn_valnum_from_value_id (val); |
3442 | if (vn_info->valnum == NULL_TREE) |
3443 | vn_info->valnum = temp; |
3444 | bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (temp)); |
3445 | pre_expr newe = get_or_alloc_expr_for_name (temp); |
3446 | add_to_value (val, newe); |
3447 | bitmap_value_replace_in_set (AVAIL_OUT (block), newe); |
3448 | bitmap_insert_into_set (NEW_SETS (block), newe); |
3449 | bitmap_insert_into_set (PHI_GEN (block), newe); |
3450 | } |
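 | /* E.g. if every predecessor yields the constant 42 for the value, |
 | pretmp_8 = 42; |
 | is inserted at the start of BLOCK instead of a PHI node (the name |
 | is illustrative). */ |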
3451 | } |
3452 | } |
3453 | |
3454 | return new_stuff; |
3455 | } |
3456 | |
3457 | |
3458 | /* Perform insertion for partially anticipatable expressions. There |
3459 | is only one case we will perform insertion for these. This case is |
3460 | if the expression is partially anticipatable, and fully available. |
3461 | In this case, we know that putting it earlier will enable us to |
3462 | remove the later computation. */ |
3463 | |
3464 | static bool |
3465 | do_pre_partial_partial_insertion (basic_block block, basic_block dom, |
3466 | vec<pre_expr> exprs) |
3467 | { |
3468 | bool new_stuff = false; |
3469 | pre_expr expr; |
3470 | auto_vec<pre_expr, 2> avail; |
3471 | int i; |
3472 | |
3473 | avail.safe_grow (EDGE_COUNT (block->preds), true); |
3474 | |
3475 | FOR_EACH_VEC_ELT (exprs, i, expr) |
3476 | { |
3477 | if (expr->kind == NARY |
3478 | || expr->kind == REFERENCE) |
3479 | { |
3480 | unsigned int val; |
3481 | bool by_all = true; |
3482 | bool cant_insert = false; |
3483 | edge pred; |
3484 | basic_block bprime; |
3485 | pre_expr eprime = NULL; |
3486 | edge_iterator ei; |
3487 | |
3488 | val = get_expr_value_id (expr); |
3489 | if (bitmap_set_contains_value (PHI_GEN (block), val)) |
3490 | continue; |
3491 | if (bitmap_set_contains_value (AVAIL_OUT (dom), val)) |
3492 | continue; |
3493 | |
3494 | FOR_EACH_EDGE (pred, ei, block->preds) |
3495 | { |
3496 | unsigned int vprime; |
3497 | pre_expr edoubleprime; |
3498 | |
3499 | /* We should never run insertion for the exit block |
3500 | and so not come across fake pred edges. */ |
3501 | gcc_assert (!(pred->flags & EDGE_FAKE)); |
3502 | bprime = pred->src; |
3503 | eprime = phi_translate (NULL, expr, ANTIC_IN (block), |
3504 | PA_IN (block), pred); |
3505 | |
3506 | /* eprime will generally only be NULL if the |
3507 | value of the expression, translated |
3508 | through the PHI for this predecessor, is |
3509 | undefined. If that is the case, we can't |
3510 | make the expression fully redundant, |
3511 | because its value is undefined along a |
3512 | predecessor path. We can thus break out |
3513 | early because it doesn't matter what the |
3514 | rest of the results are. */ |
3515 | if (eprime == NULL) |
3516 | { |
3517 | avail[pred->dest_idx] = NULL; |
3518 | cant_insert = true; |
3519 | break; |
3520 | } |
3521 | |
3522 | vprime = get_expr_value_id (eprime); |
3523 | edoubleprime = bitmap_find_leader (AVAIL_OUT (bprime), vprime); |
3524 | avail[pred->dest_idx] = edoubleprime; |
3525 | if (edoubleprime == NULL) |
3526 | { |
3527 | by_all = false; |
3528 | break; |
3529 | } |
3530 | } |
3531 | |
3532 | /* If we can insert it and its value is available along every |
3533 | predecessor, the expression is only partially anticipated but |
3534 | fully available, so inserting the merge here lets us remove the |
3535 | later computation. */ |
3536 | if (!cant_insert && by_all) |
3537 | { |
3538 | edge succ; |
3539 | bool do_insertion = false; |
3540 | |
3541 | /* Insert only if we can remove a later expression on a path |
3542 | that we want to optimize for speed. |
3543 | The phi node that we will be inserting in BLOCK is not free, |
3544 | and inserting it for the sake of a !optimize_for_speed successor |
3545 | may cause regressions on the speed path. */ |
3546 | FOR_EACH_EDGE (succ, ei, block->succs) |
3547 | { |
3548 | if (bitmap_set_contains_value (PA_IN (succ->dest), val) |
3549 | || bitmap_set_contains_value (ANTIC_IN (succ->dest), val)) |
3550 | { |
3551 | if (optimize_edge_for_speed_p (succ)) |
3552 | do_insertion = true; |
3553 | } |
3554 | } |
3555 | |
3556 | if (!do_insertion) |
3557 | { |
3558 | if (dump_file && (dump_flags & TDF_DETAILS)) |
3559 | { |
3560 | fprintf (dump_file, "Skipping partial partial redundancy " |
3561 | "for expression "); |
3562 | print_pre_expr (dump_file, expr); |
3563 | fprintf (dump_file, " (%04d), not (partially) anticipated " |
3564 | "on any to be optimized for speed edges\n", val); |
3565 | } |
3566 | } |
3567 | else if (dbg_cnt (treepre_insert)) |
3568 | { |
3569 | pre_stats.pa_insert++; |
3570 | if (dump_file && (dump_flags & TDF_DETAILS)) |
3571 | { |
3572 | fprintf (dump_file, "Found partial partial redundancy " |
3573 | "for expression "); |
3574 | print_pre_expr (dump_file, expr); |
3575 | fprintf (dump_file, " (%04d)\n", |
3576 | get_expr_value_id (expr)); |
3577 | } |
3578 | if (insert_into_preds_of_block (block, |
3579 | get_expression_id (expr), |
3580 | avail)) |
3581 | new_stuff = true; |
3582 | } |
3583 | } |
3584 | } |
3585 | } |
3586 | |
3587 | return new_stuff; |
3588 | } |
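 | |
 | /* A sketch of the case handled above (names illustrative): |
 | x_1 = a_2 + b_3; |
 | loop { if (c_4) ... = a_2 + b_3; } |
 | The value of a_2 + b_3 is only partially anticipated at the loop |
 | header but fully available from every predecessor, so a merge is |
 | inserted there and the conditional computation inside the loop |
 | becomes fully redundant. */ |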
3589 | |
3590 | /* Insert expressions in BLOCK to compute hoistable values up. |
3591 | Return TRUE if something was inserted, otherwise return FALSE. |
3592 | The caller has to make sure that BLOCK has at least two successors. */ |
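 | |
 | /* Example (a sketch; names illustrative): given |
 | if (c_1) x_2 = a_3 + b_4; else y_5 = a_3 + b_4; |
 | the value of a_3 + b_4 is anticipated in the branch block and |
 | available out of both successors, so |
 | pretmp_6 = a_3 + b_4; |
 | is inserted before the branch, making both later computations |
 | fully redundant. */ |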
3593 | |
3594 | static bool |
3595 | do_hoist_insertion (basic_block block) |
3596 | { |
3597 | edge e; |
3598 | edge_iterator ei; |
3599 | bool new_stuff = false; |
3600 | unsigned i; |
3601 | gimple_stmt_iterator last; |
3602 | |
3603 | /* At least two successors, or else... */ |
3604 | gcc_assert (EDGE_COUNT (block->succs) >= 2); |
3605 | |
3606 | /* Check that all successors of BLOCK are dominated by block. |
3607 | We could use dominated_by_p() for this, but actually there is a much |
3608 | quicker check: any successor that is dominated by BLOCK can't have |
3609 | more than one predecessor edge. */ |
3610 | FOR_EACH_EDGE (e, ei, block->succs) |
3611 | if (! single_pred_p (e->dest)) |
3612 | return false; |
3613 | |
3614 | /* Determine the insertion point. If we cannot safely insert before |
3615 | the last stmt if we'd have to, bail out. */ |
3616 | last = gsi_last_bb (block); |
3617 | if (!gsi_end_p (last) |
3618 | && !is_ctrl_stmt (gsi_stmt (last)) |
3619 | && stmt_ends_bb_p (gsi_stmt (last))) |
3620 | return false; |
3621 | |
3622 | /* Compute the set of hoistable expressions from ANTIC_IN. First compute |
3623 | hoistable values. */ |
3624 | bitmap_set hoistable_set; |
3625 | |
3626 | /* A hoistable value must be in ANTIC_IN(block) |
3627 | but not in AVAIL_OUT(BLOCK). */ |
3628 | bitmap_initialize (&hoistable_set.values, &grand_bitmap_obstack); |
3629 | bitmap_and_compl (&hoistable_set.values, |
3630 | &ANTIC_IN (block)->values, &AVAIL_OUT (block)->values); |
3631 | |
3632 | /* Short-cut for a common case: hoistable_set is empty. */ |
3633 | if (bitmap_empty_p (&hoistable_set.values)) |
3634 | return false; |
3635 | |
3636 | /* Compute which of the hoistable values is in AVAIL_OUT of |
3637 | at least one of the successors of BLOCK. */ |
3638 | bitmap_head availout_in_some; |
3639 | bitmap_initialize (&availout_in_some, &grand_bitmap_obstack); |
3640 | FOR_EACH_EDGE (e, ei, block->succs) |
3641 | /* Do not consider expressions solely because of their availability |
3642 | on loop exits. They'd be ANTIC-IN throughout the whole loop |
3643 | and thus effectively hoisted across loops by combination of |
3644 | PRE and hoisting. */ |
3645 | if (! loop_exit_edge_p (block->loop_father, e)) |
3646 | bitmap_ior_and_into (&availout_in_some, &hoistable_set.values, |
3647 | &AVAIL_OUT (e->dest)->values); |
3648 | bitmap_clear (&hoistable_set.values); |
3649 | |
3650 | /* Short-cut for a common case: availout_in_some is empty. */ |
3651 | if (bitmap_empty_p (&availout_in_some)) |
3652 | return false; |
3653 | |
3654 | /* Hack hoistable_set in-place so we can use sorted_array_from_bitmap_set. */ |
3655 | bitmap_move (&hoistable_set.values, &availout_in_some); |
3656 | hoistable_set.expressions = ANTIC_IN (block)->expressions; |
3657 | |
3658 | /* Now finally construct the topological-ordered expression set. */ |
3659 | vec<pre_expr> exprs = sorted_array_from_bitmap_set (&hoistable_set); |
3660 | |
3661 | bitmap_clear (&hoistable_set.values); |
3662 | |
3663 | /* If there are candidate values for hoisting, insert expressions |
3664 | strategically to make the hoistable expressions fully redundant. */ |
3665 | pre_expr expr; |
3666 | FOR_EACH_VEC_ELT (exprs, i, expr) |
3667 | { |
3668 | /* While we try to sort expressions topologically above, the |
3669 | sorting doesn't work out perfectly. Catch expressions we |
3670 | already inserted. */ |
3671 | unsigned int value_id = get_expr_value_id (expr); |
3672 | if (bitmap_set_contains_value (AVAIL_OUT (block), value_id)) |
3673 | { |
3674 | if (dump_file && (dump_flags & TDF_DETAILS)) |
3675 | { |
3676 | fprintf (dump_file, |
3677 | "Already inserted expression for "); |
3678 | print_pre_expr (dump_file, expr); |
3679 | fprintf (dump_file, " (%04d)\n", value_id); |
3680 | } |
3681 | continue; |
3682 | } |
3683 | |
3684 | /* If we end up with a punned expression representation and this |
3685 | happens to be a float typed one give up - we can't know for |
3686 | sure whether all paths perform the floating-point load we are |
3687 | about to insert and on some targets this can cause correctness |
3688 | issues. See PR88240. */ |
3689 | if (expr->kind == REFERENCE |
3690 | && PRE_EXPR_REFERENCE (expr)->punned |
3691 | && FLOAT_TYPE_P (get_expr_type (expr))) |
3692 | continue; |
3693 | |
3694 | /* OK, we should hoist this value. Perform the transformation. */ |
3695 | pre_stats.hoist_insert++; |
3696 | if (dump_file && (dump_flags & TDF_DETAILS)) |
3697 | { |
3698 | fprintf (dump_file, |
3699 | "Inserting expression in block %d for code hoisting: ", |
3700 | block->index); |
3701 | print_pre_expr (dump_file, expr); |
3702 | fprintf (dump_file, " (%04d)\n", value_id); |
3703 | } |
3704 | |
3705 | gimple_seq stmts = NULL; |
3706 | tree res = create_expression_by_pieces (block, expr, &stmts, |
3707 | get_expr_type (expr)); |
3708 | |
3709 | /* Do not return true if expression creation ultimately |
3710 | did not insert any statements. */ |
3711 | if (gimple_seq_empty_p (stmts)) |
3712 | res = NULL_TREE; |
3713 | else |
3714 | { |
3715 | if (gsi_end_p (last) || is_ctrl_stmt (gsi_stmt (last))) |
3716 | gsi_insert_seq_before (&last, stmts, GSI_SAME_STMT); |
3717 | else |
3718 | gsi_insert_seq_after (&last, stmts, GSI_NEW_STMT); |
3719 | } |
3720 | |
3721 | /* Make sure to not return true if expression creation ultimately |
3722 | failed but also make sure to insert any stmts produced as they |
3723 | are tracked in inserted_exprs. */ |
3724 | if (! res) |
3725 | continue; |
3726 | |
3727 | new_stuff = true; |
3728 | } |
3729 | |
3730 | exprs.release (); |
3731 | |
3732 | return new_stuff; |
3733 | } |
3734 | |
3735 | /* Perform insertion of partially redundant and hoistable values. */ |
3736 | |
3737 | static void |
3738 | insert (void) |
3739 | { |
3740 | basic_block bb; |
3741 | |
3742 | FOR_ALL_BB_FN (bb, cfun) |
3743 | NEW_SETS (bb) = bitmap_set_new (); |
3744 | |
3745 | int *rpo = XNEWVEC (int, n_basic_blocks_for_fn (cfun)); |
3746 | int *bb_rpo = XNEWVEC (int, last_basic_block_for_fn (cfun) + 1); |
3747 | int rpo_num = pre_and_rev_post_order_compute (NULL, rpo, false); |
3748 | for (int i = 0; i < rpo_num; ++i) |
3749 | bb_rpo[rpo[i]] = i; |
3750 | |
3751 | int num_iterations = 0; |
3752 | bool changed; |
3753 | do |
3754 | { |
3755 | num_iterations++; |
3756 | if (dump_file && dump_flags & TDF_DETAILS) |
3757 | fprintf (dump_file, "Starting insert iteration %d\n", num_iterations); |
3758 | |
3759 | changed = false; |
3760 | for (int idx = 0; idx < rpo_num; ++idx) |
3761 | { |
3762 | basic_block block = BASIC_BLOCK_FOR_FN (cfun, rpo[idx]); |
3763 | basic_block dom = get_immediate_dominator (CDI_DOMINATORS, block); |
3764 | if (dom) |
3765 | { |
3766 | unsigned i; |
3767 | bitmap_iterator bi; |
3768 | bitmap_set_t newset; |
3769 | |
3770 | /* First, update the AVAIL_OUT set with anything we may have |
3771 | inserted higher up in the dominator tree. */ |
3772 | newset = NEW_SETS (dom); |
3773 | |
3774 | /* Note that we need to value_replace in both NEW_SETS and |
3775 | AVAIL_OUT. In both sets the value may already be represented |
3776 | by some non-simple expression that we want to replace with |
3777 | the new leader. */ |
3778 | bool avail_out_changed = false; |
3779 | FOR_EACH_EXPR_ID_IN_SET (newset, i, bi) |
3780 | { |
3781 | pre_expr expr = expression_for_id (i); |
3782 | bitmap_value_replace_in_set (NEW_SETS (block), expr); |
3783 | avail_out_changed |
3784 | |= bitmap_value_replace_in_set (AVAIL_OUT (block), expr); |
3785 | } |
3786 | /* We need to iterate if AVAIL_OUT of an already processed |
3787 | block source changed. */ |
3788 | if (avail_out_changed && !changed) |
3789 | { |
3790 | edge_iterator ei; |
3791 | edge e; |
3792 | FOR_EACH_EDGE (e, ei, block->succs) |
3793 | if (e->dest->index != EXIT_BLOCK |
3794 | && bb_rpo[e->dest->index] < idx) |
3795 | changed = true; |
3796 | } |
3797 | |
3798 | /* Insert expressions for partial redundancies. */ |
3799 | if (flag_tree_pre && !single_pred_p (block)) |
3800 | { |
3801 | vec<pre_expr> exprs |
3802 | = sorted_array_from_bitmap_set (ANTIC_IN (block)); |
3803 | /* Sorting is not perfect, iterate locally. */ |
3804 | while (do_pre_regular_insertion (block, dom, exprs)) |
3805 | ; |
3806 | exprs.release (); |
3807 | if (do_partial_partial) |
3808 | { |
3809 | exprs = sorted_array_from_bitmap_set (PA_IN (block)); |
3810 | while (do_pre_partial_partial_insertion (block, dom, |
3811 | exprs)) |
3812 | ; |
3813 | exprs.release (); |
3814 | } |
3815 | } |
3816 | } |
3817 | } |
3818 | |
3819 | /* Clear the NEW sets before the next iteration. We have already |
3820 | fully propagated their contents. */ |
3821 | if (changed) |
3822 | FOR_ALL_BB_FN (bb, cfun) |
3823 | bitmap_set_free (NEW_SETS (bb)); |
3824 | } |
3825 | while (changed); |
3826 | |
3827 | statistics_histogram_event (cfun, "insert iterations", num_iterations); |
3828 | |
3829 | /* AVAIL_OUT is not needed after insertion so we don't have to |
3830 | propagate NEW_SETS from hoist insertion. */ |
3831 | FOR_ALL_BB_FN (bb, cfun) |
3832 | { |
3833 | bitmap_set_free (NEW_SETS (bb)); |
3834 | bitmap_set_pool.remove (NEW_SETS (bb)); |
3835 | NEW_SETS (bb) = NULL; |
3836 | } |
3837 | |
3838 | /* Insert expressions for hoisting. Do a backward walk here since |
3839 | inserting into BLOCK exposes new opportunities in its predecessors. |
3840 | Since PRE and hoist insertions can cause back-to-back iteration |
3841 | and we are interested in PRE-insertion-exposed hoisting opportunities |
3842 | but not in hoisting-exposed PRE ones, do hoist insertion only after |
3843 | the PRE insertion iteration has finished, and do not iterate it. */ |
3844 | if (flag_code_hoisting) |
3845 | for (int idx = rpo_num - 1; idx >= 0; --idx) |
3846 | { |
3847 | basic_block block = BASIC_BLOCK_FOR_FN (cfun, rpo[idx]); |
3848 | if (EDGE_COUNT (block->succs) >= 2) |
3849 | changed |= do_hoist_insertion (block); |
3850 | } |
3851 | |
3852 | free (rpo); |
3853 | free (bb_rpo); |
3854 | } |
3855 | |
3856 | |
3857 | /* Compute the AVAIL set for all basic blocks. |
3858 | |
3859 | This function performs value numbering of the statements in each basic |
3860 | block. The AVAIL sets are built from information we glean while doing |
3861 | this value numbering, since the AVAIL sets contain only one entry per |
3862 | value. |
3863 | |
3864 | AVAIL_IN[BLOCK] = AVAIL_OUT[dom(BLOCK)]. |
3865 | AVAIL_OUT[BLOCK] = AVAIL_IN[BLOCK] U PHI_GEN[BLOCK] U TMP_GEN[BLOCK]. */ |
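 | |
 | /* For instance (a sketch), for a diamond whose entry block computes |
 | x_1 = a_2 + b_3, both arms start from AVAIL_IN = { a_2, b_3, x_1 } |
 | inherited from their dominator, and each arm adds its own TMP_GEN |
 | and PHI_GEN names to its AVAIL_OUT. */ |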
3866 | |
3867 | static void |
3868 | compute_avail (function *fun) |
3869 | { |
3870 | |
3871 | basic_block block, son; |
3872 | basic_block *worklist; |
3873 | size_t sp = 0; |
3874 | unsigned i; |
3875 | tree name; |
3876 | |
3877 | /* We pretend that default definitions are defined in the entry block. |
3878 | This includes function arguments and the static chain decl. */ |
3879 | FOR_EACH_SSA_NAME (i, name, fun)for (i = 1; (fun)->gimple_df->ssa_names->iterate (i, &name); ++i) if (name) |
3880 | { |
3881 | pre_expr e; |
3882 | if (!SSA_NAME_IS_DEFAULT_DEF (name)(tree_check ((name), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-pre.cc" , 3882, __FUNCTION__, (SSA_NAME)))->base.default_def_flag |
3883 | || has_zero_uses (name) |
3884 | || virtual_operand_p (name)) |
3885 | continue; |
3886 | |
3887 | e = get_or_alloc_expr_for_name (name); |
3888 | add_to_value (get_expr_value_id (e), e); |
3889 | bitmap_insert_into_set (TMP_GEN (ENTRY_BLOCK_PTR_FOR_FN (fun)), e); |
3890 | bitmap_value_insert_into_set (AVAIL_OUT (ENTRY_BLOCK_PTR_FOR_FN (fun)), |
3891 | e); |
3892 | } |
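/* E.g. for a hypothetical int f (int a), the default definition |
   a_1(D) of the argument is entered into TMP_GEN and AVAIL_OUT of |
   the entry block above, so its value is available everywhere. */ |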
3893 | |
3894 | if (dump_file && (dump_flags & TDF_DETAILS)) |
3895 | { |
3896 | print_bitmap_set (dump_file, TMP_GEN (ENTRY_BLOCK_PTR_FOR_FN (fun)), |
3897 | "tmp_gen", ENTRY_BLOCK); |
3898 | print_bitmap_set (dump_file, AVAIL_OUT (ENTRY_BLOCK_PTR_FOR_FN (fun)), |
3899 | "avail_out", ENTRY_BLOCK); |
3900 | } |
3901 | |
3902 | /* Allocate the worklist. */ |
3903 | worklist = XNEWVEC (basic_block, n_basic_blocks_for_fn (fun)); |
3904 | |
3905 | /* Seed the algorithm by putting the dominator children of the entry |
3906 | block on the worklist. */ |
3907 | for (son = first_dom_son (CDI_DOMINATORS, ENTRY_BLOCK_PTR_FOR_FN (fun)); |
3908 | son; |
3909 | son = next_dom_son (CDI_DOMINATORS, son)) |
3910 | worklist[sp++] = son; |
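/* The worklist is consumed as a depth-first walk of the dominator |
   tree: a block is only pushed when its immediate dominator is |
   processed, so AVAIL_OUT of the dominator is always complete |
   before it is copied into the child below. */ |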
3911 | |
3912 | BB_LIVE_VOP_ON_EXIT (ENTRY_BLOCK_PTR_FOR_FN (fun)) |
3913 | = ssa_default_def (fun, gimple_vop (fun)); |
3914 | |
3915 | /* Loop until the worklist is empty. */ |
3916 | while (sp) |
3917 | { |
3918 | gimple *stmt; |
3919 | basic_block dom; |
3920 | |
3921 | /* Pick a block from the worklist. */ |
3922 | block = worklist[--sp]; |
3923 | vn_context_bb = block; |
3924 | |
3925 | /* Initially, the set of available values in BLOCK is that of |
3926 | its immediate dominator. */ |
3927 | dom = get_immediate_dominator (CDI_DOMINATORS, block); |
3928 | if (dom) |
3929 | { |
3930 | bitmap_set_copy (AVAIL_OUT (block), AVAIL_OUT (dom)); |
3931 | BB_LIVE_VOP_ON_EXIT (block) = BB_LIVE_VOP_ON_EXIT (dom); |
3932 | } |
3933 | |
3934 | /* Generate values for PHI nodes. */ |
3935 | for (gphi_iterator gsi = gsi_start_phis (block); !gsi_end_p (gsi); |
3936 | gsi_next (&gsi)) |
3937 | { |
3938 | tree result = gimple_phi_result (gsi.phi ()); |
3939 | |
3940 | /* We have no need for virtual phis, as they don't represent |
3941 | actual computations. */ |
3942 | if (virtual_operand_p (result)) |
3943 | { |
3944 | BB_LIVE_VOP_ON_EXIT (block) = result; |
3945 | continue; |
3946 | } |
3947 | |
3948 | pre_expr e = get_or_alloc_expr_for_name (result); |
3949 | add_to_value (get_expr_value_id (e), e); |
3950 | bitmap_value_insert_into_set (AVAIL_OUT (block), e); |
3951 | bitmap_insert_into_set (PHI_GEN (block), e); |
3952 | } |
3953 | |
3954 | BB_MAY_NOTRETURN (block) = 0; |
3955 | |
3956 | /* Now compute value numbers and populate value sets with all |
3957 | the expressions computed in BLOCK. */ |
3958 | bool set_bb_may_notreturn = false; |
3959 | for (gimple_stmt_iterator gsi = gsi_start_bb (block); !gsi_end_p (gsi); |
3960 | gsi_next (&gsi)) |
3961 | { |
3962 | ssa_op_iter iter; |
3963 | tree op; |
3964 | |
3965 | stmt = gsi_stmt (gsi); |
3966 | |
3967 | if (set_bb_may_notreturn) |
3968 | { |
3969 | BB_MAY_NOTRETURN (block) = 1; |
3970 | set_bb_may_notreturn = false; |
3971 | } |
3972 | |
3973 | /* Cache whether the basic-block has any non-visible side-effect |
3974 | or control flow. |
3975 | If this isn't a call or it is the last stmt in the |
3976 | basic-block then the CFG represents things correctly. */ |
3977 | if (is_gimple_call (stmt) && !stmt_ends_bb_p (stmt)) |
3978 | { |
3979 | /* Non-looping const functions always return normally. |
3980 | Otherwise the call might not return or have side-effects |
3981 | that forbid hoisting possibly trapping expressions |
3982 | before it. */ |
3983 | int flags = gimple_call_flags (stmt); |
3984 | if (!(flags & (ECF_CONST|ECF_PURE)) |
3985 | || (flags & ECF_LOOPING_CONST_OR_PURE) |
3986 | || stmt_can_throw_external (fun, stmt)) |
3987 | /* Defer setting of BB_MAY_NOTRETURN to avoid it |
3988 | influencing the processing of the call itself. */ |
3989 | set_bb_may_notreturn = true; |
3990 | } |
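/* E.g. for a hypothetical const-but-possibly-looping call |
   x = f (y) (ECF_LOOPING_CONST_OR_PURE), the deferred flag only |
   takes effect for the statements after the call, so the call's |
   own value can still be entered into EXP_GEN below. */ |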
3991 | |
3992 | FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_DEF) |
3993 | { |
3994 | pre_expr e = get_or_alloc_expr_for_name (op); |
3995 | add_to_value (get_expr_value_id (e), e); |
3996 | bitmap_insert_into_set (TMP_GEN (block), e); |
3997 | bitmap_value_insert_into_set (AVAIL_OUT (block), e); |
3998 | } |
3999 | |
4000 | if (gimple_vdef (stmt)) |
4001 | BB_LIVE_VOP_ON_EXIT (block) = gimple_vdef (stmt); |
4002 | |
4003 | if (gimple_has_side_effects (stmt) |
4004 | || stmt_could_throw_p (fun, stmt) |
4005 | || is_gimple_debug (stmt)) |
4006 | continue; |
4007 | |
4008 | FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE) |
4009 | { |
4010 | if (ssa_undefined_value_p (op)) |
4011 | continue; |
4012 | pre_expr e = get_or_alloc_expr_for_name (op); |
4013 | bitmap_value_insert_into_set (EXP_GEN (block), e); |
4014 | } |
4015 | |
4016 | switch (gimple_code (stmt)) |
4017 | { |
4018 | case GIMPLE_RETURN: |
4019 | continue; |
4020 | |
4021 | case GIMPLE_CALL: |
4022 | { |
4023 | vn_reference_t ref; |
4024 | vn_reference_s ref1; |
4025 | pre_expr result = NULL; |
4026 | |
4027 | vn_reference_lookup_call (as_a <gcall *> (stmt), &ref, &ref1); |
4028 | /* There is no point in PREing a call without a value. */ |
4029 | if (!ref || !ref->result) |
4030 | continue; |
4031 | |
4032 | /* If the value of the call is not invalidated in |
4033 | this block until it is computed, add the expression |
4034 | to EXP_GEN. */ |
4035 | if ((!gimple_vuse (stmt) |
4036 | || gimple_code |
4037 | (SSA_NAME_DEF_STMT (gimple_vuse (stmt))) == GIMPLE_PHI |
4038 | || gimple_bb (SSA_NAME_DEF_STMT |
4039 | (gimple_vuse (stmt))) != block) |
4040 | /* If the REFERENCE traps and there was a preceding |
4041 | point in the block that might not return, avoid |
4042 | adding the reference to EXP_GEN. */ |
4043 | && (!BB_MAY_NOTRETURN (block) |
4044 | || !vn_reference_may_trap (ref))) |
4045 | { |
4046 | result = get_or_alloc_expr_for_reference |
4047 | (ref, gimple_location (stmt)); |
4048 | add_to_value (get_expr_value_id (result), result); |
4049 | bitmap_value_insert_into_set (EXP_GEN (block), result); |
4050 | } |
4051 | continue; |
4052 | } |
4053 | |
4054 | case GIMPLE_ASSIGN: |
4055 | { |
4056 | pre_expr result = NULL; |
4057 | switch (vn_get_stmt_kind (stmt)) |
4058 | { |
4059 | case VN_NARY: |
4060 | { |
4061 | enum tree_code code = gimple_assign_rhs_code (stmt); |
4062 | vn_nary_op_t nary; |
4063 | |
4064 | /* COND_EXPR is awkward in that it contains an |
4065 | embedded complex expression. |
4066 | Don't even try to shove it through PRE. */ |
4067 | if (code == COND_EXPR) |
4068 | continue; |
4069 | |
4070 | vn_nary_op_lookup_stmt (stmt, &nary); |
4071 | if (!nary || nary->predicated_values) |
4072 | continue; |
4073 | |
4074 | unsigned value_id = nary->value_id; |
4075 | if (value_id_constant_p (value_id)) |
4076 | continue; |
4077 | |
4078 | /* Record the un-valueized expression for EXP_GEN. */ |
4079 | nary = XALLOCAVAR (struct vn_nary_op_s, |
4080 | sizeof_vn_nary_op |
4081 | (vn_nary_length_from_stmt (stmt))); |
4082 | init_vn_nary_op_from_stmt (nary, as_a <gassign *> (stmt)); |
4083 | |
4084 | /* If the NARY traps and there was a preceding |
4085 | point in the block that might not return, avoid |
4086 | adding the nary to EXP_GEN. */ |
4087 | if (BB_MAY_NOTRETURN (block) |
4088 | && vn_nary_may_trap (nary)) |
4089 | continue; |
4090 | |
4091 | result = get_or_alloc_expr_for_nary |
4092 | (nary, value_id, gimple_location (stmt)); |
4093 | break; |
4094 | } |
4095 | |
4096 | case VN_REFERENCE: |
4097 | { |
4098 | tree rhs1 = gimple_assign_rhs1 (stmt); |
4099 | ao_ref rhs1_ref; |
4100 | ao_ref_init (&rhs1_ref, rhs1); |
4101 | alias_set_type set = ao_ref_alias_set (&rhs1_ref); |
4102 | alias_set_type base_set |
4103 | = ao_ref_base_alias_set (&rhs1_ref); |
4104 | vec<vn_reference_op_s> operands |
4105 | = vn_reference_operands_for_lookup (rhs1); |
4106 | vn_reference_t ref; |
4107 | vn_reference_lookup_pieces (gimple_vuse (stmt), set, |
4108 | base_set, TREE_TYPE (rhs1), |
4109 | operands, &ref, VN_WALK); |
4110 | if (!ref) |
4111 | { |
4112 | operands.release (); |
4113 | continue; |
4114 | } |
4115 | |
4116 | /* If the REFERENCE traps and there was a preceding |
4117 | point in the block that might not return, avoid |
4118 | adding the reference to EXP_GEN. */ |
4119 | if (BB_MAY_NOTRETURN (block) |
4120 | && vn_reference_may_trap (ref)) |
4121 | { |
4122 | operands.release (); |
4123 | continue; |
4124 | } |
4125 | |
4126 | /* If the value of the reference is not invalidated in |
4127 | this block until it is computed, add the expression |
4128 | to EXP_GEN. */ |
4129 | if (gimple_vuse (stmt)) |
4130 | { |
4131 | gimple *def_stmt; |
4132 | bool ok = true; |
4133 | def_stmt = SSA_NAME_DEF_STMT (gimple_vuse (stmt)); |
4134 | while (!gimple_nop_p (def_stmt) |
4135 | && gimple_code (def_stmt) != GIMPLE_PHI |
4136 | && gimple_bb (def_stmt) == block) |
4137 | { |
4138 | if (stmt_may_clobber_ref_p |
4139 | (def_stmt, gimple_assign_rhs1 (stmt))) |
4140 | { |
4141 | ok = false; |
4142 | break; |
4143 | } |
4144 | def_stmt |
4145 | = SSA_NAME_DEF_STMT (gimple_vuse (def_stmt)); |
4146 | } |
4147 | if (!ok) |
4148 | { |
4149 | operands.release (); |
4150 | continue; |
4151 | } |
4152 | } |
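/* The walk above follows the virtual use-def chain backwards within |
   the block: if any intervening statement may clobber the loaded |
   location, the load's value is not valid from the block's start |
   up to this point and is kept out of EXP_GEN. */ |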
4153 | |
4154 | /* If the load was value-numbered to another |
4155 | load, make sure we do not use its expression |
4156 | for insertion if it wouldn't be a valid |
4157 | replacement. */ |
4158 | /* At the moment we have a testcase |
4159 | for hoist insertion of aligned vs. misaligned |
4160 | variants in gcc.dg/torture/pr65270-1.c; thus, |
4161 | with just alignment to be considered, we can |
4162 | simply replace the expression in the hashtable |
4163 | with the most conservative one. */ |
4164 | vn_reference_op_t ref1 = &ref->operands.last (); |
4165 | while (ref1->opcode != TARGET_MEM_REF |
4166 | && ref1->opcode != MEM_REF |
4167 | && ref1 != &ref->operands[0]) |
4168 | --ref1; |
4169 | vn_reference_op_t ref2 = &operands.last (); |
4170 | while (ref2->opcode != TARGET_MEM_REF |
4171 | && ref2->opcode != MEM_REF |
4172 | && ref2 != &operands[0]) |
4173 | --ref2; |
4174 | if ((ref1->opcode == TARGET_MEM_REF |
4175 | || ref1->opcode == MEM_REF) |
4176 | && (TYPE_ALIGN (ref1->type) |
4177 | > TYPE_ALIGN (ref2->type))) |
4178 | ref1->type |
4179 | = build_aligned_type (ref1->type, |
4180 | TYPE_ALIGN (ref2->type)); |
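/* I.e. if the same value is reached here through a less strictly |
   aligned access than the one recorded in the hashtable, the |
   hashtable expression is demoted to the weaker alignment so that |
   an insertion made from it is valid on every path. */ |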
4181 | /* TBAA behavior is an observable part of the semantics, so |
4182 | make sure that the hashtable expression covers this load |
4183 | as well, by adjusting the ref alias set and its base. */ |
4184 | if (ref->set == set |
4185 | || alias_set_subset_of (set, ref->set)) |
4186 | ; |
4187 | else if (ref1->opcode != ref2->opcode |
4188 | || (ref1->opcode != MEM_REF |
4189 | && ref1->opcode != TARGET_MEM_REF)) |
4190 | { |
4191 | /* With mismatching base opcodes or bases |
4192 | other than MEM_REF or TARGET_MEM_REF we |
4193 | can't do any easy TBAA adjustment. */ |
4194 | operands.release (); |
4195 | continue; |
4196 | } |
4197 | else if (alias_set_subset_of (ref->set, set)) |
4198 | { |
4199 | ref->set = set; |
4200 | if (ref1->opcode == MEM_REF) |
4201 | ref1->op0 |
4202 | = wide_int_to_tree (TREE_TYPE (ref2->op0), |
4203 | wi::to_wide (ref1->op0)); |
4204 | else |
4205 | ref1->op2 |
4206 | = wide_int_to_tree (TREE_TYPE (ref2->op2), |
4207 | wi::to_wide (ref1->op2)); |
4208 | } |
4209 | else |
4210 | { |
4211 | ref->set = 0; |
4212 | if (ref1->opcode == MEM_REF) |
4213 | ref1->op0 |
4214 | = wide_int_to_tree (ptr_type_node, |
4215 | wi::to_wide (ref1->op0)); |
4216 | else |
4217 | ref1->op2 |
4218 | = wide_int_to_tree (ptr_type_node, |
4219 | wi::to_wide (ref1->op2)); |
4220 | } |
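/* Summary of the TBAA adjustment above (aside from the bail-out |
   for mismatched base opcodes): if this load's alias set is |
   covered by the hashtable ref's set, nothing changes; if the |
   hashtable ref's set is a subset of SET, it is widened to SET; |
   otherwise fall back to alias set zero, which conflicts with |
   everything. */ |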
4221 | operands.release (); |
4222 | |
4223 | result = get_or_alloc_expr_for_reference |
4224 | (ref, gimple_location (stmt)); |
4225 | break; |
4226 | } |
4227 | |
4228 | default: |
4229 | continue; |
4230 | } |
4231 | |
4232 | add_to_value (get_expr_value_id (result), result); |
4233 | bitmap_value_insert_into_set (EXP_GEN (block), result); |
4234 | continue; |
4235 | } |
4236 | default: |
4237 | break; |
4238 | } |
4239 | } |
4240 | if (set_bb_may_notreturn) |
4241 | { |
4242 | BB_MAY_NOTRETURN (block) = 1; |
4243 | set_bb_may_notreturn = false; |
Value stored to 'set_bb_may_notreturn' is never read | |
4244 | } |
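/* The analyzer warning above concerns the assignment at line 4243: |
   set_bb_may_notreturn is local to this loop iteration and is not |
   read again before it goes out of scope, so the final clearing is |
   a (harmless) dead store, presumably kept for symmetry with the |
   identical reset inside the statement walk. */ |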
4245 | |
4246 | if (dump_file && (dump_flags & TDF_DETAILS)) |
4247 | { |
4248 | print_bitmap_set (dump_file, EXP_GEN (block), |
4249 | "exp_gen", block->index); |
4250 | print_bitmap_set (dump_file, PHI_GEN (block), |
4251 | "phi_gen", block->index); |
4252 | print_bitmap_set (dump_file, TMP_GEN (block), |
4253 | "tmp_gen", block->index); |
4254 | print_bitmap_set (dump_file, AVAIL_OUT (block), |
4255 | "avail_out", block->index); |
4256 | } |
4257 | |
4258 | /* Put the dominator children of BLOCK on the worklist of blocks |
4259 | to compute available sets for. */ |
4260 | for (son = first_dom_son (CDI_DOMINATORS, block); |
4261 | son; |
4262 | son = next_dom_son (CDI_DOMINATORS, son)) |
4263 | worklist[sp++] = son; |
4264 | } |
4265 | vn_context_bb = NULL; |
4266 | |
4267 | free (worklist); |
4268 | } |
4269 | |
4270 | |
4271 | /* Initialize data structures used by PRE. */ |
4272 | |
4273 | static void |
4274 | init_pre (void) |
4275 | { |
4276 | basic_block bb; |
4277 | |
4278 | next_expression_id = 1; |
4279 | expressions.create (0); |
4280 | expressions.safe_push (NULL); |
4281 | value_expressions.create (get_max_value_id () + 1); |
4282 | value_expressions.quick_grow_cleared (get_max_value_id () + 1); |
4283 | constant_value_expressions.create (get_max_constant_value_id () + 1); |
4284 | constant_value_expressions.quick_grow_cleared (get_max_constant_value_id () + 1); |
4285 | name_to_id.create (0); |
4286 | gcc_obstack_init (&pre_expr_obstack); |
4287 | |
4288 | inserted_exprs = BITMAP_ALLOC (NULL); |
4289 | |
4290 | connect_infinite_loops_to_exit (); |
4291 | memset (&pre_stats, 0, sizeof (pre_stats)); |
4292 | |
4293 | alloc_aux_for_blocks (sizeof (struct bb_bitmap_sets)); |
4294 | |
4295 | calculate_dominance_info (CDI_DOMINATORS); |
4296 | |
4297 | bitmap_obstack_initialize (&grand_bitmap_obstack); |
4298 | expression_to_id = new hash_table<pre_expr_d> (num_ssa_names * 3); |
4299 | FOR_ALL_BB_FN (bb, cfun) |
4300 | { |
4301 | EXP_GEN (bb) = bitmap_set_new (); |
4302 | PHI_GEN (bb) = bitmap_set_new (); |
4303 | TMP_GEN (bb) = bitmap_set_new (); |
4304 | AVAIL_OUT (bb) = bitmap_set_new (); |
4305 | PHI_TRANS_TABLE (bb) = NULL; |
4306 | } |
4307 | } |
4308 | |
4309 | |
4310 | /* Deallocate data structures used by PRE. */ |
4311 | |
4312 | static void |
4313 | fini_pre () |
4314 | { |
4315 | value_expressions.release (); |
4316 | constant_value_expressions.release (); |
4317 | expressions.release (); |
4318 | bitmap_obstack_release (&grand_bitmap_obstack); |
4319 | bitmap_set_pool.release (); |
4320 | pre_expr_pool.release (); |
4321 | delete expression_to_id; |
4322 | expression_to_id = NULL; |
4323 | name_to_id.release (); |
4324 | obstack_free (&pre_expr_obstack, NULL); |
4325 | |
4326 | basic_block bb; |
4327 | FOR_ALL_BB_FN (bb, cfun) |
4328 | if (bb->aux && PHI_TRANS_TABLE (bb)) |
4329 | delete PHI_TRANS_TABLE (bb); |
4330 | free_aux_for_blocks (); |
4331 | } |
4332 | |
4333 | namespace { |
4334 | |
4335 | const pass_data pass_data_pre = |
4336 | { |
4337 | GIMPLE_PASS, /* type */ |
4338 | "pre", /* name */ |
4339 | OPTGROUP_NONE, /* optinfo_flags */ |
4340 | TV_TREE_PRE, /* tv_id */ |
4341 | ( PROP_cfg | PROP_ssa ), /* properties_required */ |
4342 | 0, /* properties_provided */ |
4343 | 0, /* properties_destroyed */ |
4344 | TODO_rebuild_alias, /* todo_flags_start */ |
4345 | 0, /* todo_flags_finish */ |
4346 | }; |
4347 | |
4348 | class pass_pre : public gimple_opt_pass |
4349 | { |
4350 | public: |
4351 | pass_pre (gcc::context *ctxt) |
4352 | : gimple_opt_pass (pass_data_pre, ctxt) |
4353 | {} |
4354 | |
4355 | /* opt_pass methods: */ |
4356 | bool gate (function *) final override |
4357 | { return flag_tree_pre != 0 || flag_code_hoisting != 0; } |
4358 | unsigned int execute (function *) final override; |
4359 | |
4360 | }; // class pass_pre |
4361 | |
4362 | /* Valueization hook for RPO VN when we are calling back to it |
4363 | at ANTIC compute time. */ |
4364 | |
4365 | static tree |
4366 | pre_valueize (tree name) |
4367 | { |
4368 | if (TREE_CODE (name) == SSA_NAME) |
4369 | { |
4370 | tree tem = VN_INFO (name)->valnum; |
4371 | if (tem != VN_TOP && tem != name) |
4372 | { |
4373 | if (TREE_CODE (tem) != SSA_NAME |
4374 | || SSA_NAME_IS_DEFAULT_DEF (tem)) |
4375 | return tem; |
4376 | /* We create temporary SSA names for representatives that |
4377 | do not have a definition (yet) but are not default defs either; |
4378 | assume they are fine to use. */ |
4379 | basic_block def_bb = gimple_bb (SSA_NAME_DEF_STMT (tem)); |
4380 | if (! def_bb |
4381 | || dominated_by_p (CDI_DOMINATORS, vn_context_bb, def_bb)) |
4382 | return tem; |
4383 | /* ??? Now we could look for a leader. Ideally we'd somehow |
4384 | expose RPO VN leaders and get rid of AVAIL_OUT as well... */ |
4385 | } |
4386 | } |
4387 | return name; |
4388 | } |
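/* This hook is installed as vn_valueize in pass_pre::execute below; |
   during ANTIC computation it lets RPO VN replace an SSA name by |
   its value-number representative, but only when the |
   representative's definition (if any) dominates the block being |
   processed. */ |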
4389 | |
4390 | unsigned int |
4391 | pass_pre::execute (function *fun) |
4392 | { |
4393 | unsigned int todo = 0; |
4394 | |
4395 | do_partial_partial = |
4396 | flag_tree_partial_pre && optimize_function_for_speed_p (fun); |
4397 | |
4398 | /* This has to happen before VN runs because |
4399 | loop_optimizer_init may create new phis, etc. */ |
4400 | loop_optimizer_init (LOOPS_NORMAL); |
4401 | split_edges_for_insertion (); |
4402 | scev_initialize (); |
4403 | calculate_dominance_info (CDI_DOMINATORS); |
4404 | |
4405 | run_rpo_vn (VN_WALK); |
4406 | |
4407 | init_pre (); |
4408 | |
4409 | vn_valueize = pre_valueize; |
4410 | |
4411 | /* Insert can get quite slow on an incredibly large number of basic |
4412 | blocks due to some quadratic behavior. Until this behavior is |
4413 | fixed, don't run it when we have an incredibly large number of |
4414 | bb's. If we aren't going to run insert, there is no point in |
4415 | computing ANTIC either, even though it's plenty fast; nor do |
4416 | we require AVAIL. */ |
4417 | if (n_basic_blocks_for_fn (fun) < 4000) |
4418 | { |
4419 | compute_avail (fun); |
4420 | compute_antic (); |
4421 | insert (); |
4422 | } |
4423 | |
4424 | /* Make sure to remove fake edges before committing our inserts. |
4425 | This makes sure we don't end up with extra critical edges that |
4426 | we would need to split. */ |
4427 | remove_fake_exit_edges (); |
4428 | gsi_commit_edge_inserts (); |
4429 | |
4430 | /* Elimination folds statements, which might (though it should |
4431 | not...) end up not keeping virtual operands up-to-date. */ |
4432 | gcc_assert (!need_ssa_update_p (fun)); |
4433 | |
4434 | statistics_counter_event (fun, "Insertions", pre_stats.insertions); |
4435 | statistics_counter_event (fun, "PA inserted", pre_stats.pa_insert); |
4436 | statistics_counter_event (fun, "HOIST inserted", pre_stats.hoist_insert); |
4437 | statistics_counter_event (fun, "New PHIs", pre_stats.phis); |
4438 | |
4439 | todo |= eliminate_with_rpo_vn (inserted_exprs); |
4440 | |
4441 | vn_valueize = NULL; |
4442 | |
4443 | fini_pre (); |
4444 | |
4445 | scev_finalize (); |
4446 | loop_optimizer_finalize (); |
4447 | |
4448 | /* Perform a CFG cleanup before we run simple_dce_from_worklist since |
4449 | unreachable code regions will not have up-to-date SSA form, which |
4450 | confuses it. */ |
4451 | bool need_crit_edge_split = false; |
4452 | if (todo & TODO_cleanup_cfg) |
4453 | { |
4454 | cleanup_tree_cfg (); |
4455 | need_crit_edge_split = true; |
4456 | } |
4457 | |
4458 | /* Because we don't follow exactly the standard PRE algorithm, and decide not |
4459 | to insert PHI nodes sometimes, and because value numbering of casts isn't |
4460 | perfect, we sometimes end up inserting dead code. This simple DCE-like |
4461 | pass removes any insertions we made that weren't actually used. */ |
4462 | simple_dce_from_worklist (inserted_exprs); |
4463 | BITMAP_FREE (inserted_exprs); |
4464 | |
4465 | /* TODO: tail_merge_optimize may merge all predecessors of a block, in which |
4466 | case we can merge the block with the remaining predecessor of the block. |
4467 | It should either: |
4468 | - call merge_blocks after each tail merge iteration |
4469 | - call merge_blocks after all tail merge iterations |
4470 | - mark TODO_cleanup_cfg when necessary. */ |
4471 | todo |= tail_merge_optimize (need_crit_edge_split); |
4472 | |
4473 | free_rpo_vn (); |
4474 | |
4475 | /* Tail merging invalidates the virtual SSA web, together with |
4476 | cfg-cleanup opportunities exposed by PRE this will wreck the |
4477 | SSA updating machinery. So make sure to run update-ssa |
4478 | manually, before eventually scheduling cfg-cleanup as part of |
4479 | the todo. */ |
4480 | update_ssa (TODO_update_ssa_only_virtuals); |
4481 | |
4482 | return todo; |
4483 | } |
4484 | |
4485 | } // anon namespace |
4486 | |
4487 | gimple_opt_pass * |
4488 | make_pass_pre (gcc::context *ctxt) |
4489 | { |
4490 | return new pass_pre (ctxt); |
4491 | } |