File: build/gcc/tree-ssa-sccvn.cc
Warning: line 8543, column 63: Division by zero
/* SCC value numbering for trees
   Copyright (C) 2006-2023 Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "splay-tree.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "ssa.h"
#include "expmed.h"
#include "insn-config.h"
#include "memmodel.h"
#include "emit-rtl.h"
#include "cgraph.h"
#include "gimple-pretty-print.h"
#include "alias.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "cfganal.h"
#include "tree-inline.h"
#include "internal-fn.h"
#include "gimple-iterator.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "flags.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "dumpfile.h"
#include "cfgloop.h"
#include "tree-ssa-propagate.h"
#include "tree-cfg.h"
#include "domwalk.h"
#include "gimple-match.h"
#include "stringpool.h"
#include "attribs.h"
#include "tree-pass.h"
#include "statistics.h"
#include "langhooks.h"
#include "ipa-utils.h"
#include "dbgcnt.h"
#include "tree-cfgcleanup.h"
#include "tree-ssa-loop.h"
#include "tree-scalar-evolution.h"
#include "tree-ssa-loop-niter.h"
#include "builtins.h"
#include "fold-const-call.h"
#include "ipa-modref-tree.h"
#include "ipa-modref.h"
#include "tree-ssa-sccvn.h"

/* This algorithm is based on the SCC algorithm presented by Keith
   Cooper and L. Taylor Simpson in "SCC-Based Value numbering"
   (http://citeseer.ist.psu.edu/41805.html).  In
   straight line code, it is equivalent to a regular hash based value
   numbering that is performed in reverse postorder.

   For code with cycles, there are two alternatives, both of which
   require keeping the hashtables separate from the actual list of
   value numbers for SSA names.

   1. Iterate value numbering in an RPO walk of the blocks, removing
   all the entries from the hashtable after each iteration (but
   keeping the SSA name->value number mapping between iterations).
   Iterate until it does not change.

   2. Perform value numbering as part of an SCC walk on the SSA graph,
   iterating only the cycles in the SSA graph until they do not change
   (using a separate, optimistic hashtable for value numbering the SCC
   operands).

   The second is not just faster in practice (because most SSA graph
   cycles do not involve all the variables in the graph), it also has
   some nice properties.

   One of these nice properties is that when we pop an SCC off the
   stack, we are guaranteed to have processed all the operands coming from
   *outside of that SCC*, so we do not need to do anything special to
   ensure they have value numbers.

   Another nice property is that the SCC walk is done as part of a DFS
   of the SSA graph, which makes it easy to perform combining and
   simplifying operations at the same time.

   The code below is deliberately written in a way that makes it easy
   to separate the SCC walk from the other work it does.

   In order to propagate constants through the code, we track which
   expressions contain constants, and use those while folding.  In
   theory, we could also track expressions whose value numbers are
   replaced, in case we end up folding based on expression
   identities.

   In order to value number memory, we assign value numbers to vuses.
   This enables us to note that, for example, stores to the same
   address of the same value from the same starting memory states are
   equivalent.
   TODO:

   1. We can iterate only the changing portions of the SCC's, but
   I have not seen an SCC big enough for this to be a win.
   2. If you differentiate between phi nodes for loops and phi nodes
   for if-then-else, you can properly consider phi nodes in different
   blocks for equivalence.
   3. We could value number vuses in more cases, particularly, whole
   structure copies.
*/
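
/* For example, in straight-line code

     a_1 = x_2 + y_3;
     b_4 = x_2 + y_3;

   both right-hand sides hash to the same expression, so a_1 and b_4
   receive the same value number and later uses of b_4 can be
   canonicalized to a_1.  */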

/* There's no BB_EXECUTABLE but we can use BB_VISITED.  */
#define BB_EXECUTABLE BB_VISITED

static vn_lookup_kind default_vn_walk_kind;

/* vn_nary_op hashtable helpers.  */

struct vn_nary_op_hasher : nofree_ptr_hash <vn_nary_op_s>
{
  typedef vn_nary_op_s *compare_type;
  static inline hashval_t hash (const vn_nary_op_s *);
  static inline bool equal (const vn_nary_op_s *, const vn_nary_op_s *);
};

/* Return the computed hashcode for nary operation P1.  */

inline hashval_t
vn_nary_op_hasher::hash (const vn_nary_op_s *vno1)
{
  return vno1->hashcode;
}

/* Compare nary operations P1 and P2 and return true if they are
   equivalent.  */

inline bool
vn_nary_op_hasher::equal (const vn_nary_op_s *vno1, const vn_nary_op_s *vno2)
{
  return vno1 == vno2 || vn_nary_op_eq (vno1, vno2);
}

typedef hash_table<vn_nary_op_hasher> vn_nary_op_table_type;
typedef vn_nary_op_table_type::iterator vn_nary_op_iterator_type;


/* vn_phi hashtable helpers.  */

static int
vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2);

struct vn_phi_hasher : nofree_ptr_hash <vn_phi_s>
{
  static inline hashval_t hash (const vn_phi_s *);
  static inline bool equal (const vn_phi_s *, const vn_phi_s *);
};

/* Return the computed hashcode for phi operation P1.  */

inline hashval_t
vn_phi_hasher::hash (const vn_phi_s *vp1)
{
  return vp1->hashcode;
}

/* Compare two phi entries for equality, ignoring VN_TOP arguments.  */

inline bool
vn_phi_hasher::equal (const vn_phi_s *vp1, const vn_phi_s *vp2)
{
  return vp1 == vp2 || vn_phi_eq (vp1, vp2);
}

typedef hash_table<vn_phi_hasher> vn_phi_table_type;
typedef vn_phi_table_type::iterator vn_phi_iterator_type;


/* Compare two reference operands P1 and P2 for equality.  Return true if
   they are equal, and false otherwise.  */

static int
vn_reference_op_eq (const void *p1, const void *p2)
{
  const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
  const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;

  return (vro1->opcode == vro2->opcode
          /* We do not care for differences in type qualification.  */
          && (vro1->type == vro2->type
              || (vro1->type && vro2->type
                  && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
                                         TYPE_MAIN_VARIANT (vro2->type))))
          && expressions_equal_p (vro1->op0, vro2->op0)
          && expressions_equal_p (vro1->op1, vro2->op1)
          && expressions_equal_p (vro1->op2, vro2->op2)
          && (vro1->opcode != CALL_EXPR || vro1->clique == vro2->clique));
}
/* Free a reference operation structure VR.  */

static inline void
free_reference (vn_reference_s *vr)
{
  vr->operands.release ();
}


/* vn_reference hashtable helpers.  */

struct vn_reference_hasher : nofree_ptr_hash <vn_reference_s>
{
  static inline hashval_t hash (const vn_reference_s *);
  static inline bool equal (const vn_reference_s *, const vn_reference_s *);
};

/* Return the hashcode for a given reference operation P1.  */

inline hashval_t
vn_reference_hasher::hash (const vn_reference_s *vr1)
{
  return vr1->hashcode;
}

inline bool
vn_reference_hasher::equal (const vn_reference_s *v, const vn_reference_s *c)
{
  return v == c || vn_reference_eq (v, c);
}

typedef hash_table<vn_reference_hasher> vn_reference_table_type;
typedef vn_reference_table_type::iterator vn_reference_iterator_type;

/* Pretty-print OPS to OUTFILE.  */

void
print_vn_reference_ops (FILE *outfile, const vec<vn_reference_op_s> ops)
{
  vn_reference_op_t vro;
  unsigned int i;
  fprintf (outfile, "{");
  for (i = 0; ops.iterate (i, &vro); i++)
    {
      bool closebrace = false;
      if (vro->opcode != SSA_NAME
          && TREE_CODE_CLASS (vro->opcode) != tcc_declaration)
        {
          fprintf (outfile, "%s", get_tree_code_name (vro->opcode));
          if (vro->op0 || vro->opcode == CALL_EXPR)
            {
              fprintf (outfile, "<");
              closebrace = true;
            }
        }
      if (vro->op0 || vro->opcode == CALL_EXPR)
        {
          if (!vro->op0)
            fprintf (outfile, internal_fn_name ((internal_fn)vro->clique));
          else
            print_generic_expr (outfile, vro->op0);
          if (vro->op1)
            {
              fprintf (outfile, ",");
              print_generic_expr (outfile, vro->op1);
            }
          if (vro->op2)
            {
              fprintf (outfile, ",");
              print_generic_expr (outfile, vro->op2);
            }
        }
      if (closebrace)
        fprintf (outfile, ">");
      if (i != ops.length () - 1)
        fprintf (outfile, ",");
    }
  fprintf (outfile, "}");
}

DEBUG_FUNCTION void
debug_vn_reference_ops (const vec<vn_reference_op_s> ops)
{
  print_vn_reference_ops (stderr, ops);
  fputc ('\n', stderr);
}

/* The set of VN hashtables.  */

typedef struct vn_tables_s
{
  vn_nary_op_table_type *nary;
  vn_phi_table_type *phis;
  vn_reference_table_type *references;
} *vn_tables_t;


/* vn_constant hashtable helpers.  */

struct vn_constant_hasher : free_ptr_hash <vn_constant_s>
{
  static inline hashval_t hash (const vn_constant_s *);
  static inline bool equal (const vn_constant_s *, const vn_constant_s *);
};

/* Hash table hash function for vn_constant_t.  */

inline hashval_t
vn_constant_hasher::hash (const vn_constant_s *vc1)
{
  return vc1->hashcode;
}

/* Hash table equality function for vn_constant_t.  */

inline bool
vn_constant_hasher::equal (const vn_constant_s *vc1, const vn_constant_s *vc2)
{
  if (vc1->hashcode != vc2->hashcode)
    return false;

  return vn_constant_eq_with_type (vc1->constant, vc2->constant);
}

static hash_table<vn_constant_hasher> *constant_to_value_id;


/* Obstack we allocate the vn-tables elements from.  */
static obstack vn_tables_obstack;
/* Special obstack we never unwind.  */
static obstack vn_tables_insert_obstack;

static vn_reference_t last_inserted_ref;
static vn_phi_t last_inserted_phi;
static vn_nary_op_t last_inserted_nary;
static vn_ssa_aux_t last_pushed_avail;

/* Valid hashtables storing information we have proven to be
   correct.  */
static vn_tables_t valid_info;


/* Valueization hook for simplify_replace_tree.  Valueize NAME if it is
   an SSA name, otherwise just return it.  */
tree (*vn_valueize) (tree);
static tree
vn_valueize_for_srt (tree t, void* context ATTRIBUTE_UNUSED)
{
  basic_block saved_vn_context_bb = vn_context_bb;
  /* Look for sth available at the definition block of the argument.
     This avoids inconsistencies between availability there which
     decides if the stmt can be removed and availability at the
     use site.  The SSA property ensures that things available
     at the definition are also available at uses.  */
  if (!SSA_NAME_IS_DEFAULT_DEF (t))
    vn_context_bb = gimple_bb (SSA_NAME_DEF_STMT (t));
  tree res = vn_valueize (t);
  vn_context_bb = saved_vn_context_bb;
  return res;
}


/* This represents the top of the VN lattice, which is the universal
   value.  */

tree VN_TOP;

/* Unique counter for our value ids.  */

static unsigned int next_value_id;
static int next_constant_value_id;


/* Table of vn_ssa_aux_t's, one per ssa_name.  The vn_ssa_aux_t objects
   are allocated on an obstack for locality reasons, and to free them
   without looping over the vec.  */

struct vn_ssa_aux_hasher : typed_noop_remove <vn_ssa_aux_t>
{
  typedef vn_ssa_aux_t value_type;
  typedef tree compare_type;
  static inline hashval_t hash (const value_type &);
  static inline bool equal (const value_type &, const compare_type &);
  static inline void mark_deleted (value_type &) {}
  static const bool empty_zero_p = true;
  static inline void mark_empty (value_type &e) { e = NULL; }
  static inline bool is_deleted (value_type &) { return false; }
  static inline bool is_empty (value_type &e) { return e == NULL; }
};

hashval_t
vn_ssa_aux_hasher::hash (const value_type &entry)
{
  return SSA_NAME_VERSION (entry->name);
}

bool
vn_ssa_aux_hasher::equal (const value_type &entry, const compare_type &name)
{
  return name == entry->name;
}

static hash_table<vn_ssa_aux_hasher> *vn_ssa_aux_hash;
typedef hash_table<vn_ssa_aux_hasher>::iterator vn_ssa_aux_iterator_type;
static struct obstack vn_ssa_aux_obstack;

static vn_nary_op_t vn_nary_op_insert_stmt (gimple *, tree);
static vn_nary_op_t vn_nary_op_insert_into (vn_nary_op_t,
                                            vn_nary_op_table_type *);
static void init_vn_nary_op_from_pieces (vn_nary_op_t, unsigned int,
                                         enum tree_code, tree, tree *);
static tree vn_lookup_simplify_result (gimple_match_op *);
static vn_reference_t vn_reference_lookup_or_insert_for_pieces
          (tree, alias_set_type, alias_set_type, tree,
           vec<vn_reference_op_s, va_heap>, tree);

/* Return whether there is value numbering information for a given SSA name.  */

bool
has_VN_INFO (tree name)
{
  return vn_ssa_aux_hash->find_with_hash (name, SSA_NAME_VERSION (name));
}

vn_ssa_aux_t
VN_INFO (tree name)
{
  vn_ssa_aux_t *res
    = vn_ssa_aux_hash->find_slot_with_hash (name, SSA_NAME_VERSION (name),
                                            INSERT);
  if (*res != NULL)
    return *res;

  vn_ssa_aux_t newinfo = *res = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
  memset (newinfo, 0, sizeof (struct vn_ssa_aux));
  newinfo->name = name;
  newinfo->valnum = VN_TOP;
  /* We are using the visited flag to handle uses with defs not within the
     region being value-numbered.  */
  newinfo->visited = false;

  /* Given we create the VN_INFOs on-demand now we have to do initialization
     different than VN_TOP here.  */
  if (SSA_NAME_IS_DEFAULT_DEF (name))
    switch (TREE_CODE (SSA_NAME_VAR (name)))
      {
      case VAR_DECL:
        /* All undefined vars are VARYING.  */
        newinfo->valnum = name;
        newinfo->visited = true;
        break;

      case PARM_DECL:
        /* Parameters are VARYING but we can record a condition
           if we know it is a non-NULL pointer.  */
        newinfo->visited = true;
        newinfo->valnum = name;
        if (POINTER_TYPE_P (TREE_TYPE (name))
            && nonnull_arg_p (SSA_NAME_VAR (name)))
          {
            tree ops[2];
            ops[0] = name;
            ops[1] = build_int_cst (TREE_TYPE (name), 0);
            vn_nary_op_t nary;
            /* Allocate from non-unwinding stack.  */
            nary = alloc_vn_nary_op_noinit (2, &vn_tables_insert_obstack);
            init_vn_nary_op_from_pieces (nary, 2, NE_EXPR,
                                         boolean_type_node, ops);
            nary->predicated_values = 0;
            nary->u.result = boolean_true_node;
            vn_nary_op_insert_into (nary, valid_info->nary);
            gcc_assert (nary->unwind_to == NULL);
            /* Also do not link it into the undo chain.  */
            last_inserted_nary = nary->next;
            nary->next = (vn_nary_op_t)(void *)-1;
            nary = alloc_vn_nary_op_noinit (2, &vn_tables_insert_obstack);
            init_vn_nary_op_from_pieces (nary, 2, EQ_EXPR,
                                         boolean_type_node, ops);
            nary->predicated_values = 0;
            nary->u.result = boolean_false_node;
            vn_nary_op_insert_into (nary, valid_info->nary);
            gcc_assert (nary->unwind_to == NULL);
            last_inserted_nary = nary->next;
            nary->next = (vn_nary_op_t)(void *)-1;
            if (dump_file && (dump_flags & TDF_DETAILS))
              {
                fprintf (dump_file, "Recording ");
                print_generic_expr (dump_file, name, TDF_SLIM);
                fprintf (dump_file, " != 0\n");
              }
          }
        break;

      case RESULT_DECL:
        /* If the result is passed by invisible reference the default
           def is initialized, otherwise it's uninitialized.  Still
           undefined is varying.  */
        newinfo->visited = true;
        newinfo->valnum = name;
        break;

      default:
        gcc_unreachable ();
      }
  return newinfo;
}

/* Return the SSA value of X.  */

inline tree
SSA_VAL (tree x, bool *visited = NULL)
{
  vn_ssa_aux_t tem = vn_ssa_aux_hash->find_with_hash (x, SSA_NAME_VERSION (x));
  if (visited)
    *visited = tem && tem->visited;
  return tem && tem->visited ? tem->valnum : x;
}

/* Return the SSA value of the VUSE x, supporting released VDEFs
   during elimination which will value-number the VDEF to the
   associated VUSE (but not substitute in the whole lattice).  */

static inline tree
vuse_ssa_val (tree x)
{
  if (!x)
    return NULL_TREE;

  do
    {
      x = SSA_VAL (x);
      gcc_assert (x != VN_TOP);
    }
  while (SSA_NAME_IN_FREE_LIST (x));

  return x;
}

/* Similar to the above but used as callback for walk_non_aliased_vuses
   and thus should stop at unvisited VUSE to not walk across region
   boundaries.  */

static tree
vuse_valueize (tree vuse)
{
  do
    {
      bool visited;
      vuse = SSA_VAL (vuse, &visited);
      if (!visited)
        return NULL_TREE;
      gcc_assert (vuse != VN_TOP);
    }
  while (SSA_NAME_IN_FREE_LIST (vuse));
  return vuse;
}


/* Return the vn_kind the expression computed by the stmt should be
   associated with.  */

enum vn_kind
vn_get_stmt_kind (gimple *stmt)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      return VN_REFERENCE;
    case GIMPLE_PHI:
      return VN_PHI;
    case GIMPLE_ASSIGN:
      {
        enum tree_code code = gimple_assign_rhs_code (stmt);
        tree rhs1 = gimple_assign_rhs1 (stmt);
        switch (get_gimple_rhs_class (code))
          {
          case GIMPLE_UNARY_RHS:
          case GIMPLE_BINARY_RHS:
          case GIMPLE_TERNARY_RHS:
            return VN_NARY;
          case GIMPLE_SINGLE_RHS:
            switch (TREE_CODE_CLASS (code))
              {
              case tcc_reference:
                /* VOP-less references can go through unary case.  */
                if ((code == REALPART_EXPR
                     || code == IMAGPART_EXPR
                     || code == VIEW_CONVERT_EXPR
                     || code == BIT_FIELD_REF)
                    && (TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME
                        || is_gimple_min_invariant (TREE_OPERAND (rhs1, 0))))
                  return VN_NARY;

                /* Fallthrough.  */
              case tcc_declaration:
                return VN_REFERENCE;

              case tcc_constant:
                return VN_CONSTANT;

              default:
                if (code == ADDR_EXPR)
                  return (is_gimple_min_invariant (rhs1)
                          ? VN_CONSTANT : VN_REFERENCE);
                else if (code == CONSTRUCTOR)
                  return VN_NARY;
                return VN_NONE;
              }
          default:
            return VN_NONE;
          }
      }
    default:
      return VN_NONE;
    }
}
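
/* For instance, under the classification above
     _1 = a_2 + b_3;      is VN_NARY,
     _4 = *p_5;           is VN_REFERENCE,
     _6 = PHI <_1, _4>    is VN_PHI,
   and an assignment of a bare constant is VN_CONSTANT.  */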

/* Lookup a value id for CONSTANT and return it.  If it does not
   exist, return 0.  */

unsigned int
get_constant_value_id (tree constant)
{
  vn_constant_s **slot;
  struct vn_constant_s vc;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = constant_to_value_id->find_slot (&vc, NO_INSERT);
  if (slot)
    return (*slot)->value_id;
  return 0;
}

/* Lookup a value id for CONSTANT, and if it does not exist, create a
   new one and return it.  If it does exist, return it.  */

unsigned int
get_or_alloc_constant_value_id (tree constant)
{
  vn_constant_s **slot;
  struct vn_constant_s vc;
  vn_constant_t vcp;

  /* If the hashtable isn't initialized we're not running from PRE and thus
     do not need value-ids.  */
  if (!constant_to_value_id)
    return 0;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = constant_to_value_id->find_slot (&vc, INSERT);
  if (*slot)
    return (*slot)->value_id;

  vcp = XNEW (struct vn_constant_s);
  vcp->hashcode = vc.hashcode;
  vcp->constant = constant;
  vcp->value_id = get_next_constant_value_id ();
  *slot = vcp;
  return vcp->value_id;
}

/* Compute the hash for a reference operand VRO1.  */

static void
vn_reference_op_compute_hash (const vn_reference_op_t vro1, inchash::hash &hstate)
{
  hstate.add_int (vro1->opcode);
  if (vro1->opcode == CALL_EXPR && !vro1->op0)
    hstate.add_int (vro1->clique);
  if (vro1->op0)
    inchash::add_expr (vro1->op0, hstate);
  if (vro1->op1)
    inchash::add_expr (vro1->op1, hstate);
  if (vro1->op2)
    inchash::add_expr (vro1->op2, hstate);
}

/* Compute a hash for the reference operation VR1 and return it.  */

static hashval_t
vn_reference_compute_hash (const vn_reference_t vr1)
{
  inchash::hash hstate;
  hashval_t result;
  int i;
  vn_reference_op_t vro;
  poly_int64 off = -1;
  bool deref = false;

  FOR_EACH_VEC_ELT (vr1->operands, i, vro)
    {
      if (vro->opcode == MEM_REF)
        deref = true;
      else if (vro->opcode != ADDR_EXPR)
        deref = false;
      if (maybe_ne (vro->off, -1))
        {
          if (known_eq (off, -1))
            off = 0;
          off += vro->off;
        }
      else
        {
          if (maybe_ne (off, -1)
              && maybe_ne (off, 0))
            hstate.add_poly_int (off);
          off = -1;
          if (deref
              && vro->opcode == ADDR_EXPR)
            {
              if (vro->op0)
                {
                  tree op = TREE_OPERAND (vro->op0, 0);
                  hstate.add_int (TREE_CODE (op));
                  inchash::add_expr (op, hstate);
                }
            }
          else
            vn_reference_op_compute_hash (vro, hstate);
        }
    }
  result = hstate.end ();
  /* ??? We would ICE later if we hash instead of adding that in.  */
  if (vr1->vuse)
    result += SSA_NAME_VERSION (vr1->vuse);

  return result;
}

/* Return true if reference operations VR1 and VR2 are equivalent.  This
   means they have the same set of operands and vuses.  */

bool
vn_reference_eq (const_vn_reference_t const vr1, const_vn_reference_t const vr2)
{
  unsigned i, j;

  /* Early out if this is not a hash collision.  */
  if (vr1->hashcode != vr2->hashcode)
    return false;

  /* The VOP needs to be the same.  */
  if (vr1->vuse != vr2->vuse)
    return false;

  /* If the operands are the same we are done.  */
  if (vr1->operands == vr2->operands)
    return true;

  if (!vr1->type || !vr2->type)
    {
      if (vr1->type != vr2->type)
        return false;
    }
  else if (vr1->type == vr2->type)
    ;
  else if (COMPLETE_TYPE_P (vr1->type) != COMPLETE_TYPE_P (vr2->type)
           || (COMPLETE_TYPE_P (vr1->type)
               && !expressions_equal_p (TYPE_SIZE (vr1->type),
                                        TYPE_SIZE (vr2->type))))
    return false;
  else if (vr1->operands[0].opcode == CALL_EXPR
           && !types_compatible_p (vr1->type, vr2->type))
    return false;
  else if (INTEGRAL_TYPE_P (vr1->type)
           && INTEGRAL_TYPE_P (vr2->type))
    {
      if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
        return false;
    }
  else if (INTEGRAL_TYPE_P (vr1->type)
           && (TYPE_PRECISION (vr1->type)
               != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
    return false;
  else if (INTEGRAL_TYPE_P (vr2->type)
           && (TYPE_PRECISION (vr2->type)
               != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
    return false;

  i = 0;
  j = 0;
  do
    {
      poly_int64 off1 = 0, off2 = 0;
      vn_reference_op_t vro1, vro2;
      vn_reference_op_s tem1, tem2;
      bool deref1 = false, deref2 = false;
      bool reverse1 = false, reverse2 = false;
      for (; vr1->operands.iterate (i, &vro1); i++)
        {
          if (vro1->opcode == MEM_REF)
            deref1 = true;
          /* Do not look through a storage order barrier.  */
          else if (vro1->opcode == VIEW_CONVERT_EXPR && vro1->reverse)
            return false;
          reverse1 |= vro1->reverse;
          if (known_eq (vro1->off, -1))
            break;
          off1 += vro1->off;
        }
      for (; vr2->operands.iterate (j, &vro2); j++)
        {
          if (vro2->opcode == MEM_REF)
            deref2 = true;
          /* Do not look through a storage order barrier.  */
          else if (vro2->opcode == VIEW_CONVERT_EXPR && vro2->reverse)
            return false;
          reverse2 |= vro2->reverse;
          if (known_eq (vro2->off, -1))
            break;
          off2 += vro2->off;
        }
      if (maybe_ne (off1, off2) || reverse1 != reverse2)
        return false;
      if (deref1 && vro1->opcode == ADDR_EXPR)
        {
          memset (&tem1, 0, sizeof (tem1));
          tem1.op0 = TREE_OPERAND (vro1->op0, 0);
          tem1.type = TREE_TYPE (tem1.op0);
          tem1.opcode = TREE_CODE (tem1.op0);
          vro1 = &tem1;
          deref1 = false;
        }
      if (deref2 && vro2->opcode == ADDR_EXPR)
        {
          memset (&tem2, 0, sizeof (tem2));
          tem2.op0 = TREE_OPERAND (vro2->op0, 0);
          tem2.type = TREE_TYPE (tem2.op0);
          tem2.opcode = TREE_CODE (tem2.op0);
          vro2 = &tem2;
          deref2 = false;
        }
      if (deref1 != deref2)
        return false;
      if (!vn_reference_op_eq (vro1, vro2))
        return false;
      ++j;
      ++i;
    }
  while (vr1->operands.length () != i
         || vr2->operands.length () != j);

  return true;
}
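
/* For example, with a field f at byte offset 4 and equal types, the
   operand chains of a.f and MEM[&a + 4] accumulate the same known
   offset over the same base decl, so the two references compare
   equal here even though they decompose differently.  */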

/* Copy the operations present in load/store REF into RESULT, a vector of
   vn_reference_op_s's.  */

static void
copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
{
  /* For non-calls, store the information that makes up the address.  */
  tree orig = ref;
  while (ref)
    {
      vn_reference_op_s temp;

      memset (&temp, 0, sizeof (temp));
      temp.type = TREE_TYPE (ref);
      temp.opcode = TREE_CODE (ref);
      temp.off = -1;

      switch (temp.opcode)
        {
        case MODIFY_EXPR:
          temp.op0 = TREE_OPERAND (ref, 1);
          break;
        case WITH_SIZE_EXPR:
          temp.op0 = TREE_OPERAND (ref, 1);
          temp.off = 0;
          break;
        case MEM_REF:
          /* The base address gets its own vn_reference_op_s structure.  */
          temp.op0 = TREE_OPERAND (ref, 1);
          if (!mem_ref_offset (ref).to_shwi (&temp.off))
            temp.off = -1;
          temp.clique = MR_DEPENDENCE_CLIQUE (ref);
          temp.base = MR_DEPENDENCE_BASE (ref);
          temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
          break;
        case TARGET_MEM_REF:
          /* The base address gets its own vn_reference_op_s structure.  */
          temp.op0 = TMR_INDEX (ref);
          temp.op1 = TMR_STEP (ref);
          temp.op2 = TMR_OFFSET (ref);
          temp.clique = MR_DEPENDENCE_CLIQUE (ref);
          temp.base = MR_DEPENDENCE_BASE (ref);
          result->safe_push (temp);
          memset (&temp, 0, sizeof (temp));
          temp.type = NULL_TREE;
          temp.opcode = ERROR_MARK;
          temp.op0 = TMR_INDEX2 (ref);
          temp.off = -1;
          break;
        case BIT_FIELD_REF:
          /* Record bits, position and storage order.  */
          temp.op0 = TREE_OPERAND (ref, 1);
          temp.op1 = TREE_OPERAND (ref, 2);
          if (!multiple_p (bit_field_offset (ref), BITS_PER_UNIT, &temp.off))
            temp.off = -1;
          temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
          break;
        case COMPONENT_REF:
          /* The field decl is enough to unambiguously specify the field,
             so use its type here.  */
          temp.type = TREE_TYPE (TREE_OPERAND (ref, 1));
          temp.op0 = TREE_OPERAND (ref, 1);
          temp.op1 = TREE_OPERAND (ref, 2);
          temp.reverse = (AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (ref, 0)))
                          && TYPE_REVERSE_STORAGE_ORDER
                               (TREE_TYPE (TREE_OPERAND (ref, 0))));
          {
            tree this_offset = component_ref_field_offset (ref);
            if (this_offset
                && poly_int_tree_p (this_offset))
              {
                tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
                if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
                  {
                    poly_offset_int off
                      = (wi::to_poly_offset (this_offset)
                         + (wi::to_offset (bit_offset) >> LOG2_BITS_PER_UNIT));
                    /* Prohibit value-numbering zero offset components
                       of addresses the same before the pass folding
                       __builtin_object_size had a chance to run.  */
949 | if (TREE_CODE (orig)((enum tree_code) (orig)->base.code) != ADDR_EXPR |
950 | || maybe_ne (off, 0) |
951 | || (cfun(cfun + 0)->curr_properties & PROP_objsz(1 << 4))) |
952 | off.to_shwi (&temp.off); |
953 | } |
954 | } |
955 | } |
956 | break; |
957 | case ARRAY_RANGE_REF: |
958 | case ARRAY_REF: |
959 | { |
960 | tree eltype = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref, 0)))((contains_struct_check ((((contains_struct_check (((*((const_cast <tree*> (tree_operand_check ((ref), (0), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-sccvn.cc" , 960, __FUNCTION__)))))), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-sccvn.cc" , 960, __FUNCTION__))->typed.type)), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-sccvn.cc" , 960, __FUNCTION__))->typed.type); |
961 | /* Record index as operand. */ |
962 | temp.op0 = TREE_OPERAND (ref, 1)(*((const_cast<tree*> (tree_operand_check ((ref), (1), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-sccvn.cc" , 962, __FUNCTION__))))); |
963 | /* Always record lower bounds and element size. */ |
964 | temp.op1 = array_ref_low_bound (ref); |
965 | /* But record element size in units of the type alignment. */ |
966 | temp.op2 = TREE_OPERAND (ref, 3)(*((const_cast<tree*> (tree_operand_check ((ref), (3), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-sccvn.cc" , 966, __FUNCTION__))))); |
967 | temp.align = eltype->type_common.align; |
968 | if (! temp.op2) |
969 | temp.op2 = size_binop (EXACT_DIV_EXPR, TYPE_SIZE_UNIT (eltype),size_binop_loc (((location_t) 0), EXACT_DIV_EXPR, ((tree_class_check ((eltype), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-sccvn.cc" , 969, __FUNCTION__))->type_common.size_unit), size_int_kind (((((tree_class_check ((eltype), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-sccvn.cc" , 970, __FUNCTION__))->type_common.align) ? ((unsigned)1) << (((tree_class_check ((eltype), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-sccvn.cc" , 970, __FUNCTION__))->type_common.align) - 1) : 0) / (8)) , stk_sizetype)) |
970 | size_int (TYPE_ALIGN_UNIT (eltype)))size_binop_loc (((location_t) 0), EXACT_DIV_EXPR, ((tree_class_check ((eltype), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-sccvn.cc" , 969, __FUNCTION__))->type_common.size_unit), size_int_kind (((((tree_class_check ((eltype), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-sccvn.cc" , 970, __FUNCTION__))->type_common.align) ? ((unsigned)1) << (((tree_class_check ((eltype), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-sccvn.cc" , 970, __FUNCTION__))->type_common.align) - 1) : 0) / (8)) , stk_sizetype)); |
971 | if (poly_int_tree_p (temp.op0) |
972 | && poly_int_tree_p (temp.op1) |
973 | && TREE_CODE (temp.op2)((enum tree_code) (temp.op2)->base.code) == INTEGER_CST) |
974 | { |
975 | poly_offset_int off = ((wi::to_poly_offset (temp.op0) |
976 | - wi::to_poly_offset (temp.op1)) |
977 | * wi::to_offset (temp.op2) |
978 | * vn_ref_op_align_unit (&temp)); |
979 | off.to_shwi (&temp.off); |
980 | } |
981 | temp.reverse = (AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (ref, 0)))(((enum tree_code) (((contains_struct_check (((*((const_cast< tree*> (tree_operand_check ((ref), (0), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-sccvn.cc" , 981, __FUNCTION__)))))), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-sccvn.cc" , 981, __FUNCTION__))->typed.type))->base.code) == ARRAY_TYPE || (((enum tree_code) (((contains_struct_check (((*((const_cast <tree*> (tree_operand_check ((ref), (0), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-sccvn.cc" , 981, __FUNCTION__)))))), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-sccvn.cc" , 981, __FUNCTION__))->typed.type))->base.code) == RECORD_TYPE || ((enum tree_code) (((contains_struct_check (((*((const_cast <tree*> (tree_operand_check ((ref), (0), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-sccvn.cc" , 981, __FUNCTION__)))))), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-sccvn.cc" , 981, __FUNCTION__))->typed.type))->base.code) == UNION_TYPE || ((enum tree_code) (((contains_struct_check (((*((const_cast <tree*> (tree_operand_check ((ref), (0), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-sccvn.cc" , 981, __FUNCTION__)))))), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-sccvn.cc" , 981, __FUNCTION__))->typed.type))->base.code) == QUAL_UNION_TYPE )) |
982 | && TYPE_REVERSE_STORAGE_ORDER((tree_check4 ((((contains_struct_check (((*((const_cast<tree *> (tree_operand_check ((ref), (0), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-sccvn.cc" , 983, __FUNCTION__)))))), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-sccvn.cc" , 983, __FUNCTION__))->typed.type)), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-sccvn.cc" , 983, __FUNCTION__, (RECORD_TYPE), (UNION_TYPE), (QUAL_UNION_TYPE ), (ARRAY_TYPE)))->base.u.bits.saturating_flag) |
983 | (TREE_TYPE (TREE_OPERAND (ref, 0)))((tree_check4 ((((contains_struct_check (((*((const_cast<tree *> (tree_operand_check ((ref), (0), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-sccvn.cc" , 983, __FUNCTION__)))))), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-sccvn.cc" , 983, __FUNCTION__))->typed.type)), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-sccvn.cc" , 983, __FUNCTION__, (RECORD_TYPE), (UNION_TYPE), (QUAL_UNION_TYPE ), (ARRAY_TYPE)))->base.u.bits.saturating_flag)); |
984 | } |
985 | break; |
986 | case VAR_DECL: |
987 | if (DECL_HARD_REGISTER (ref)((tree_check ((ref), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-sccvn.cc" , 987, __FUNCTION__, (VAR_DECL)))->decl_with_vis.hard_register )) |
988 | { |
989 | temp.op0 = ref; |
990 | break; |
991 | } |
992 | /* Fallthru. */ |
993 | case PARM_DECL: |
994 | case CONST_DECL: |
995 | case RESULT_DECL: |
996 | /* Canonicalize decls to MEM[&decl] which is what we end up with |
997 | when valueizing MEM[ptr] with ptr = &decl. */ |
998 | temp.opcode = MEM_REF; |
999 | temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)((contains_struct_check ((ref), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-sccvn.cc" , 999, __FUNCTION__))->typed.type)), 0); |
1000 | temp.off = 0; |
1001 | result->safe_push (temp); |
1002 | temp.opcode = ADDR_EXPR; |
1003 | temp.op0 = build1 (ADDR_EXPR, TREE_TYPE (temp.op0)((contains_struct_check ((temp.op0), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-sccvn.cc" , 1003, __FUNCTION__))->typed.type), ref); |
1004 | temp.type = TREE_TYPE (temp.op0)((contains_struct_check ((temp.op0), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-sccvn.cc" , 1004, __FUNCTION__))->typed.type); |
1005 | temp.off = -1; |
1006 | break; |
1007 | case STRING_CST: |
1008 | case INTEGER_CST: |
1009 | case POLY_INT_CST: |
1010 | case COMPLEX_CST: |
1011 | case VECTOR_CST: |
1012 | case REAL_CST: |
1013 | case FIXED_CST: |
1014 | case CONSTRUCTOR: |
1015 | case SSA_NAME: |
1016 | temp.op0 = ref; |
1017 | break; |
1018 | case ADDR_EXPR: |
1019 | if (is_gimple_min_invariant (ref)) |
1020 | { |
1021 | temp.op0 = ref; |
1022 | break; |
1023 | } |
1024 | break; |
1025 | /* These are only interesting for their operands, their |
1026 | existence, and their type. They will never be the last |
1027 | ref in the chain of references (IE they require an |
1028 | operand), so we don't have to put anything |
1029 | for op* as it will be handled by the iteration */ |
1030 | case REALPART_EXPR: |
1031 | temp.off = 0; |
1032 | break; |
1033 | case VIEW_CONVERT_EXPR: |
1034 | temp.off = 0; |
1035 | temp.reverse = storage_order_barrier_p (ref); |
1036 | break; |
1037 | case IMAGPART_EXPR: |
1038 | /* This is only interesting for its constant offset. */ |
1039 | temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
1040 | break; |
1041 | default: |
1042 | gcc_unreachable ();
1043 | } |
1044 | result->safe_push (temp); |
1045 | |
1046 | if (REFERENCE_CLASS_P (ref)
1047 | || TREE_CODE (ref) == MODIFY_EXPR
1048 | || TREE_CODE (ref) == WITH_SIZE_EXPR
1049 | || (TREE_CODE (ref) == ADDR_EXPR
1050 | && !is_gimple_min_invariant (ref)))
1051 | ref = TREE_OPERAND (ref, 0);
1052 | else
1053 | ref = NULL_TREE;
1054 | } |
1055 | } |
1056 | |
1057 | /* Build an alias-oracle reference abstraction in *REF from the vn_reference
1058 | operands in *OPS, the alias sets SET and BASE_SET and the reference type TYPE.
1059 | Return true if something useful was produced. */ |
1060 | |
1061 | bool |
1062 | ao_ref_init_from_vn_reference (ao_ref *ref, |
1063 | alias_set_type set, alias_set_type base_set, |
1064 | tree type, const vec<vn_reference_op_s> &ops) |
1065 | { |
1066 | unsigned i; |
1067 | tree base = NULL_TREE;
1068 | tree *op0_p = &base; |
1069 | poly_offset_int offset = 0; |
1070 | poly_offset_int max_size; |
1071 | poly_offset_int size = -1; |
1072 | tree size_tree = NULL_TREE;
1073 | |
1074 | /* We don't handle calls. */ |
1075 | if (!type) |
1076 | return false; |
1077 | |
1078 | machine_mode mode = TYPE_MODE (type);
1079 | if (mode == BLKmode)
1080 | size_tree = TYPE_SIZE (type);
1081 | else |
1082 | size = GET_MODE_BITSIZE (mode); |
1083 | if (size_tree != NULL_TREE
1084 | && poly_int_tree_p (size_tree)) |
1085 | size = wi::to_poly_offset (size_tree); |
1086 | |
1087 | /* Lower the final access size from the outermost expression. */ |
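| /* E.g. a COMPONENT_REF or BIT_FIELD_REF at the outermost position can
| select fewer bits than the mode-based size computed above. */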
1088 | const_vn_reference_op_t cst_op = &ops[0]; |
1089 | /* Cast away constness for the sake of the const-unsafe |
1090 | FOR_EACH_VEC_ELT(). */ |
1091 | vn_reference_op_t op = const_cast<vn_reference_op_t>(cst_op); |
1092 | size_tree = NULL_TREE;
1093 | if (op->opcode == COMPONENT_REF)
1094 | size_tree = DECL_SIZE (op->op0);
1095 | else if (op->opcode == BIT_FIELD_REF) |
1096 | size_tree = op->op0; |
1097 | if (size_tree != NULL_TREE
1098 | && poly_int_tree_p (size_tree)
1099 | && (!known_size_p (size)
1100 | || known_lt (wi::to_poly_offset (size_tree), size)))
1101 | size = wi::to_poly_offset (size_tree); |
1102 | |
1103 | /* Initially, maxsize is the same as the accessed element size. |
1104 | In the following it will only grow (or become -1). */ |
1105 | max_size = size; |
1106 | |
1107 | /* Compute cumulative bit-offset for nested component-refs and array-refs, |
1108 | and find the ultimate containing object. */ |
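| /* Note the ops are ordered from the outermost handled component down to
| the base object, as produced by copy_reference_ops_from_ref. */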
1109 | FOR_EACH_VEC_ELT (ops, i, op)
1110 | { |
1111 | switch (op->opcode) |
1112 | { |
1113 | /* These may be in the reference ops, but we cannot do anything |
1114 | sensible with them here. */ |
1115 | case ADDR_EXPR: |
1116 | /* Apart from ADDR_EXPR arguments to MEM_REF. */ |
1117 | if (base != NULL_TREE
1118 | && TREE_CODE (base) == MEM_REF
1119 | && op->op0
1120 | && DECL_P (TREE_OPERAND (op->op0, 0)))
1121 | { |
1122 | const_vn_reference_op_t pop = &ops[i-1]; |
1123 | base = TREE_OPERAND (op->op0, 0);
1124 | if (known_eq (pop->off, -1))
1125 | { |
1126 | max_size = -1; |
1127 | offset = 0; |
1128 | } |
1129 | else |
1130 | offset += pop->off * BITS_PER_UNIT;
1131 | op0_p = NULL;
1132 | break; |
1133 | } |
1134 | /* Fallthru. */ |
1135 | case CALL_EXPR: |
1136 | return false; |
1137 | |
1138 | /* Record the base objects. */ |
1139 | case MEM_REF: |
1140 | *op0_p = build2 (MEM_REF, op->type, |
1141 | NULL_TREE, op->op0);
1142 | MR_DEPENDENCE_CLIQUE (*op0_p) = op->clique;
1143 | MR_DEPENDENCE_BASE (*op0_p) = op->base;
1144 | op0_p = &TREE_OPERAND (*op0_p, 0);
1145 | break; |
1146 | |
1147 | case VAR_DECL: |
1148 | case PARM_DECL: |
1149 | case RESULT_DECL: |
1150 | case SSA_NAME: |
1151 | *op0_p = op->op0; |
1152 | op0_p = NULL;
1153 | break; |
1154 | |
1155 | /* And now the usual component-reference style ops. */ |
1156 | case BIT_FIELD_REF: |
1157 | offset += wi::to_poly_offset (op->op1); |
1158 | break; |
1159 | |
1160 | case COMPONENT_REF: |
1161 | { |
1162 | tree field = op->op0; |
1163 | /* We do not have a complete COMPONENT_REF tree here so we |
1164 | cannot use component_ref_field_offset. Do the interesting |
1165 | parts manually. */ |
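| /* The constant part of the field position is DECL_FIELD_OFFSET in bytes
| plus DECL_FIELD_BIT_OFFSET in bits; a non-NULL op->op1 records a
| variable field offset we cannot handle, hence max_size becomes -1. */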
1166 | tree this_offset = DECL_FIELD_OFFSET (field);
1167 | |
1168 | if (op->op1 || !poly_int_tree_p (this_offset)) |
1169 | max_size = -1; |
1170 | else |
1171 | { |
1172 | poly_offset_int woffset = (wi::to_poly_offset (this_offset) |
1173 | << LOG2_BITS_PER_UNIT);
1174 | woffset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
1175 | offset += woffset; |
1176 | } |
1177 | break; |
1178 | } |
1179 | |
1180 | case ARRAY_RANGE_REF: |
1181 | case ARRAY_REF: |
1182 | /* We recorded the lower bound and the element size. */ |
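| /* The contribution of one array level in bits is
| (index - low_bound) * element_size * BITS_PER_UNIT, with the element
| size recorded as op2 scaled by vn_ref_op_align_unit. */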
1183 | if (!poly_int_tree_p (op->op0) |
1184 | || !poly_int_tree_p (op->op1) |
1185 | || TREE_CODE (op->op2) != INTEGER_CST)
1186 | max_size = -1; |
1187 | else |
1188 | { |
1189 | poly_offset_int woffset |
1190 | = wi::sext (wi::to_poly_offset (op->op0) |
1191 | - wi::to_poly_offset (op->op1), |
1192 | TYPE_PRECISION (sizetype));
1193 | woffset *= wi::to_offset (op->op2) * vn_ref_op_align_unit (op);
1194 | woffset <<= LOG2_BITS_PER_UNIT;
1195 | offset += woffset; |
1196 | } |
1197 | break; |
1198 | |
1199 | case REALPART_EXPR: |
1200 | break; |
1201 | |
1202 | case IMAGPART_EXPR: |
1203 | offset += size; |
1204 | break; |
1205 | |
1206 | case VIEW_CONVERT_EXPR: |
1207 | break; |
1208 | |
1209 | case STRING_CST: |
1210 | case INTEGER_CST: |
1211 | case COMPLEX_CST: |
1212 | case VECTOR_CST: |
1213 | case REAL_CST: |
1214 | case CONSTRUCTOR: |
1215 | case CONST_DECL: |
1216 | return false; |
1217 | |
1218 | default: |
1219 | return false; |
1220 | } |
1221 | } |
1222 | |
1223 | if (base == NULL_TREE)
1224 | return false; |
1225 | |
1226 | ref->ref = NULL_TREE;
1227 | ref->base = base; |
1228 | ref->ref_alias_set = set; |
1229 | ref->base_alias_set = base_set; |
1230 | /* We discount volatiles from value-numbering elsewhere. */ |
1231 | ref->volatile_p = false; |
1232 | |
1233 | if (!size.to_shwi (&ref->size) || maybe_lt (ref->size, 0)) |
1234 | { |
1235 | ref->offset = 0; |
1236 | ref->size = -1; |
1237 | ref->max_size = -1; |
1238 | return true; |
1239 | } |
1240 | |
1241 | if (!offset.to_shwi (&ref->offset)) |
1242 | { |
1243 | ref->offset = 0; |
1244 | ref->max_size = -1; |
1245 | return true; |
1246 | } |
1247 | |
1248 | if (!max_size.to_shwi (&ref->max_size) || maybe_lt (ref->max_size, 0)) |
1249 | ref->max_size = -1; |
1250 | |
1251 | return true; |
1252 | } |
1253 | |
1254 | /* Copy the operations present in load/store/call REF into RESULT, a vector of |
1255 | vn_reference_op_s's. */ |
1256 | |
1257 | static void |
1258 | copy_reference_ops_from_call (gcall *call, |
1259 | vec<vn_reference_op_s> *result) |
1260 | { |
1261 | vn_reference_op_s temp; |
1262 | unsigned i; |
1263 | tree lhs = gimple_call_lhs (call); |
1264 | int lr; |
1265 | |
1266 | /* If 2 calls have a different non-ssa lhs, vdef value numbers should be |
1267 | different. By adding the lhs here in the vector, we ensure that the |
1268 | hashcode is different, guaranteeing a different value number. */ |
1269 | if (lhs && TREE_CODE (lhs) != SSA_NAME)
1270 | { |
1271 | memset (&temp, 0, sizeof (temp)); |
1272 | temp.opcode = MODIFY_EXPR; |
1273 | temp.type = TREE_TYPE (lhs);
1274 | temp.op0 = lhs; |
1275 | temp.off = -1; |
1276 | result->safe_push (temp); |
1277 | } |
1278 | |
1279 | /* Copy the type, opcode, function, static chain and EH region, if any. */ |
1280 | memset (&temp, 0, sizeof (temp)); |
1281 | temp.type = gimple_call_fntype (call); |
1282 | temp.opcode = CALL_EXPR; |
1283 | temp.op0 = gimple_call_fn (call); |
1284 | if (gimple_call_internal_p (call)) |
1285 | temp.clique = gimple_call_internal_fn (call); |
1286 | temp.op1 = gimple_call_chain (call); |
1287 | if (stmt_could_throw_p (cfun, call) && (lr = lookup_stmt_eh_lp (call)) > 0)
1288 | temp.op2 = size_int (lr);
1289 | temp.off = -1; |
1290 | result->safe_push (temp); |
1291 | |
1292 | /* Copy the call arguments. As they can be references as well, |
1293 | just chain them together. */ |
1294 | for (i = 0; i < gimple_call_num_args (call); ++i) |
1295 | { |
1296 | tree callarg = gimple_call_arg (call, i); |
1297 | copy_reference_ops_from_ref (callarg, result); |
1298 | } |
1299 | } |
1300 | |
1301 | /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates |
1302 | *I_P to point to the last element of the replacement. */ |
1303 | static bool |
1304 | vn_reference_fold_indirect (vec<vn_reference_op_s> *ops, |
1305 | unsigned int *i_p) |
1306 | { |
1307 | unsigned int i = *i_p; |
1308 | vn_reference_op_t op = &(*ops)[i]; |
1309 | vn_reference_op_t mem_op = &(*ops)[i - 1]; |
1310 | tree addr_base; |
1311 | poly_int64 addr_offset = 0; |
1312 | |
1313 | /* The only thing we have to do is from &OBJ.foo.bar add the offset |
1314 | from .foo.bar to the preceding MEM_REF offset and replace the |
1315 | address with &OBJ. */ |
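| /* E.g. a MEM_REF with address &s.a.b and offset 4 becomes a MEM_REF
| with address &s and offset 4 plus the byte offset of .a.b in s. */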
1316 | addr_base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (op->op0, 0),
1317 | &addr_offset, vn_valueize); |
1318 | gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
1319 | if (addr_base != TREE_OPERAND (op->op0, 0))
1320 | { |
1321 | poly_offset_int off |
1322 | = (poly_offset_int::from (wi::to_poly_wide (mem_op->op0), |
1323 | SIGNED) |
1324 | + addr_offset); |
1325 | mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1326 | op->op0 = build_fold_addr_expr (addr_base);
1327 | if (tree_fits_shwi_p (mem_op->op0)) |
1328 | mem_op->off = tree_to_shwi (mem_op->op0); |
1329 | else |
1330 | mem_op->off = -1; |
1331 | return true; |
1332 | } |
1333 | return false; |
1334 | } |
1335 | |
1336 | /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates |
1337 | *I_P to point to the last element of the replacement. */ |
1338 | static bool |
1339 | vn_reference_maybe_forwprop_address (vec<vn_reference_op_s> *ops, |
1340 | unsigned int *i_p) |
1341 | { |
1342 | bool changed = false; |
1343 | vn_reference_op_t op; |
1344 | |
1345 | do |
1346 | { |
1347 | unsigned int i = *i_p; |
1348 | op = &(*ops)[i]; |
1349 | vn_reference_op_t mem_op = &(*ops)[i - 1]; |
1350 | gimple *def_stmt; |
1351 | enum tree_code code; |
1352 | poly_offset_int off; |
1353 | |
1354 | def_stmt = SSA_NAME_DEF_STMT (op->op0);
1355 | if (!is_gimple_assign (def_stmt)) |
1356 | return changed; |
1357 | |
1358 | code = gimple_assign_rhs_code (def_stmt); |
1359 | if (code != ADDR_EXPR |
1360 | && code != POINTER_PLUS_EXPR) |
1361 | return changed; |
1362 | |
1363 | off = poly_offset_int::from (wi::to_poly_wide (mem_op->op0), SIGNED); |
1364 | |
1365 | /* The only thing we have to do is from &OBJ.foo.bar add the offset |
1366 | from .foo.bar to the preceding MEM_REF offset and replace the |
1367 | address with &OBJ. */ |
1368 | if (code == ADDR_EXPR) |
1369 | { |
1370 | tree addr, addr_base; |
1371 | poly_int64 addr_offset; |
1372 | |
1373 | addr = gimple_assign_rhs1 (def_stmt); |
1374 | addr_base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (addr, 0),
1375 | &addr_offset, |
1376 | vn_valueize); |
1377 | /* If that didn't work because the address isn't invariant propagate |
1378 | the reference tree from the address operation in case the current |
1379 | dereference isn't offsetted. */ |
1380 | if (!addr_base |
1381 | && *i_p == ops->length () - 1 |
1382 | && known_eq (off, 0)
1383 | /* This makes us disable this transform for PRE where the |
1384 | reference ops might be also used for code insertion which |
1385 | is invalid. */ |
1386 | && default_vn_walk_kind == VN_WALKREWRITE) |
1387 | { |
1388 | auto_vec<vn_reference_op_s, 32> tem; |
1389 | copy_reference_ops_from_ref (TREE_OPERAND (addr, 0), &tem);
1390 | /* Make sure to preserve TBAA info. The only objects not |
1391 | wrapped in MEM_REFs that can have their address taken are |
1392 | STRING_CSTs. */ |
1393 | if (tem.length () >= 2 |
1394 | && tem[tem.length () - 2].opcode == MEM_REF) |
1395 | { |
1396 | vn_reference_op_t new_mem_op = &tem[tem.length () - 2]; |
1397 | new_mem_op->op0 |
1398 | = wide_int_to_tree (TREE_TYPE (mem_op->op0),
1399 | wi::to_poly_wide (new_mem_op->op0)); |
1400 | } |
1401 | else |
1402 | gcc_assert (tem.last ().opcode == STRING_CST);
1403 | ops->pop (); |
1404 | ops->pop (); |
1405 | ops->safe_splice (tem); |
1406 | --*i_p; |
1407 | return true; |
1408 | } |
1409 | if (!addr_base |
1410 | || TREE_CODE (addr_base) != MEM_REF
1411 | || (TREE_CODE (TREE_OPERAND (addr_base, 0)) == SSA_NAME
1412 | && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (addr_base,
1413 | 0))))
1414 | return changed; |
1415 | |
1416 | off += addr_offset; |
1417 | off += mem_ref_offset (addr_base); |
1418 | op->op0 = TREE_OPERAND (addr_base, 0);
1419 | } |
1420 | else |
1421 | { |
1422 | tree ptr, ptroff; |
1423 | ptr = gimple_assign_rhs1 (def_stmt); |
1424 | ptroff = gimple_assign_rhs2 (def_stmt); |
1425 | if (TREE_CODE (ptr) != SSA_NAME
1426 | || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ptr)
1427 | /* Make sure to not endlessly recurse. |
1428 | See gcc.dg/tree-ssa/20040408-1.c for an example. Can easily |
1429 | happen when we value-number a PHI to its backedge value. */ |
1430 | || SSA_VAL (ptr) == op->op0 |
1431 | || !poly_int_tree_p (ptroff)) |
1432 | return changed; |
1433 | |
1434 | off += wi::to_poly_offset (ptroff); |
1435 | op->op0 = ptr; |
1436 | } |
1437 | |
1438 | mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1439 | if (tree_fits_shwi_p (mem_op->op0)) |
1440 | mem_op->off = tree_to_shwi (mem_op->op0); |
1441 | else |
1442 | mem_op->off = -1; |
1443 | /* ??? Can end up with endless recursion here!? |
1444 | gcc.c-torture/execute/strcmp-1.c */ |
1445 | if (TREE_CODE (op->op0) == SSA_NAME)
1446 | op->op0 = SSA_VAL (op->op0);
1447 | if (TREE_CODE (op->op0) != SSA_NAME)
1448 | op->opcode = TREE_CODE (op->op0);
1449 | |
1450 | changed = true; |
1451 | } |
1452 | /* Tail-recurse. */ |
1453 | while (TREE_CODE (op->op0) == SSA_NAME);
1454 | |
1455 | /* Fold a remaining *&. */ |
1456 | if (TREE_CODE (op->op0) == ADDR_EXPR)
1457 | vn_reference_fold_indirect (ops, i_p); |
1458 | |
1459 | return changed; |
1460 | } |
1461 | |
1462 | /* Optimize the reference REF to a constant if possible or return |
1463 | NULL_TREE if not. */ |
1464 | |
1465 | tree |
1466 | fully_constant_vn_reference_p (vn_reference_t ref) |
1467 | { |
1468 | vec<vn_reference_op_s> operands = ref->operands; |
1469 | vn_reference_op_t op; |
1470 | |
1471 | /* Try to simplify the translated expression if it is |
1472 | a call to a builtin function with at most two arguments. */ |
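| /* When operands[0] is the CALL_EXPR op (see copy_reference_ops_from_call),
| the arguments follow it, so one- and two-argument calls have two or
| three operands in total. */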
1473 | op = &operands[0]; |
1474 | if (op->opcode == CALL_EXPR |
1475 | && (!op->op0 |
1476 | || (TREE_CODE (op->op0) == ADDR_EXPR
1477 | && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
1478 | && fndecl_built_in_p (TREE_OPERAND (op->op0, 0),
1479 | BUILT_IN_NORMAL))) |
1480 | && operands.length () >= 2 |
1481 | && operands.length () <= 3) |
1482 | { |
1483 | vn_reference_op_t arg0, arg1 = NULL;
1484 | bool anyconst = false; |
1485 | arg0 = &operands[1]; |
1486 | if (operands.length () > 2) |
1487 | arg1 = &operands[2]; |
1488 | if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
1489 | || (arg0->opcode == ADDR_EXPR |
1490 | && is_gimple_min_invariant (arg0->op0))) |
1491 | anyconst = true; |
1492 | if (arg1 |
1493 | && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
1494 | || (arg1->opcode == ADDR_EXPR |
1495 | && is_gimple_min_invariant (arg1->op0)))) |
1496 | anyconst = true; |
1497 | if (anyconst) |
1498 | { |
1499 | combined_fn fn; |
1500 | if (op->op0) |
1501 | fn = as_combined_fn (DECL_FUNCTION_CODE |
1502 | (TREE_OPERAND (op->op0, 0)));
1503 | else |
1504 | fn = as_combined_fn ((internal_fn) op->clique); |
1505 | tree folded; |
1506 | if (arg1) |
1507 | folded = fold_const_call (fn, ref->type, arg0->op0, arg1->op0); |
1508 | else |
1509 | folded = fold_const_call (fn, ref->type, arg0->op0); |
1510 | if (folded |
1511 | && is_gimple_min_invariant (folded)) |
1512 | return folded; |
1513 | } |
1514 | } |
1515 | |
1516 | /* Simplify reads from constants or constant initializers. */ |
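| /* The plan: accumulate the constant byte offset of the access while
| walking the ops, find a constant or a constant-initialized decl as the
| base, then fold the read via fold_ctor_reference or by native encoding
| and re-interpreting the bytes. */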
1517 | else if (BITS_PER_UNIT == 8
1518 | && ref->type
1519 | && COMPLETE_TYPE_P (ref->type)
1520 | && is_gimple_reg_type (ref->type))
1521 | { |
1522 | poly_int64 off = 0; |
1523 | HOST_WIDE_INT size;
1524 | if (INTEGRAL_TYPE_P (ref->type))
1525 | size = TYPE_PRECISION (ref->type);
1526 | else if (tree_fits_shwi_p (TYPE_SIZE (ref->type)))
1527 | size = tree_to_shwi (TYPE_SIZE (ref->type));
1528 | else
1529 | return NULL_TREE;
1530 | if (size % BITS_PER_UNIT != 0
1531 | || size > MAX_BITSIZE_MODE_ANY_MODE)
1532 | return NULL_TREE;
1533 | size /= BITS_PER_UNIT;
1534 | unsigned i; |
1535 | for (i = 0; i < operands.length (); ++i) |
1536 | { |
1537 | if (TREE_CODE_CLASS (operands[i].opcode) == tcc_constant)
1538 | { |
1539 | ++i; |
1540 | break; |
1541 | } |
1542 | if (known_eq (operands[i].off, -1))
1543 | return NULL_TREE;
1544 | off += operands[i].off; |
1545 | if (operands[i].opcode == MEM_REF) |
1546 | { |
1547 | ++i; |
1548 | break; |
1549 | } |
1550 | } |
1551 | vn_reference_op_t base = &operands[--i]; |
1552 | tree ctor = error_mark_node;
1553 | tree decl = NULL_TREE;
1554 | if (TREE_CODE_CLASS (base->opcode) == tcc_constant)
1555 | ctor = base->op0; |
1556 | else if (base->opcode == MEM_REF |
1557 | && base[1].opcode == ADDR_EXPR |
1558 | && (TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == VAR_DECL
1559 | || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == CONST_DECL
1560 | || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == STRING_CST))
1561 | { |
1562 | decl = TREE_OPERAND (base[1].op0, 0);
1563 | if (TREE_CODE (decl) == STRING_CST)
1564 | ctor = decl; |
1565 | else |
1566 | ctor = ctor_for_folding (decl); |
1567 | } |
1568 | if (ctor == NULL_TREE)
1569 | return build_zero_cst (ref->type);
1570 | else if (ctor != error_mark_node)
1571 | { |
1572 | HOST_WIDE_INT const_off;
1573 | if (decl) |
1574 | { |
1575 | tree res = fold_ctor_reference (ref->type, ctor, |
1576 | off * BITS_PER_UNIT,
1577 | size * BITS_PER_UNIT, decl);
1578 | if (res) |
1579 | { |
1580 | STRIP_USELESS_TYPE_CONVERSION (res);
1581 | if (is_gimple_min_invariant (res)) |
1582 | return res; |
1583 | } |
1584 | } |
1585 | else if (off.is_constant (&const_off)) |
1586 | { |
1587 | unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
1588 | int len = native_encode_expr (ctor, buf, size, const_off); |
1589 | if (len > 0) |
1590 | return native_interpret_expr (ref->type, buf, len); |
1591 | } |
1592 | } |
1593 | } |
1594 | |
1595 | return NULL_TREE;
1596 | } |
1597 | |
1598 | /* Return true if OPS contain a storage order barrier. */ |
1599 | |
1600 | static bool |
1601 | contains_storage_order_barrier_p (vec<vn_reference_op_s> ops) |
1602 | { |
1603 | vn_reference_op_t op; |
1604 | unsigned i; |
1605 | |
1606 | FOR_EACH_VEC_ELT (ops, i, op)
1607 | if (op->opcode == VIEW_CONVERT_EXPR && op->reverse) |
1608 | return true; |
1609 | |
1610 | return false; |
1611 | } |
1612 | |
1613 | /* Return true if OPS represent an access with reverse storage order. */ |
1614 | |
1615 | static bool |
1616 | reverse_storage_order_for_component_p (vec<vn_reference_op_s> ops) |
1617 | { |
1618 | unsigned i = 0; |
1619 | if (ops[i].opcode == REALPART_EXPR || ops[i].opcode == IMAGPART_EXPR) |
1620 | ++i; |
1621 | switch (ops[i].opcode) |
1622 | { |
1623 | case ARRAY_REF: |
1624 | case COMPONENT_REF: |
1625 | case BIT_FIELD_REF: |
1626 | case MEM_REF: |
1627 | return ops[i].reverse; |
1628 | default: |
1629 | return false; |
1630 | } |
1631 | } |
1632 | |
1633 | /* Transform any SSA_NAME's in a vector of vn_reference_op_s |
1634 | structures into their value numbers. This is done in-place.
1635 | *VALUEIZED_ANYTHING will specify
1636 | whether any operands were valueized. */ |
1637 | |
1638 | static void |
1639 | valueize_refs_1 (vec<vn_reference_op_s> *orig, bool *valueized_anything, |
1640 | bool with_avail = false) |
1641 | { |
1642 | *valueized_anything = false; |
1643 | |
1644 | for (unsigned i = 0; i < orig->length (); ++i) |
1645 | { |
1646 | re_valueize: |
1647 | vn_reference_op_t vro = &(*orig)[i]; |
1648 | if (vro->opcode == SSA_NAME |
1649 | || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
1650 | { |
1651 | tree tem = with_avail ? vn_valueize (vro->op0) : SSA_VAL (vro->op0); |
1652 | if (tem != vro->op0) |
1653 | { |
1654 | *valueized_anything = true; |
1655 | vro->op0 = tem; |
1656 | } |
1657 | /* If it transforms from an SSA_NAME to a constant, update |
1658 | the opcode. */ |
1659 | if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
1660 | vro->opcode = TREE_CODE (vro->op0);
1661 | } |
1662 | if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
1663 | { |
1664 | tree tem = with_avail ? vn_valueize (vro->op1) : SSA_VAL (vro->op1); |
1665 | if (tem != vro->op1) |
1666 | { |
1667 | *valueized_anything = true; |
1668 | vro->op1 = tem; |
1669 | } |
1670 | } |
1671 | if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
1672 | { |
1673 | tree tem = with_avail ? vn_valueize (vro->op2) : SSA_VAL (vro->op2); |
1674 | if (tem != vro->op2) |
1675 | { |
1676 | *valueized_anything = true; |
1677 | vro->op2 = tem; |
1678 | } |
1679 | } |
1680 | /* If it transforms from an SSA_NAME to an address, fold with |
1681 | a preceding indirect reference. */ |
1682 | if (i > 0 |
1683 | && vro->op0 |
1684 | && TREE_CODE (vro->op0) == ADDR_EXPR
1685 | && (*orig)[i - 1].opcode == MEM_REF) |
1686 | { |
1687 | if (vn_reference_fold_indirect (orig, &i)) |
1688 | *valueized_anything = true; |
1689 | } |
1690 | else if (i > 0 |
1691 | && vro->opcode == SSA_NAME |
1692 | && (*orig)[i - 1].opcode == MEM_REF) |
1693 | { |
1694 | if (vn_reference_maybe_forwprop_address (orig, &i)) |
1695 | { |
1696 | *valueized_anything = true; |
1697 | /* Re-valueize the current operand. */ |
1698 | goto re_valueize; |
1699 | } |
1700 | } |
1701 | /* If it transforms a non-constant ARRAY_REF into a constant |
1702 | one, adjust the constant offset. */ |
1703 | else if (vro->opcode == ARRAY_REF |
1704 | && known_eq (vro->off, -1)
1705 | && poly_int_tree_p (vro->op0) |
1706 | && poly_int_tree_p (vro->op1) |
1707 | && TREE_CODE (vro->op2) == INTEGER_CST)
1708 | { |
1709 | poly_offset_int off = ((wi::to_poly_offset (vro->op0) |
1710 | - wi::to_poly_offset (vro->op1)) |
1711 | * wi::to_offset (vro->op2) |
1712 | * vn_ref_op_align_unit (vro)); |
1713 | off.to_shwi (&vro->off); |
1714 | } |
1715 | } |
1716 | } |
1717 | |
1718 | static void |
1719 | valueize_refs (vec<vn_reference_op_s> *orig) |
1720 | { |
1721 | bool tem; |
1722 | valueize_refs_1 (orig, &tem); |
1723 | } |
1724 | |
1725 | static vec<vn_reference_op_s> shared_lookup_references; |
1726 | |
1727 | /* Create a vector of vn_reference_op_s structures from REF, a |
1728 | REFERENCE_CLASS_P tree. The vector is shared among all callers of |
1729 | this function. *VALUEIZED_ANYTHING will specify whether any |
1730 | operands were valueized. */ |
1731 | |
1732 | static vec<vn_reference_op_s> |
1733 | valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything) |
1734 | { |
1735 | if (!ref) |
1736 | return vNULL; |
1737 | shared_lookup_references.truncate (0); |
1738 | copy_reference_ops_from_ref (ref, &shared_lookup_references); |
1739 | valueize_refs_1 (&shared_lookup_references, valueized_anything); |
1740 | return shared_lookup_references; |
1741 | } |
1742 | |
1743 | /* Create a vector of vn_reference_op_s structures from CALL, a |
1744 | call statement. The vector is shared among all callers of |
1745 | this function. */ |
1746 | |
1747 | static vec<vn_reference_op_s> |
1748 | valueize_shared_reference_ops_from_call (gcall *call) |
1749 | { |
1750 | if (!call) |
1751 | return vNULL; |
1752 | shared_lookup_references.truncate (0); |
1753 | copy_reference_ops_from_call (call, &shared_lookup_references); |
1754 | valueize_refs (&shared_lookup_references); |
1755 | return shared_lookup_references; |
1756 | } |
1757 | |
1758 | /* Lookup a SCCVN reference operation VR in the current hash table. |
1759 | Returns the resulting value number if it exists in the hash table, |
1760 | NULL_TREE otherwise. VNRESULT will be filled in with the actual |
1761 | vn_reference_t stored in the hashtable if something is found. */ |
1762 | |
1763 | static tree |
1764 | vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult) |
1765 | { |
1766 | vn_reference_s **slot; |
1767 | hashval_t hash; |
1768 | |
1769 | hash = vr->hashcode; |
1770 | slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT); |
1771 | if (slot) |
1772 | { |
1773 | if (vnresult) |
1774 | *vnresult = (vn_reference_t)*slot; |
1775 | return ((vn_reference_t)*slot)->result; |
1776 | } |
1777 | |
1778 | return NULL_TREE;
1779 | } |
1780 | |
1781 | |
1782 | /* Partial definition tracking support. */ |
1783 | |
1784 | struct pd_range |
1785 | { |
1786 | HOST_WIDE_INT offset;
1787 | HOST_WIDE_INT size;
1788 | }; |
1789 | |
1790 | struct pd_data |
1791 | { |
1792 | tree rhs; |
1793 | HOST_WIDE_INT rhs_off;
1794 | HOST_WIDE_INT offset;
1795 | HOST_WIDE_INT size;
1796 | }; |
1797 | |
1798 | /* Context for alias walking. */ |
1799 | |
1800 | struct vn_walk_cb_data |
1801 | { |
1802 | vn_walk_cb_data (vn_reference_t vr_, tree orig_ref_, tree *last_vuse_ptr_, |
1803 | vn_lookup_kind vn_walk_kind_, bool tbaa_p_, tree mask_, |
1804 | bool redundant_store_removal_p_) |
1805 | : vr (vr_), last_vuse_ptr (last_vuse_ptr_), last_vuse (NULL_TREE),
1806 | mask (mask_), masked_result (NULL_TREE), same_val (NULL_TREE),
1807 | vn_walk_kind (vn_walk_kind_), |
1808 | tbaa_p (tbaa_p_), redundant_store_removal_p (redundant_store_removal_p_), |
1809 | saved_operands (vNULL), first_set (-2), first_base_set (-2), |
1810 | known_ranges (NULL)
1811 | { |
1812 | if (!last_vuse_ptr) |
1813 | last_vuse_ptr = &last_vuse; |
1814 | ao_ref_init (&orig_ref, orig_ref_); |
1815 | if (mask) |
1816 | { |
1817 | wide_int w = wi::to_wide (mask); |
1818 | unsigned int pos = 0, prec = w.get_precision (); |
1819 | pd_data pd; |
1820 | pd.rhs = build_constructor (NULL_TREE, NULL);
1821 | pd.rhs_off = 0; |
1822 | /* When a bitwise AND with a constant is done on a memory load,
1823 | we don't really need all the bits to be defined or defined
1824 | to constants; we don't care what is in the positions
1825 | corresponding to 0 bits in the mask.
1826 | So, push the ranges of those 0 bits in the mask as artificial |
1827 | zero stores and let the partial def handling code do the |
1828 | rest. */ |
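| /* E.g. on a little-endian target a 16-bit mask of 0xff0f has zero bits
| only at [4, 8), so a single artificial zero store covering those
| bits is pushed. */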
1829 | while (pos < prec) |
1830 | { |
1831 | int tz = wi::ctz (w); |
1832 | if (pos + tz > prec) |
1833 | tz = prec - pos; |
1834 | if (tz) |
1835 | { |
1836 | if (BYTES_BIG_ENDIAN)
1837 | pd.offset = prec - pos - tz; |
1838 | else |
1839 | pd.offset = pos; |
1840 | pd.size = tz; |
1841 | void *r = push_partial_def (pd, 0, 0, 0, prec); |
1842 | gcc_assert (r == NULL_TREE);
1843 | } |
1844 | pos += tz; |
1845 | if (pos == prec) |
1846 | break; |
1847 | w = wi::lrshift (w, tz); |
1848 | tz = wi::ctz (wi::bit_not (w)); |
1849 | if (pos + tz > prec) |
1850 | tz = prec - pos; |
1851 | pos += tz; |
1852 | w = wi::lrshift (w, tz); |
1853 | } |
1854 | } |
1855 | } |
1856 | ~vn_walk_cb_data (); |
1857 | void *finish (alias_set_type, alias_set_type, tree); |
1858 | void *push_partial_def (pd_data pd, |
1859 | alias_set_type, alias_set_type, HOST_WIDE_INT,
1860 | HOST_WIDE_INT);
1861 | |
1862 | vn_reference_t vr; |
1863 | ao_ref orig_ref; |
1864 | tree *last_vuse_ptr; |
1865 | tree last_vuse; |
1866 | tree mask; |
1867 | tree masked_result; |
1868 | tree same_val; |
1869 | vn_lookup_kind vn_walk_kind; |
1870 | bool tbaa_p; |
1871 | bool redundant_store_removal_p; |
1872 | vec<vn_reference_op_s> saved_operands; |
1873 | |
1874 | /* The VDEFs of partial defs we come along. */ |
1875 | auto_vec<pd_data, 2> partial_defs; |
1876 | /* The first defs range to avoid splay tree setup in most cases. */ |
1877 | pd_range first_range; |
1878 | alias_set_type first_set; |
1879 | alias_set_type first_base_set; |
1880 | splay_tree known_ranges; |
1881 | obstack ranges_obstack; |
1882 | }; |
1883 | |
1884 | vn_walk_cb_data::~vn_walk_cb_data () |
1885 | { |
1886 | if (known_ranges) |
1887 | { |
1888 | splay_tree_delete (known_ranges); |
1889 | obstack_free (&ranges_obstack, NULL);
1890 | } |
1891 | saved_operands.release (); |
1892 | } |
1893 | |
1894 | void * |
1895 | vn_walk_cb_data::finish (alias_set_type set, alias_set_type base_set, tree val) |
1896 | { |
1897 | if (first_set != -2) |
1898 | { |
1899 | set = first_set; |
1900 | base_set = first_base_set; |
1901 | } |
1902 | if (mask) |
1903 | { |
1904 | masked_result = val; |
1905 | return (void *) -1; |
1906 | } |
1907 | if (same_val && !operand_equal_p (val, same_val)) |
1908 | return (void *) -1; |
1909 | vec<vn_reference_op_s> &operands |
1910 | = saved_operands.exists () ? saved_operands : vr->operands; |
1911 | return vn_reference_lookup_or_insert_for_pieces (last_vuse, set, base_set, |
1912 | vr->type, operands, val); |
1913 | } |
1914 | |
1915 | /* pd_range splay-tree helpers. */ |
1916 | |
1917 | static int |
1918 | pd_range_compare (splay_tree_key offset1p, splay_tree_key offset2p) |
1919 | { |
1920 | HOST_WIDE_INT offset1 = *(HOST_WIDE_INT *)offset1p;
1921 | HOST_WIDE_INT offset2 = *(HOST_WIDE_INT *)offset2p;
1922 | if (offset1 < offset2) |
1923 | return -1; |
1924 | else if (offset1 > offset2) |
1925 | return 1; |
1926 | return 0; |
1927 | } |
1928 | |
1929 | static void * |
1930 | pd_tree_alloc (int size, void *data_) |
1931 | { |
1932 | vn_walk_cb_data *data = (vn_walk_cb_data *)data_; |
1933 | return obstack_alloc (&data->ranges_obstack, size);
1934 | } |
1935 | |
1936 | static void |
1937 | pd_tree_dealloc (void *, void *) |
1938 | { |
1939 | } |
1940 | |
1941 | /* Push PD to the vector of partial definitions, returning a
1942 | value when we are ready to combine things with SET, BASE_SET, OFFSETI
1943 | and MAXSIZEI, NULL when we want to continue looking for partial defs
1944 | or -1 on failure. */
1945 | |
1946 | void * |
1947 | vn_walk_cb_data::push_partial_def (pd_data pd, |
1948 | alias_set_type set, alias_set_type base_set, |
1949 | HOST_WIDE_INT offseti,
1950 | HOST_WIDE_INT maxsizei)
1951 | { |
1952 | const HOST_WIDE_INT bufsize = 64;
1953 | /* We're using a fixed buffer for encoding so fail early if the object |
1954 | we want to interpret is bigger. */ |
1955 | if (maxsizei > bufsize * BITS_PER_UNIT
1956 | || CHAR_BIT != 8
1957 | || BITS_PER_UNIT != 8
1958 | /* Not prepared to handle PDP endian. */
1959 | || BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
1960 | return (void *)-1; |
1961 | |
1962 | /* Turn too large constant stores into non-constant stores. */ |
1963 | if (CONSTANT_CLASS_P (pd.rhs) && pd.size > bufsize * BITS_PER_UNIT)
1964 | pd.rhs = error_mark_node;
1965 | |
1966 | /* And for non-constant or CONSTRUCTOR stores shrink them to only keep at |
1967 | most a partial byte before and/or after the region. */ |
1968 | if (!CONSTANT_CLASS_P (pd.rhs))
1969 | { |
1970 | if (pd.offset < offseti) |
1971 | { |
1972 | HOST_WIDE_INT o = ROUND_DOWN (offseti - pd.offset, BITS_PER_UNIT);
1973 | gcc_assert (pd.size > o);
1974 | pd.size -= o; |
1975 | pd.offset += o; |
1976 | } |
1977 | if (pd.size > maxsizei) |
1978 | pd.size = maxsizei + ((pd.size - maxsizei) % BITS_PER_UNIT);
1979 | } |
1980 | |
1981 | pd.offset -= offseti; |
1982 | |
1983 | bool pd_constant_p = (TREE_CODE (pd.rhs) == CONSTRUCTOR
1984 | || CONSTANT_CLASS_P (pd.rhs));
1985 | pd_range *r; |
1986 | if (partial_defs.is_empty ()) |
1987 | { |
1988 | /* If we get a clobber upfront, fail. */ |
1989 | if (TREE_CLOBBER_P (pd.rhs))
1990 | return (void *)-1; |
1991 | if (!pd_constant_p) |
1992 | return (void *)-1; |
1993 | partial_defs.safe_push (pd); |
1994 | first_range.offset = pd.offset; |
1995 | first_range.size = pd.size; |
1996 | first_set = set; |
1997 | first_base_set = base_set; |
1998 | last_vuse_ptr = NULL;
1999 | r = &first_range; |
2000 | /* Go check if the first partial definition was a full one in case |
2001 | the caller didn't optimize for this. */ |
2002 | } |
2003 | else |
2004 | { |
2005 | if (!known_ranges) |
2006 | { |
2007 | /* ??? Optimize the case where the 2nd partial def completes |
2008 | things. */ |
2009 | gcc_obstack_init (&ranges_obstack);
2010 | known_ranges = splay_tree_new_with_allocator (pd_range_compare, 0, 0, |
2011 | pd_tree_alloc, |
2012 | pd_tree_dealloc, this); |
2013 | splay_tree_insert (known_ranges, |
2014 | (splay_tree_key)&first_range.offset, |
2015 | (splay_tree_value)&first_range); |
2016 | } |
2017 | |
2018 | pd_range newr = { pd.offset, pd.size }; |
2019 | splay_tree_node n; |
2020 | /* Lookup the predecessor of offset + 1 and see if we need to merge. */ |
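| /* Querying offset + 1 and testing with r->size + 1 below means a range
| that ends exactly where NEWR starts still counts as overlapping, so
| adjacent ranges are merged as well. */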
2021 | HOST_WIDE_INT loffset = newr.offset + 1;
2022 | if ((n = splay_tree_predecessor (known_ranges, (splay_tree_key)&loffset)) |
2023 | && ((r = (pd_range *)n->value), true) |
2024 | && ranges_known_overlap_p (r->offset, r->size + 1, |
2025 | newr.offset, newr.size)) |
2026 | { |
2027 | /* Ignore partial defs already covered. Here we also drop shadowed |
2028 | clobbers arriving here at the floor. */ |
2029 | if (known_subrange_p (newr.offset, newr.size, r->offset, r->size)) |
2030 | return NULL;
2031 | r->size
2032 | = MAX (r->offset + r->size, newr.offset + newr.size) - r->offset;
2033 | } |
2034 | else |
2035 | { |
2036 | /* newr.offset wasn't covered yet, insert the range. */ |
2037 | r = XOBNEW (&ranges_obstack, pd_range);
2038 | *r = newr; |
2039 | splay_tree_insert (known_ranges, (splay_tree_key)&r->offset, |
2040 | (splay_tree_value)r); |
2041 | } |
2042 | /* Merge r which now contains newr and is a member of the splay tree with |
2043 | adjacent overlapping ranges. */ |
2044 | pd_range *rafter; |
2045 | while ((n = splay_tree_successor (known_ranges, |
2046 | (splay_tree_key)&r->offset)) |
2047 | && ((rafter = (pd_range *)n->value), true) |
2048 | && ranges_known_overlap_p (r->offset, r->size + 1, |
2049 | rafter->offset, rafter->size)) |
2050 | { |
2051 | r->size = MAX (r->offset + r->size,
2052 | rafter->offset + rafter->size) - r->offset;
2053 | splay_tree_remove (known_ranges, (splay_tree_key)&rafter->offset); |
2054 | } |
2055 | /* If we get a clobber, fail. */ |
2056 | if (TREE_CLOBBER_P (pd.rhs))
2057 | return (void *)-1; |
2058 | /* Non-constants are OK as long as they are shadowed by a constant. */ |
2059 | if (!pd_constant_p) |
2060 | return (void *)-1; |
2061 | partial_defs.safe_push (pd); |
2062 | } |
2063 | |
2064 | /* Now we have merged newr into the range tree. When we have covered |
2065 | [offseti, sizei] then the tree will contain exactly one node which has |
2066 | the desired properties and it will be 'r'. */ |
2067 | if (!known_subrange_p (0, maxsizei, r->offset, r->size)) |
2068 | /* Continue looking for partial defs. */ |
2069 | return NULL;
2070 | |
2071 | /* Now simply native encode all partial defs in reverse order. */ |
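| /* The defs were pushed while walking away from the load, so popping
| encodes the store found last (earliest in program order) first and
| lets the more recent stores overwrite its bytes in BUFFER. */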
2072 | unsigned ndefs = partial_defs.length (); |
2073 | /* We support up to 512-bit values (for V8DFmode). */ |
2074 | unsigned char buffer[bufsize + 1]; |
2075 | unsigned char this_buffer[bufsize + 1]; |
2076 | int len; |
2077 | |
2078 | memset (buffer, 0, bufsize + 1); |
2079 | unsigned needed_len = ROUND_UP (maxsizei, BITS_PER_UNIT) / BITS_PER_UNIT;
2080 | while (!partial_defs.is_empty ()) |
2081 | { |
2082 | pd_data pd = partial_defs.pop (); |
2083 | unsigned int amnt; |
2084 | if (TREE_CODE (pd.rhs) == CONSTRUCTOR)
2085 | { |
2086 | /* Empty CONSTRUCTOR. */ |
2087 | if (pd.size >= needed_len * BITS_PER_UNIT)
2088 | len = needed_len;
2089 | else
2090 | len = ROUND_UP (pd.size, BITS_PER_UNIT) / BITS_PER_UNIT;
2091 | memset (this_buffer, 0, len); |
2092 | } |
2093 | else if (pd.rhs_off >= 0) |
2094 | { |
2095 | len = native_encode_expr (pd.rhs, this_buffer, bufsize, |
2096 | (MAX (0, -pd.offset)
2097 | + pd.rhs_off) / BITS_PER_UNIT);
2098 | if (len <= 0
2099 | || len < (ROUND_UP (pd.size, BITS_PER_UNIT) / BITS_PER_UNIT
2100 | - MAX (0, -pd.offset) / BITS_PER_UNIT))
2101 | { |
2102 | if (dump_file && (dump_flags & TDF_DETAILS)) |
2103 | fprintf (dump_file, "Failed to encode %u " |
2104 | "partial definitions\n", ndefs); |
2105 | return (void *)-1; |
2106 | } |
2107 | } |
2108 | else /* negative pd.rhs_off indicates we want to chop off first bits */ |
2109 | { |
2110 | if (-pd.rhs_off >= bufsize) |
2111 | return (void *)-1; |
2112 | len = native_encode_expr (pd.rhs,
2113 | this_buffer + -pd.rhs_off / BITS_PER_UNIT,
2114 | bufsize - -pd.rhs_off / BITS_PER_UNIT,
2115 | MAX (0, -pd.offset) / BITS_PER_UNIT);
2116 | if (len <= 0
2117 | || len < (ROUND_UP (pd.size, BITS_PER_UNIT) / BITS_PER_UNIT
2118 | - MAX (0, -pd.offset) / BITS_PER_UNIT))
2119 | { |
2120 | if (dump_file && (dump_flags & TDF_DETAILS)) |
2121 | fprintf (dump_file, "Failed to encode %u " |
2122 | "partial definitions\n", ndefs); |
2123 | return (void *)-1; |
2124 | } |
2125 | } |
2126 | |
2127 | unsigned char *p = buffer; |
2128 | HOST_WIDE_INT size = pd.size;
2129 | if (pd.offset < 0)
2130 | size -= ROUND_DOWN (-pd.offset, BITS_PER_UNIT);
2131 | this_buffer[len] = 0; |
2132 | if (BYTES_BIG_ENDIAN)
2133 | { |
2134 | /* LSB of this_buffer[len - 1] byte should be at |
2135 | pd.offset + pd.size - 1 bits in buffer. */ |
2136 | amnt = ((unsigned HOST_WIDE_INT) pd.offset
2137 | + pd.size) % BITS_PER_UNIT;
2138 | if (amnt) |
2139 | shift_bytes_in_array_right (this_buffer, len + 1, amnt); |
2140 | unsigned char *q = this_buffer; |
2141 | unsigned int off = 0; |
2142 | if (pd.offset >= 0) |
2143 | { |
2144 | unsigned int msk; |
2145 | off = pd.offset / BITS_PER_UNIT;
2146 | gcc_assert (off < needed_len);
2147 | p = buffer + off; |
2148 | if (size <= amnt) |
2149 | { |
2150 | msk = ((1 << size) - 1) << (BITS_PER_UNIT - amnt);
2151 | *p = (*p & ~msk) | (this_buffer[len] & msk); |
2152 | size = 0; |
2153 | } |
2154 | else |
2155 | { |
2156 | if (TREE_CODE (pd.rhs) != CONSTRUCTOR)
2157 | q = (this_buffer + len
2158 | - (ROUND_UP (size - amnt, BITS_PER_UNIT)
2159 | / BITS_PER_UNIT));
2160 | if (pd.offset % BITS_PER_UNIT)
2161 | {
2162 | msk = -1U << (BITS_PER_UNIT
2163 | - (pd.offset % BITS_PER_UNIT));
2164 | *p = (*p & msk) | (*q & ~msk); |
2165 | p++; |
2166 | q++; |
2167 | off++; |
2168 | size -= BITS_PER_UNIT - (pd.offset % BITS_PER_UNIT);
2169 | gcc_assert (size >= 0);
2170 | } |
2171 | } |
2172 | } |
2173 | else if (TREE_CODE (pd.rhs) != CONSTRUCTOR)
2174 | {
2175 | q = (this_buffer + len
2176 | - (ROUND_UP (size - amnt, BITS_PER_UNIT)
2177 | / BITS_PER_UNIT));
2178 | if (pd.offset % BITS_PER_UNIT)
2179 | { |
2180 | q++; |
2181 | size -= BITS_PER_UNIT - ((unsigned HOST_WIDE_INT) pd.offset
2182 | % BITS_PER_UNIT);
2183 | gcc_assert (size >= 0);
2184 | } |
2185 | } |
2186 | if ((unsigned HOST_WIDE_INT) size / BITS_PER_UNIT + off
2187 | > needed_len)
2188 | size = (needed_len - off) * BITS_PER_UNIT;
2189 | memcpy (p, q, size / BITS_PER_UNIT);
2190 | if (size % BITS_PER_UNIT)
2191 | { |
2192 | unsigned int msk
2193 | = -1U << (BITS_PER_UNIT - (size % BITS_PER_UNIT));
2194 | p += size / BITS_PER_UNIT;
2195 | q += size / BITS_PER_UNIT;
2196 | *p = (*q & msk) | (*p & ~msk); |
2197 | } |
2198 | } |
2199 | else |
2200 | { |
2201 | if (pd.offset >= 0) |
2202 | { |
2203 | /* LSB of this_buffer[0] byte should be at pd.offset bits |
2204 | in buffer. */ |
2205 | unsigned int msk; |
2206 | size = MIN (size, (HOST_WIDE_INT) needed_len * BITS_PER_UNIT);
2207 | amnt = pd.offset % BITS_PER_UNIT;
2208 | if (amnt)
2209 | shift_bytes_in_array_left (this_buffer, len + 1, amnt);
2210 | unsigned int off = pd.offset / BITS_PER_UNIT;
2211 | gcc_assert (off < needed_len);
2212 | size = MIN (size,
2213 | (HOST_WIDE_INT) (needed_len - off) * BITS_PER_UNIT);
2214 | p = buffer + off; |
2215 | if (amnt + size < BITS_PER_UNIT)
2216 | { |
2217 | /* Low amnt bits come from *p, then size bits |
2218 | from this_buffer[0] and the remaining again from |
2219 | *p. */ |
2220 | msk = ((1 << size) - 1) << amnt; |
2221 | *p = (*p & ~msk) | (this_buffer[0] & msk); |
2222 | size = 0; |
2223 | } |
2224 | else if (amnt) |
2225 | { |
2226 | msk = -1U << amnt; |
2227 | *p = (*p & ~msk) | (this_buffer[0] & msk); |
2228 | p++; |
2229 | size -= (BITS_PER_UNIT - amnt);
2230 | } |
2231 | } |
2232 | else |
2233 | { |
2234 | amnt = (unsigned HOST_WIDE_INT) pd.offset % BITS_PER_UNIT;
2235 | if (amnt)
2236 | size -= BITS_PER_UNIT - amnt;
2237 | size = MIN (size, (HOST_WIDE_INT) needed_len * BITS_PER_UNIT);
2238 | if (amnt)
2239 | shift_bytes_in_array_left (this_buffer, len + 1, amnt);
2240 | } |
2241 | memcpy (p, this_buffer + (amnt != 0), size / BITS_PER_UNIT);
2242 | p += size / BITS_PER_UNIT;
2243 | if (size % BITS_PER_UNIT)
2244 | {
2245 | unsigned int msk = -1U << (size % BITS_PER_UNIT);
2246 | *p = (this_buffer[(amnt != 0) + size / BITS_PER_UNIT]
2247 | & ~msk) | (*p & msk); |
2248 | } |
2249 | } |
2250 | } |
2251 | |
2252 | tree type = vr->type; |
2253 | /* Make sure to interpret in a type that has a range covering the whole |
2254 | access size. */ |
2255 | if (INTEGRAL_TYPE_P (vr->type) && maxsizei != TYPE_PRECISION (vr->type))
2256 | type = build_nonstandard_integer_type (maxsizei, TYPE_UNSIGNED (type));
2257 | tree val; |
2258 | if (BYTES_BIG_ENDIAN)
2259 | { |
2260 | unsigned sz = needed_len; |
2261 | if (maxsizei % BITS_PER_UNIT)
2262 | shift_bytes_in_array_right (buffer, needed_len,
2263 | BITS_PER_UNIT
2264 | - (maxsizei % BITS_PER_UNIT));
2265 | if (INTEGRAL_TYPE_P (type))
2266 | sz = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
2267 | if (sz > needed_len) |
2268 | { |
2269 | memcpy (this_buffer + (sz - needed_len), buffer, needed_len); |
2270 | val = native_interpret_expr (type, this_buffer, sz); |
2271 | } |
2272 | else |
2273 | val = native_interpret_expr (type, buffer, needed_len); |
2274 | } |
2275 | else |
2276 | val = native_interpret_expr (type, buffer, bufsize); |
2277 | /* If we chop off bits because the type's precision doesn't match the memory
2278 | access size, this is ok when optimizing reads but not when called from
2279 | the DSE code during elimination. */
2280 | if (val && type != vr->type)
2281 | {
2282 | if (! int_fits_type_p (val, vr->type))
2283 | val = NULL_TREE;
2284 | else
2285 | val = fold_convert (vr->type, val);
2286 | } |
2287 | |
2288 | if (val) |
2289 | { |
2290 | if (dump_file && (dump_flags & TDF_DETAILS)) |
2291 | fprintf (dump_file, |
2292 | "Successfully combined %u partial definitions\n", ndefs); |
2293 | /* We are using the alias-set of the first store we encounter which |
2294 | should be appropriate here. */ |
2295 | return finish (first_set, first_base_set, val); |
2296 | } |
2297 | else |
2298 | { |
2299 | if (dump_file && (dump_flags & TDF_DETAILS)) |
2300 | fprintf (dump_file, |
2301 | "Failed to interpret %u encoded partial definitions\n", ndefs); |
2302 | return (void *)-1; |
2303 | } |
2304 | } |
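     | /* Editor's sketch (not part of GCC): the essence of combining two
     |    byte-aligned partial definitions on a little-endian host.  Two
     |    16-bit pieces covering bytes [0,2) and [2,4) are copied into one
     |    buffer, which is then reinterpreted as a single 32-bit value,
     |    the analogue of the native_interpret_expr call above.  */
     | static unsigned int
     | combine_two_partial_defs_example (void)
     | {
     |   unsigned char buffer[4];
     |   unsigned short lo = 0x1234, hi = 0x5678;
     |   memcpy (buffer, &lo, 2);      /* partial def at bit offset 0, size 16 */
     |   memcpy (buffer + 2, &hi, 2);  /* partial def at bit offset 16, size 16 */
     |   unsigned int val;
     |   memcpy (&val, buffer, 4);     /* interpret the combined bytes */
     |   return val;                   /* 0x56781234 on little-endian hosts */
     | }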
2305 | |
2306 | /* Callback for walk_non_aliased_vuses. Adjusts the vn_reference_t VR_ |
2307 | with the current VUSE and performs the expression lookup. */ |
2308 | |
2309 | static void * |
2310 | vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse, void *data_)
2311 | { |
2312 | vn_walk_cb_data *data = (vn_walk_cb_data *)data_; |
2313 | vn_reference_t vr = data->vr; |
2314 | vn_reference_s **slot; |
2315 | hashval_t hash; |
2316 | |
2317 | /* If we have partial definitions recorded we have to go through |
2318 | vn_reference_lookup_3. */ |
2319 | if (!data->partial_defs.is_empty ()) |
2320 | return NULL;
2321 | |
2322 | if (data->last_vuse_ptr) |
2323 | { |
2324 | *data->last_vuse_ptr = vuse; |
2325 | data->last_vuse = vuse; |
2326 | } |
2327 | |
2328 | /* Fixup vuse and hash. */ |
2329 | if (vr->vuse) |
2330 | vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
2331 | vr->vuse = vuse_ssa_val (vuse);
2332 | if (vr->vuse)
2333 | vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);
2334 | |
2335 | hash = vr->hashcode; |
2336 | slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT); |
2337 | if (slot) |
2338 | { |
2339 | if ((*slot)->result && data->saved_operands.exists ()) |
2340 | return data->finish (vr->set, vr->base_set, (*slot)->result); |
2341 | return *slot; |
2342 | } |
2343 | |
2344 | return NULL;
2345 | } |
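     | /* Editor's note (not in the GCC source): the vuse/hash fixup above is
     |    incremental because the vuse contributes to the reference hash as a
     |    plain addend, so swapping it only needs a subtract and an add:
     |      hash' = hash - SSA_NAME_VERSION (old_vuse)
     |                   + SSA_NAME_VERSION (new_vuse)
     |    A minimal model in C, where unsigned wraparound is harmless:  */
     | static unsigned int
     | update_vuse_hash_example (unsigned int hash, unsigned int old_version,
     |                           unsigned int new_version)
     | {
     |   return hash - old_version + new_version;
     | }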
2346 | |
2347 | /* Lookup an existing or insert a new vn_reference entry into the |
2348 | value table for the VUSE, SET, TYPE, OPERANDS reference which |
2349 | has the value VALUE which is either a constant or an SSA name. */ |
2350 | |
2351 | static vn_reference_t |
2352 | vn_reference_lookup_or_insert_for_pieces (tree vuse, |
2353 | alias_set_type set, |
2354 | alias_set_type base_set, |
2355 | tree type, |
2356 | vec<vn_reference_op_s, |
2357 | va_heap> operands, |
2358 | tree value) |
2359 | { |
2360 | vn_reference_s vr1; |
2361 | vn_reference_t result; |
2362 | unsigned value_id; |
2363 | vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2364 | vr1.operands = operands; |
2365 | vr1.type = type; |
2366 | vr1.set = set; |
2367 | vr1.base_set = base_set; |
2368 | vr1.hashcode = vn_reference_compute_hash (&vr1); |
2369 | if (vn_reference_lookup_1 (&vr1, &result)) |
2370 | return result; |
2371 | if (TREE_CODE (value) == SSA_NAME)
2372 | value_id = VN_INFO (value)->value_id; |
2373 | else |
2374 | value_id = get_or_alloc_constant_value_id (value); |
2375 | return vn_reference_insert_pieces (vuse, set, base_set, type, |
2376 | operands.copy (), value, value_id); |
2377 | } |
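     | /* Editor's note (not in the GCC source): this helper is a classic
     |    get-or-insert.  The lookup and the insertion hash exactly the same
     |    fields (vuse, set, base_set, type, operands), and the value id is
     |    taken from the SSA name's VN info or allocated for the constant,
     |    so equal values end up sharing one id.  */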
2378 | |
2379 | /* Return a value-number for RCODE OPS... either by looking up an existing |
2380 | value-number for the possibly simplified result or by inserting the |
2381 | operation if INSERT is true. If SIMPLIFY is false, return a value |
2382 | number for the unsimplified expression. */ |
2383 | |
2384 | static tree |
2385 | vn_nary_build_or_lookup_1 (gimple_match_op *res_op, bool insert, |
2386 | bool simplify) |
2387 | { |
2388 | tree result = NULL_TREE;
2389 | /* We will be creating a value number for |
2390 | RCODE (OPS...). |
2391 | So first simplify and lookup this expression to see if it |
2392 | is already available. */ |
2393 | /* For simplification valueize. */ |
2394 | unsigned i = 0; |
2395 | if (simplify) |
2396 | for (i = 0; i < res_op->num_ops; ++i) |
2397 | if (TREE_CODE (res_op->ops[i]) == SSA_NAME)
2398 | { |
2399 | tree tem = vn_valueize (res_op->ops[i]); |
2400 | if (!tem) |
2401 | break; |
2402 | res_op->ops[i] = tem; |
2403 | } |
2404 | /* If valueization of an operand fails (it is not available), skip |
2405 | simplification. */ |
2406 | bool res = false; |
2407 | if (i == res_op->num_ops) |
2408 | { |
2409 | mprts_hook = vn_lookup_simplify_result; |
2410 | res = res_op->resimplify (NULL, vn_valueize);
2411 | mprts_hook = NULL;
2412 | }
2413 | gimple *new_stmt = NULL;
2414 | if (res |
2415 | && gimple_simplified_result_is_gimple_val (res_op)) |
2416 | { |
2417 | /* The expression is already available. */ |
2418 | result = res_op->ops[0]; |
2419 | /* Valueize it, simplification returns sth in AVAIL only. */ |
2420 | if (TREE_CODE (result) == SSA_NAME)
2421 | result = SSA_VAL (result); |
2422 | } |
2423 | else |
2424 | { |
2425 | tree val = vn_lookup_simplify_result (res_op); |
2426 | if (!val && insert) |
2427 | { |
2428 | gimple_seq stmts = NULL;
2429 | result = maybe_push_res_to_seq (res_op, &stmts); |
2430 | if (result) |
2431 | { |
2432 | gcc_assert (gimple_seq_singleton_p (stmts));
2433 | new_stmt = gimple_seq_first_stmt (stmts); |
2434 | } |
2435 | } |
2436 | else |
2437 | /* The expression is already available. */ |
2438 | result = val; |
2439 | } |
2440 | if (new_stmt) |
2441 | { |
2442 | /* The expression is not yet available, value-number lhs to |
2443 | the new SSA_NAME we created. */ |
2444 | /* Initialize value-number information properly. */ |
2445 | vn_ssa_aux_t result_info = VN_INFO (result); |
2446 | result_info->valnum = result; |
2447 | result_info->value_id = get_next_value_id (); |
2448 | result_info->visited = 1; |
2449 | gimple_seq_add_stmt_without_update (&VN_INFO (result)->expr, |
2450 | new_stmt); |
2451 | result_info->needs_insertion = true; |
2452 | /* ??? PRE phi-translation inserts NARYs without corresponding |
2453 | SSA name result. Re-use those but set their result according |
2454 | to the stmt we just built. */ |
2455 | vn_nary_op_t nary = NULL;
2456 | vn_nary_op_lookup_stmt (new_stmt, &nary); |
2457 | if (nary) |
2458 | { |
2459 | gcc_assert (! nary->predicated_values && nary->u.result == NULL_TREE);
2460 | nary->u.result = gimple_assign_lhs (new_stmt); |
2461 | } |
2462 | /* As all "inserted" statements are singleton SCCs, insert |
2463 | to the valid table. This is strictly needed to |
2464 | avoid re-generating new value SSA_NAMEs for the same |
2465 | expression during SCC iteration over and over (the |
2466 | optimistic table gets cleared after each iteration). |
2467 | We do not need to insert into the optimistic table, as |
2468 | lookups there will fall back to the valid table. */ |
2469 | else |
2470 | { |
2471 | unsigned int length = vn_nary_length_from_stmt (new_stmt); |
2472 | vn_nary_op_t vno1 |
2473 | = alloc_vn_nary_op_noinit (length, &vn_tables_insert_obstack); |
2474 | vno1->value_id = result_info->value_id; |
2475 | vno1->length = length; |
2476 | vno1->predicated_values = 0; |
2477 | vno1->u.result = result; |
2478 | init_vn_nary_op_from_stmt (vno1, as_a <gassign *> (new_stmt)); |
2479 | vn_nary_op_insert_into (vno1, valid_info->nary); |
2480 | /* Also do not link it into the undo chain. */ |
2481 | last_inserted_nary = vno1->next; |
2482 | vno1->next = (vn_nary_op_t)(void *)-1; |
2483 | } |
2484 | if (dump_file && (dump_flags & TDF_DETAILS)) |
2485 | { |
2486 | fprintf (dump_file, "Inserting name "); |
2487 | print_generic_expr (dump_file, result); |
2488 | fprintf (dump_file, " for expression "); |
2489 | print_gimple_expr (dump_file, new_stmt, 0, TDF_SLIM); |
2490 | fprintf (dump_file, "\n"); |
2491 | } |
2492 | } |
2493 | return result; |
2494 | } |
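     | /* Editor's note (not in the GCC source): in summary the function above
     |    has three outcomes: (1) simplification yields an SSA name or
     |    constant, which is returned valueized; (2) the (un)simplified
     |    expression is already in the tables and the cached value is
     |    returned; (3) with INSERT, a fresh SSA name is created, numbered to
     |    itself and marked needs_insertion so elimination can materialize
     |    its defining statement.  */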
2495 | |
2496 | /* Return a value-number for RCODE OPS... either by looking up an existing |
2497 | value-number for the simplified result or by inserting the operation. */ |
2498 | |
2499 | static tree |
2500 | vn_nary_build_or_lookup (gimple_match_op *res_op) |
2501 | { |
2502 | return vn_nary_build_or_lookup_1 (res_op, true, true); |
2503 | } |
2504 | |
2505 | /* Try to simplify the expression RCODE OPS... of type TYPE and return |
2506 | its value if present. */ |
2507 | |
2508 | tree |
2509 | vn_nary_simplify (vn_nary_op_t nary) |
2510 | { |
2511 | if (nary->length > gimple_match_op::MAX_NUM_OPS) |
2512 | return NULL_TREE;
2513 | gimple_match_op op (gimple_match_cond::UNCOND, nary->opcode, |
2514 | nary->type, nary->length); |
2515 | memcpy (op.ops, nary->op, sizeof (tree) * nary->length); |
2516 | return vn_nary_build_or_lookup_1 (&op, false, true); |
2517 | } |
2518 | |
2519 | /* Elimination engine. */ |
2520 | |
2521 | class eliminate_dom_walker : public dom_walker |
2522 | { |
2523 | public: |
2524 | eliminate_dom_walker (cdi_direction, bitmap); |
2525 | ~eliminate_dom_walker (); |
2526 | |
2527 | edge before_dom_children (basic_block) final override; |
2528 | void after_dom_children (basic_block) final override; |
2529 | |
2530 | virtual tree eliminate_avail (basic_block, tree op); |
2531 | virtual void eliminate_push_avail (basic_block, tree op); |
2532 | tree eliminate_insert (basic_block, gimple_stmt_iterator *gsi, tree val); |
2533 | |
2534 | void eliminate_stmt (basic_block, gimple_stmt_iterator *); |
2535 | |
2536 | unsigned eliminate_cleanup (bool region_p = false); |
2537 | |
2538 | bool do_pre; |
2539 | unsigned int el_todo; |
2540 | unsigned int eliminations; |
2541 | unsigned int insertions; |
2542 | |
2543 | /* SSA names that had their defs inserted by PRE if do_pre. */ |
2544 | bitmap inserted_exprs; |
2545 | |
2546 | /* Blocks with statements that have had their EH properties changed. */ |
2547 | bitmap need_eh_cleanup; |
2548 | |
2549 | /* Blocks with statements that have had their AB properties changed. */ |
2550 | bitmap need_ab_cleanup; |
2551 | |
2552 | /* Local state for the eliminate domwalk. */ |
2553 | auto_vec<gimple *> to_remove; |
2554 | auto_vec<gimple *> to_fixup; |
2555 | auto_vec<tree> avail; |
2556 | auto_vec<tree> avail_stack; |
2557 | }; |
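     | /* Editor's sketch (not in the GCC source): a dom_walker subclass is
     |    driven by calling walk () on a root block; before_dom_children runs
     |    on each block in dominator-tree preorder and after_dom_children on
     |    the way back up.  Hedged usage, with names as used elsewhere in GCC:
     |      eliminate_dom_walker walker (CDI_DOMINATORS, inserted_exprs);
     |      walker.walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
     |      unsigned todo = walker.eliminate_cleanup ();  */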
2558 | |
2559 | /* Adaptor to the elimination engine using RPO availability. */ |
2560 | |
2561 | class rpo_elim : public eliminate_dom_walker |
2562 | { |
2563 | public: |
2564 | rpo_elim(basic_block entry_) |
2565 | : eliminate_dom_walker (CDI_DOMINATORS, NULL), entry (entry_),
2566 | m_avail_freelist (NULL) {}
2567 | |
2568 | tree eliminate_avail (basic_block, tree op) final override; |
2569 | |
2570 | void eliminate_push_avail (basic_block, tree) final override; |
2571 | |
2572 | basic_block entry; |
2573 | /* Freelist of avail entries which are allocated from the vn_ssa_aux |
2574 | obstack. */ |
2575 | vn_avail *m_avail_freelist; |
2576 | }; |
2577 | |
2578 | /* Global RPO state for access from hooks. */ |
2579 | static eliminate_dom_walker *rpo_avail; |
2580 | basic_block vn_context_bb; |
2581 | |
2582 | /* Return true if BASE1 and BASE2 can be adjusted so they have the |
2583 | same address and adjust *OFFSET1 and *OFFSET2 accordingly. |
2584 | Otherwise return false. */ |
2585 | |
2586 | static bool |
2587 | adjust_offsets_for_equal_base_address (tree base1, poly_int64 *offset1, |
2588 | tree base2, poly_int64 *offset2) |
2589 | { |
2590 | poly_int64 soff; |
2591 | if (TREE_CODE (base1) == MEM_REF
2592 | && TREE_CODE (base2) == MEM_REF)
2593 | { |
2594 | if (mem_ref_offset (base1).to_shwi (&soff)) |
2595 | { |
2596 | base1 = TREE_OPERAND (base1, 0);
2597 | *offset1 += soff * BITS_PER_UNIT;
2598 | } |
2599 | if (mem_ref_offset (base2).to_shwi (&soff)) |
2600 | { |
2601 | base2 = TREE_OPERAND (base2, 0);
2602 | *offset2 += soff * BITS_PER_UNIT;
2603 | } |
2604 | return operand_equal_p (base1, base2, 0); |
2605 | } |
2606 | return operand_equal_p (base1, base2, OEP_ADDRESS_OF); |
2607 | } |
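     | /* Editor's worked example (not in the GCC source): for
     |    base1 = MEM[p + 4] and base2 = MEM[p + 8] with BITS_PER_UNIT == 8,
     |    the constant offsets fold into *offset1 += 32 and *offset2 += 64,
     |    after which both bases are the pointer p and compare equal.  */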
2608 | |
2609 | /* Callback for walk_non_aliased_vuses. Tries to perform a lookup |
2610 | from the statement defining VUSE and if not successful tries to |
2611 | translate *REFP and VR_ through an aggregate copy at the definition |
2612 | of VUSE. If *DISAMBIGUATE_ONLY is true then do not perform translation |
2613 | of *REF and *VR. If only disambiguation was performed then |
2614 | *DISAMBIGUATE_ONLY is set to true. */ |
2615 | |
2616 | static void * |
2617 | vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *data_, |
2618 | translate_flags *disambiguate_only) |
2619 | { |
2620 | vn_walk_cb_data *data = (vn_walk_cb_data *)data_; |
2621 | vn_reference_t vr = data->vr; |
2622 | gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
2623 | tree base = ao_ref_base (ref);
2624 | HOST_WIDE_INT offseti = 0, maxsizei, sizei = 0;
2625 | static vec<vn_reference_op_s> lhs_ops; |
2626 | ao_ref lhs_ref; |
2627 | bool lhs_ref_ok = false; |
2628 | poly_int64 copy_size; |
2629 | |
2630 | /* First try to disambiguate after value-replacing in the definitions LHS. */ |
2631 | if (is_gimple_assign (def_stmt)) |
2632 | { |
2633 | tree lhs = gimple_assign_lhs (def_stmt); |
2634 | bool valueized_anything = false; |
2635 | /* Avoid re-allocation overhead. */ |
2636 | lhs_ops.truncate (0); |
2637 | basic_block saved_rpo_bb = vn_context_bb; |
2638 | vn_context_bb = gimple_bb (def_stmt); |
2639 | if (*disambiguate_only <= TR_VALUEIZE_AND_DISAMBIGUATE) |
2640 | { |
2641 | copy_reference_ops_from_ref (lhs, &lhs_ops); |
2642 | valueize_refs_1 (&lhs_ops, &valueized_anything, true); |
2643 | } |
2644 | vn_context_bb = saved_rpo_bb; |
2645 | ao_ref_init (&lhs_ref, lhs); |
2646 | lhs_ref_ok = true; |
2647 | if (valueized_anything |
2648 | && ao_ref_init_from_vn_reference |
2649 | (&lhs_ref, ao_ref_alias_set (&lhs_ref), |
2650 | ao_ref_base_alias_set (&lhs_ref), TREE_TYPE (lhs), lhs_ops)
2651 | && !refs_may_alias_p_1 (ref, &lhs_ref, data->tbaa_p)) |
2652 | { |
2653 | *disambiguate_only = TR_VALUEIZE_AND_DISAMBIGUATE; |
2654 | return NULL;
2655 | } |
2656 | |
2657 | /* When the def is a CLOBBER we can optimistically disambiguate |
2658 | against it since any overlap with it would be undefined behavior.
2659 | Avoid this for obvious must aliases to save compile-time though. |
2660 | We also may not do this when the query is used for redundant |
2661 | store removal. */ |
2662 | if (!data->redundant_store_removal_p |
2663 | && gimple_clobber_p (def_stmt) |
2664 | && !operand_equal_p (ao_ref_base (&lhs_ref), base, OEP_ADDRESS_OF)) |
2665 | { |
2666 | *disambiguate_only = TR_DISAMBIGUATE; |
2667 | return NULL;
2668 | } |
2669 | |
2670 | /* Besides valueizing the LHS we can also use access-path based |
2671 | disambiguation on the original non-valueized ref. */ |
2672 | if (!ref->ref |
2673 | && lhs_ref_ok |
2674 | && data->orig_ref.ref) |
2675 | { |
2676 | /* We want to use the non-valueized LHS for this, but avoid redundant |
2677 | work. */ |
2678 | ao_ref *lref = &lhs_ref; |
2679 | ao_ref lref_alt; |
2680 | if (valueized_anything) |
2681 | { |
2682 | ao_ref_init (&lref_alt, lhs); |
2683 | lref = &lref_alt; |
2684 | } |
2685 | if (!refs_may_alias_p_1 (&data->orig_ref, lref, data->tbaa_p)) |
2686 | { |
2687 | *disambiguate_only = (valueized_anything |
2688 | ? TR_VALUEIZE_AND_DISAMBIGUATE |
2689 | : TR_DISAMBIGUATE); |
2690 | return NULL;
2691 | } |
2692 | } |
2693 | |
2694 | /* If we reach a clobbering statement try to skip it and see if |
2695 | we find a VN result with exactly the same value as the |
2696 | possible clobber. In this case we can ignore the clobber |
2697 | and return the found value. */ |
2698 | if (is_gimple_reg_type (TREE_TYPE (lhs))
2699 | && types_compatible_p (TREE_TYPE (lhs), vr->type)
2700 | && (ref->ref || data->orig_ref.ref) |
2701 | && !data->mask |
2702 | && data->partial_defs.is_empty () |
2703 | && multiple_p (get_object_alignment |
2704 | (ref->ref ? ref->ref : data->orig_ref.ref), |
2705 | ref->size) |
2706 | && multiple_p (get_object_alignment (lhs), ref->size)) |
2707 | { |
2708 | tree rhs = gimple_assign_rhs1 (def_stmt); |
2709 | /* ??? We may not compare to ahead values which might be from |
2710 | a different loop iteration but only to loop invariants. Use |
2711 | CONSTANT_CLASS_P (unvalueized!) as conservative approximation. |
2712 | The one-hop lookup below doesn't have this issue since there's |
2713 | a virtual PHI before we ever reach a backedge to cross. |
2714 | We can skip multiple defs as long as they are from the same |
2715 | value though. */ |
2716 | if (data->same_val |
2717 | && !operand_equal_p (data->same_val, rhs)) |
2718 | ; |
2719 | else if (CONSTANT_CLASS_P (rhs))
2720 | { |
2721 | if (dump_file && (dump_flags & TDF_DETAILS)) |
2722 | { |
2723 | fprintf (dump_file, |
2724 | "Skipping possible redundant definition "); |
2725 | print_gimple_stmt (dump_file, def_stmt, 0); |
2726 | } |
2727 | /* Delay the actual compare of the values to the end of the walk |
2728 | but do not update last_vuse from here. */ |
2729 | data->last_vuse_ptr = NULL;
2730 | data->same_val = rhs;
2731 | return NULL;
2732 | } |
2733 | else |
2734 | { |
2735 | tree *saved_last_vuse_ptr = data->last_vuse_ptr; |
2736 | /* Do not update last_vuse_ptr in vn_reference_lookup_2. */ |
2737 | data->last_vuse_ptr = NULL;
2738 | tree saved_vuse = vr->vuse; |
2739 | hashval_t saved_hashcode = vr->hashcode; |
2740 | void *res = vn_reference_lookup_2 (ref, gimple_vuse (def_stmt), |
2741 | data); |
2742 | /* Need to restore vr->vuse and vr->hashcode. */ |
2743 | vr->vuse = saved_vuse; |
2744 | vr->hashcode = saved_hashcode; |
2745 | data->last_vuse_ptr = saved_last_vuse_ptr; |
2746 | if (res && res != (void *)-1) |
2747 | { |
2748 | vn_reference_t vnresult = (vn_reference_t) res; |
2749 | if (TREE_CODE (rhs) == SSA_NAME)
2750 | rhs = SSA_VAL (rhs); |
2751 | if (vnresult->result |
2752 | && operand_equal_p (vnresult->result, rhs, 0)) |
2753 | return res; |
2754 | } |
2755 | } |
2756 | } |
2757 | } |
2758 | else if (*disambiguate_only <= TR_VALUEIZE_AND_DISAMBIGUATE |
2759 | && gimple_call_builtin_p (def_stmt, BUILT_IN_NORMAL) |
2760 | && gimple_call_num_args (def_stmt) <= 4) |
2761 | { |
2762 | /* For builtin calls valueize its arguments and call the |
2763 | alias oracle again. Valueization may improve points-to |
2764 | info of pointers and constify size and position arguments. |
2765 | Originally this was motivated by PR61034 which has |
2766 | conditional calls to free falsely clobbering ref because |
2767 | of imprecise points-to info of the argument. */ |
2768 | tree oldargs[4]; |
2769 | bool valueized_anything = false; |
2770 | for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i) |
2771 | { |
2772 | oldargs[i] = gimple_call_arg (def_stmt, i); |
2773 | tree val = vn_valueize (oldargs[i]); |
2774 | if (val != oldargs[i]) |
2775 | { |
2776 | gimple_call_set_arg (def_stmt, i, val); |
2777 | valueized_anything = true; |
2778 | } |
2779 | } |
2780 | if (valueized_anything) |
2781 | { |
2782 | bool res = call_may_clobber_ref_p_1 (as_a <gcall *> (def_stmt), |
2783 | ref, data->tbaa_p); |
2784 | for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i) |
2785 | gimple_call_set_arg (def_stmt, i, oldargs[i]); |
2786 | if (!res) |
2787 | { |
2788 | *disambiguate_only = TR_VALUEIZE_AND_DISAMBIGUATE; |
2789 | return NULL;
2790 | } |
2791 | } |
2792 | } |
2793 | |
2794 | if (*disambiguate_only > TR_TRANSLATE) |
2795 | return (void *)-1; |
2796 | |
2797 | /* If we cannot constrain the size of the reference we cannot |
2798 | test if anything kills it. */ |
2799 | if (!ref->max_size_known_p ()) |
2800 | return (void *)-1; |
2801 | |
2802 | poly_int64 offset = ref->offset; |
2803 | poly_int64 maxsize = ref->max_size; |
2804 | |
2805 | /* def_stmt may-defs *ref. See if we can derive a value for *ref |
2806 | from that definition. |
2807 | 1) Memset. */ |
2808 | if (is_gimple_reg_type (vr->type) |
2809 | && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET) |
2810 | || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET_CHK)) |
2811 | && (integer_zerop (gimple_call_arg (def_stmt, 1)) |
2812 | || ((TREE_CODE (gimple_call_arg (def_stmt, 1)) == INTEGER_CST
2813 | || (INTEGRAL_TYPE_P (vr->type) && known_eq (ref->size, 8)))
2814 | && CHAR_BIT == 8
2815 | && BITS_PER_UNIT == 8
2816 | && BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN
2817 | && offset.is_constant (&offseti)
2818 | && ref->size.is_constant (&sizei)
2819 | && (offseti % BITS_PER_UNIT == 0
2820 | || TREE_CODE (gimple_call_arg (def_stmt, 1)) == INTEGER_CST)))
2821 | && (poly_int_tree_p (gimple_call_arg (def_stmt, 2))
2822 | || (TREE_CODE (gimple_call_arg (def_stmt, 2)) == SSA_NAME
2823 | && poly_int_tree_p (SSA_VAL (gimple_call_arg (def_stmt, 2)))))
2824 | && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
2825 | || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME))
2826 | { |
2827 | tree base2; |
2828 | poly_int64 offset2, size2, maxsize2; |
2829 | bool reverse; |
2830 | tree ref2 = gimple_call_arg (def_stmt, 0); |
2831 | if (TREE_CODE (ref2) == SSA_NAME)
2832 | { |
2833 | ref2 = SSA_VAL (ref2); |
2834 | if (TREE_CODE (ref2) == SSA_NAME
2835 | && (TREE_CODE (base) != MEM_REF
2836 | || TREE_OPERAND (base, 0) != ref2))
2837 | { |
2838 | gimple *def_stmt = SSA_NAME_DEF_STMT (ref2);
2839 | if (gimple_assign_single_p (def_stmt) |
2840 | && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR) |
2841 | ref2 = gimple_assign_rhs1 (def_stmt); |
2842 | } |
2843 | } |
2844 | if (TREE_CODE (ref2) == ADDR_EXPR)
2845 | {
2846 | ref2 = TREE_OPERAND (ref2, 0);
2847 | base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2, |
2848 | &reverse); |
2849 | if (!known_size_p (maxsize2) |
2850 | || !known_eq (maxsize2, size2)
2851 | || !operand_equal_p (base, base2, OEP_ADDRESS_OF)) |
2852 | return (void *)-1; |
2853 | } |
2854 | else if (TREE_CODE (ref2) == SSA_NAME)
2855 | { |
2856 | poly_int64 soff; |
2857 | if (TREE_CODE (base) != MEM_REF
2858 | || !(mem_ref_offset (base)
2859 | << LOG2_BITS_PER_UNIT).to_shwi (&soff))
2860 | return (void *)-1; |
2861 | offset += soff; |
2862 | offset2 = 0; |
2863 | if (TREE_OPERAND (base, 0) != ref2)
2864 | {
2865 | gimple *def = SSA_NAME_DEF_STMT (ref2);
2866 | if (is_gimple_assign (def) |
2867 | && gimple_assign_rhs_code (def) == POINTER_PLUS_EXPR |
2868 | && gimple_assign_rhs1 (def) == TREE_OPERAND (base, 0)
2869 | && poly_int_tree_p (gimple_assign_rhs2 (def))) |
2870 | { |
2871 | tree rhs2 = gimple_assign_rhs2 (def); |
2872 | if (!(poly_offset_int::from (wi::to_poly_wide (rhs2), |
2873 | SIGNED) |
2874 | << LOG2_BITS_PER_UNIT).to_shwi (&offset2))
2875 | return (void *)-1; |
2876 | ref2 = gimple_assign_rhs1 (def); |
2877 | if (TREE_CODE (ref2) == SSA_NAME)
2878 | ref2 = SSA_VAL (ref2); |
2879 | } |
2880 | else |
2881 | return (void *)-1; |
2882 | } |
2883 | } |
2884 | else |
2885 | return (void *)-1; |
2886 | tree len = gimple_call_arg (def_stmt, 2); |
2887 | HOST_WIDE_INT leni, offset2i;
2888 | if (TREE_CODE (len) == SSA_NAME)
2889 | len = SSA_VAL (len); |
2890 | /* Sometimes the above trickery is smarter than alias analysis. Take |
2891 | advantage of that. */ |
2892 | if (!ranges_maybe_overlap_p (offset, maxsize, offset2, |
2893 | (wi::to_poly_offset (len) |
2894 | << LOG2_BITS_PER_UNIT)))
2895 | return NULL;
2896 | if (data->partial_defs.is_empty () |
2897 | && known_subrange_p (offset, maxsize, offset2, |
2898 | wi::to_poly_offset (len) << LOG2_BITS_PER_UNIT))
2899 | { |
2900 | tree val; |
2901 | if (integer_zerop (gimple_call_arg (def_stmt, 1))) |
2902 | val = build_zero_cst (vr->type); |
2903 | else if (INTEGRAL_TYPE_P (vr->type)
2904 | && known_eq (ref->size, 8)
2905 | && offseti % BITS_PER_UNIT == 0)
2906 | { |
2907 | gimple_match_op res_op (gimple_match_cond::UNCOND, NOP_EXPR, |
2908 | vr->type, gimple_call_arg (def_stmt, 1)); |
2909 | val = vn_nary_build_or_lookup (&res_op); |
2910 | if (!val |
2911 | || (TREE_CODE (val) == SSA_NAME
2912 | && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val)))
2913 | return (void *)-1; |
2914 | } |
2915 | else |
2916 | { |
2917 | unsigned buflen = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (vr->type)) + 1;
2918 | if (INTEGRAL_TYPE_P (vr->type))
2919 | buflen = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (vr->type)) + 1;
2920 | unsigned char *buf = XALLOCAVEC (unsigned char, buflen);
2921 | memset (buf, TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 1)),
2922 | buflen);
2923 | if (BYTES_BIG_ENDIAN)
2924 | { |
2925 | unsigned int amnt |
2926 | = (((unsigned HOST_WIDE_INT) offseti + sizei)
2927 | % BITS_PER_UNIT);
2928 | if (amnt) |
2929 | { |
2930 | shift_bytes_in_array_right (buf, buflen,
2931 | BITS_PER_UNIT - amnt);
2932 | buf++; |
2933 | buflen--; |
2934 | } |
2935 | } |
2936 | else if (offseti % BITS_PER_UNIT != 0)
2937 | { |
2938 | unsigned int amnt |
2939 | = BITS_PER_UNIT - ((unsigned HOST_WIDE_INT) offseti
2940 | % BITS_PER_UNIT);
2941 | shift_bytes_in_array_left (buf, buflen, amnt); |
2942 | buf++; |
2943 | buflen--; |
2944 | } |
2945 | val = native_interpret_expr (vr->type, buf, buflen); |
2946 | if (!val) |
2947 | return (void *)-1; |
2948 | } |
2949 | return data->finish (0, 0, val); |
2950 | } |
2951 | /* For now handle clearing memory with partial defs. */ |
2952 | else if (known_eq (ref->size, maxsize)
2953 | && integer_zerop (gimple_call_arg (def_stmt, 1)) |
2954 | && tree_fits_poly_int64_p (len) |
2955 | && tree_to_poly_int64 (len).is_constant (&leni) |
2956 | && leni <= INTTYPE_MAXIMUM (HOST_WIDE_INT) / BITS_PER_UNIT
2957 | && offset.is_constant (&offseti) |
2958 | && offset2.is_constant (&offset2i) |
2959 | && maxsize.is_constant (&maxsizei) |
2960 | && ranges_known_overlap_p (offseti, maxsizei, offset2i, |
2961 | leni << LOG2_BITS_PER_UNIT))
2962 | { |
2963 | pd_data pd; |
2964 | pd.rhs = build_constructor (NULL_TREE, NULL);
2965 | pd.rhs_off = 0;
2966 | pd.offset = offset2i;
2967 | pd.size = leni << LOG2_BITS_PER_UNIT;
2968 | return data->push_partial_def (pd, 0, 0, offseti, maxsizei); |
2969 | } |
2970 | } |
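     | /* Editor's example (not in the GCC source): for
     |      memset (&s, 0, sizeof (s));
     |      ... = s.f;
     |    the read is a known subrange of the memset destination, so the code
     |    above returns build_zero_cst (vr->type) without consulting memory;
     |    a non-zero constant byte goes through the native_interpret_expr
     |    path instead.  */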
2971 | |
2972 | /* 2) Assignment from an empty CONSTRUCTOR. */ |
2973 | else if (is_gimple_reg_type (vr->type) |
2974 | && gimple_assign_single_p (def_stmt) |
2975 | && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR |
2976 | && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
2977 | { |
2978 | tree base2; |
2979 | poly_int64 offset2, size2, maxsize2; |
2980 | HOST_WIDE_INT offset2i, size2i;
2981 | gcc_assert (lhs_ref_ok);
2982 | base2 = ao_ref_base (&lhs_ref); |
2983 | offset2 = lhs_ref.offset; |
2984 | size2 = lhs_ref.size; |
2985 | maxsize2 = lhs_ref.max_size; |
2986 | if (known_size_p (maxsize2) |
2987 | && known_eq (maxsize2, size2)
2988 | && adjust_offsets_for_equal_base_address (base, &offset, |
2989 | base2, &offset2)) |
2990 | { |
2991 | if (data->partial_defs.is_empty () |
2992 | && known_subrange_p (offset, maxsize, offset2, size2)) |
2993 | { |
2994 | /* While technically undefined behavior do not optimize |
2995 | a full read from a clobber. */ |
2996 | if (gimple_clobber_p (def_stmt)) |
2997 | return (void *)-1; |
2998 | tree val = build_zero_cst (vr->type); |
2999 | return data->finish (ao_ref_alias_set (&lhs_ref), |
3000 | ao_ref_base_alias_set (&lhs_ref), val); |
3001 | } |
3002 | else if (known_eq (ref->size, maxsize)
3003 | && maxsize.is_constant (&maxsizei) |
3004 | && offset.is_constant (&offseti) |
3005 | && offset2.is_constant (&offset2i) |
3006 | && size2.is_constant (&size2i) |
3007 | && ranges_known_overlap_p (offseti, maxsizei, |
3008 | offset2i, size2i)) |
3009 | { |
3010 | /* Let clobbers be consumed by the partial-def tracker |
3011 | which can choose to ignore them if they are shadowed |
3012 | by a later def. */ |
3013 | pd_data pd; |
3014 | pd.rhs = gimple_assign_rhs1 (def_stmt); |
3015 | pd.rhs_off = 0; |
3016 | pd.offset = offset2i; |
3017 | pd.size = size2i; |
3018 | return data->push_partial_def (pd, ao_ref_alias_set (&lhs_ref), |
3019 | ao_ref_base_alias_set (&lhs_ref), |
3020 | offseti, maxsizei); |
3021 | } |
3022 | } |
3023 | } |
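     | /* Editor's example (not in the GCC source): a gimple aggregate clear
     |      s = {};
     |    is an assignment from an empty CONSTRUCTOR; a read fully covered by
     |    it yields zero, and a partial overlap is recorded as a zeroing
     |    partial definition, just like the memset case above.  */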
3024 | |
3025 | /* 3) Assignment from a constant. We can use fold's native encode/interpret
3026 | routines to extract the assigned bits. */
3027 | else if (known_eq (ref->size, maxsize)
3028 | && is_gimple_reg_type (vr->type) |
3029 | && !reverse_storage_order_for_component_p (vr->operands) |
3030 | && !contains_storage_order_barrier_p (vr->operands) |
3031 | && gimple_assign_single_p (def_stmt) |
3032 | && CHAR_BIT == 8
3033 | && BITS_PER_UNIT == 8
3034 | && BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN
3035 | /* native_encode and native_decode operate on arrays of bytes |
3036 | and so fundamentally need a compile-time size and offset. */ |
3037 | && maxsize.is_constant (&maxsizei) |
3038 | && offset.is_constant (&offseti) |
3039 | && (is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)) |
3040 | || (TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME
3041 | && is_gimple_min_invariant (SSA_VAL (gimple_assign_rhs1 (def_stmt)))))) |
3042 | { |
3043 | tree lhs = gimple_assign_lhs (def_stmt); |
3044 | tree base2; |
3045 | poly_int64 offset2, size2, maxsize2; |
3046 | HOST_WIDE_INT offset2i, size2i;
3047 | bool reverse;
3048 | gcc_assert (lhs_ref_ok);
3049 | base2 = ao_ref_base (&lhs_ref); |
3050 | offset2 = lhs_ref.offset; |
3051 | size2 = lhs_ref.size; |
3052 | maxsize2 = lhs_ref.max_size; |
3053 | reverse = reverse_storage_order_for_component_p (lhs); |
3054 | if (base2 |
3055 | && !reverse |
3056 | && !storage_order_barrier_p (lhs) |
3057 | && known_eq (maxsize2, size2)
3058 | && adjust_offsets_for_equal_base_address (base, &offset, |
3059 | base2, &offset2) |
3060 | && offset.is_constant (&offseti) |
3061 | && offset2.is_constant (&offset2i) |
3062 | && size2.is_constant (&size2i)) |
3063 | { |
3064 | if (data->partial_defs.is_empty () |
3065 | && known_subrange_p (offseti, maxsizei, offset2, size2)) |
3066 | { |
3067 | /* We support up to 512-bit values (for V8DFmode). */ |
3068 | unsigned char buffer[65]; |
3069 | int len; |
3070 | |
3071 | tree rhs = gimple_assign_rhs1 (def_stmt); |
3072 | if (TREE_CODE (rhs) == SSA_NAME)
3073 | rhs = SSA_VAL (rhs); |
3074 | len = native_encode_expr (rhs, |
3075 | buffer, sizeof (buffer) - 1, |
3076 | (offseti - offset2i) / BITS_PER_UNIT);
3077 | if (len > 0 && len * BITS_PER_UNIT >= maxsizei)
3078 | { |
3079 | tree type = vr->type; |
3080 | unsigned char *buf = buffer; |
3081 | unsigned int amnt = 0; |
3082 | /* Make sure to interpret in a type that has a range |
3083 | covering the whole access size. */ |
3084 | if (INTEGRAL_TYPE_P (vr->type)
3085 | && maxsizei != TYPE_PRECISION (vr->type))
3086 | type = build_nonstandard_integer_type (maxsizei,
3087 | TYPE_UNSIGNED (type));
3088 | if (BYTES_BIG_ENDIAN)
3089 | { |
3090 | /* For big-endian native_encode_expr stored the rhs |
3091 | such that the LSB of it is the LSB of buffer[len - 1]. |
3092 | That bit is stored into memory at position |
3093 | offset2 + size2 - 1, i.e. in byte |
3094 | base + (offset2 + size2 - 1) / BITS_PER_UNIT. |
3095 | E.g. for offset2 1 and size2 14, rhs -1 and memory |
3096 | previously cleared that is: |
3097 | 0 1 |
3098 | 01111111|11111110 |
3099 | Now, if we want to extract offset 2 and size 12 from |
3100 | it using native_interpret_expr (which actually works |
3101 | for integral bitfield types in terms of byte size of |
3102 | the mode), the native_encode_expr stored the value |
3103 | into buffer as |
3104 | XX111111|11111111 |
3105 | and returned len 2 (the X bits are outside of |
3106 | precision). |
3107 | Let sz be maxsize / BITS_PER_UNIT if not extracting |
3108 | a bitfield, and GET_MODE_SIZE otherwise. |
3109 | We need to align the LSB of the value we want to |
3110 | extract as the LSB of buf[sz - 1]. |
3111 | The LSB from memory we need to read is at position |
3112 | offset + maxsize - 1. */ |
3113 | HOST_WIDE_INT sz = maxsizei / BITS_PER_UNIT;
3114 | if (INTEGRAL_TYPE_P (type))
3115 | sz = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
3116 | amnt = ((unsigned HOST_WIDE_INT) offset2i + size2i
3117 | - offseti - maxsizei) % BITS_PER_UNIT;
3118 | if (amnt) |
3119 | shift_bytes_in_array_right (buffer, len, amnt); |
3120 | amnt = ((unsigned HOST_WIDE_INT) offset2i + size2i
3121 | - offseti - maxsizei - amnt) / BITS_PER_UNIT;
3122 | if ((unsigned HOST_WIDE_INT) sz + amnt > (unsigned) len)
3123 | len = 0; |
3124 | else |
3125 | { |
3126 | buf = buffer + len - sz - amnt; |
3127 | len -= (buf - buffer); |
3128 | } |
3129 | } |
3130 | else |
3131 | { |
3132 | amnt = ((unsigned HOST_WIDE_INT) offset2i
3133 | - offseti) % BITS_PER_UNIT;
3134 | if (amnt) |
3135 | { |
3136 | buffer[len] = 0; |
3137 | shift_bytes_in_array_left (buffer, len + 1, amnt); |
3138 | buf = buffer + 1; |
3139 | } |
3140 | } |
3141 | tree val = native_interpret_expr (type, buf, len); |
3142 | /* If we chop off bits because the type's precision doesn't
3143 | match the memory access size, this is ok when optimizing
3144 | reads but not when called from the DSE code during |
3145 | elimination. */ |
3146 | if (val |
3147 | && type != vr->type) |
3148 | { |
3149 | if (! int_fits_type_p (val, vr->type)) |
3150 | val = NULL_TREE;
3151 | else
3152 | val = fold_convert (vr->type, val);
3153 | } |
3154 | |
3155 | if (val) |
3156 | return data->finish (ao_ref_alias_set (&lhs_ref), |
3157 | ao_ref_base_alias_set (&lhs_ref), val); |
3158 | } |
3159 | } |
3160 | else if (ranges_known_overlap_p (offseti, maxsizei, offset2i, |
3161 | size2i)) |
3162 | { |
3163 | pd_data pd; |
3164 | tree rhs = gimple_assign_rhs1 (def_stmt); |
3165 | if (TREE_CODE (rhs) == SSA_NAME)
3166 | rhs = SSA_VAL (rhs); |
3167 | pd.rhs = rhs; |
3168 | pd.rhs_off = 0; |
3169 | pd.offset = offset2i; |
3170 | pd.size = size2i; |
3171 | return data->push_partial_def (pd, ao_ref_alias_set (&lhs_ref), |
3172 | ao_ref_base_alias_set (&lhs_ref), |
3173 | offseti, maxsizei); |
3174 | } |
3175 | } |
3176 | } |
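     | /* Editor's sketch (not part of GCC): case 3 in plain C, assuming a
     |    little-endian host.  The constant on the store's RHS is
     |    byte-serialized in the spirit of native_encode_expr, and a smaller
     |    read at a byte offset is answered from those bytes.  */
     | static unsigned short
     | read_through_constant_store_example (void)
     | {
     |   unsigned int store_val = 0x11223344;  /* constant RHS of the store */
     |   unsigned char buffer[4];
     |   memcpy (buffer, &store_val, 4);       /* "native_encode_expr" */
     |   unsigned short read_val;
     |   memcpy (&read_val, buffer + 1, 2);    /* read at bit offset 8, size 16 */
     |   return read_val;                      /* 0x2233 on little-endian hosts */
     | }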
3177 | |
3178 | /* 4) Assignment from an SSA name which definition we may be able |
3179 | to access pieces from or we can combine to a larger entity. */ |
3180 | else if (known_eq (ref->size, maxsize)
3181 | && is_gimple_reg_type (vr->type) |
3182 | && !reverse_storage_order_for_component_p (vr->operands) |
3183 | && !contains_storage_order_barrier_p (vr->operands) |
3184 | && gimple_assign_single_p (def_stmt) |
3185 | && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
3186 | { |
3187 | tree lhs = gimple_assign_lhs (def_stmt); |
3188 | tree base2; |
3189 | poly_int64 offset2, size2, maxsize2; |
3190 | HOST_WIDE_INT offset2i, size2i, offseti;
3191 | bool reverse;
3192 | gcc_assert (lhs_ref_ok);
3193 | base2 = ao_ref_base (&lhs_ref); |
3194 | offset2 = lhs_ref.offset; |
3195 | size2 = lhs_ref.size; |
3196 | maxsize2 = lhs_ref.max_size; |
3197 | reverse = reverse_storage_order_for_component_p (lhs); |
3198 | tree def_rhs = gimple_assign_rhs1 (def_stmt); |
3199 | if (!reverse |
3200 | && !storage_order_barrier_p (lhs) |
3201 | && known_size_p (maxsize2) |
3202 | && known_eq (maxsize2, size2)
3203 | && adjust_offsets_for_equal_base_address (base, &offset, |
3204 | base2, &offset2)) |
3205 | { |
3206 | if (data->partial_defs.is_empty () |
3207 | && known_subrange_p (offset, maxsize, offset2, size2) |
3208 | /* ??? We can't handle bitfield precision extracts without |
3209 | either using an alternate type for the BIT_FIELD_REF and |
3210 | then doing a conversion or possibly adjusting the offset |
3211 | according to endianness. */ |
3212 | && (! INTEGRAL_TYPE_P (vr->type)
3213 | || known_eq (ref->size, TYPE_PRECISION (vr->type)))
3214 | && multiple_p (ref->size, BITS_PER_UNIT))
3215 | { |
3216 | tree val = NULL_TREE;
3217 | if (! INTEGRAL_TYPE_P (TREE_TYPE (def_rhs))
3218 | || type_has_mode_precision_p (TREE_TYPE (def_rhs)))
3219 | { |
3220 | gimple_match_op op (gimple_match_cond::UNCOND, |
3221 | BIT_FIELD_REF, vr->type, |
3222 | SSA_VAL (def_rhs), |
3223 | bitsize_int (ref->size),
3224 | bitsize_int (offset - offset2));
3225 | val = vn_nary_build_or_lookup (&op); |
3226 | } |
3227 | else if (known_eq (ref->size, size2))
3228 | { |
3229 | gimple_match_op op (gimple_match_cond::UNCOND, |
3230 | VIEW_CONVERT_EXPR, vr->type, |
3231 | SSA_VAL (def_rhs)); |
3232 | val = vn_nary_build_or_lookup (&op); |
3233 | } |
3234 | if (val |
3235 | && (TREE_CODE (val) != SSA_NAME
3236 | || ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val)))
3237 | return data->finish (ao_ref_alias_set (&lhs_ref), |
3238 | ao_ref_base_alias_set (&lhs_ref), val); |
3239 | } |
3240 | else if (maxsize.is_constant (&maxsizei) |
3241 | && offset.is_constant (&offseti) |
3242 | && offset2.is_constant (&offset2i) |
3243 | && size2.is_constant (&size2i) |
3244 | && ranges_known_overlap_p (offset, maxsize, offset2, size2)) |
3245 | { |
3246 | pd_data pd; |
3247 | pd.rhs = SSA_VAL (def_rhs); |
3248 | pd.rhs_off = 0; |
3249 | pd.offset = offset2i; |
3250 | pd.size = size2i; |
3251 | return data->push_partial_def (pd, ao_ref_alias_set (&lhs_ref), |
3252 | ao_ref_base_alias_set (&lhs_ref), |
3253 | offseti, maxsizei); |
3254 | } |
3255 | } |
3256 | } |
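     | /* Editor's example (not in the GCC source): if the killing store is
     |      a = x_1;   (x_1 an SSA name of a 64-bit integer type)
     |    and the read covers bits [16, 48) of it, case 4 above builds
     |      BIT_FIELD_REF <x_1, 32, 16>
     |    (size ref->size, position offset - offset2) and value-numbers that
     |    instead of reading memory.  */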
3257 | |
3258 | /* 4b) Assignment done via one of the vectorizer internal store |
3259 | functions where we may be able to access pieces from or we can |
3260 | combine to a larger entity. */ |
3261 | else if (known_eq (ref->size, maxsize)
3262 | && is_gimple_reg_type (vr->type) |
3263 | && !reverse_storage_order_for_component_p (vr->operands) |
3264 | && !contains_storage_order_barrier_p (vr->operands) |
3265 | && is_gimple_call (def_stmt) |
3266 | && gimple_call_internal_p (def_stmt) |
3267 | && internal_store_fn_p (gimple_call_internal_fn (def_stmt))) |
3268 | { |
3269 | gcall *call = as_a <gcall *> (def_stmt); |
3270 | internal_fn fn = gimple_call_internal_fn (call); |
3271 | |
3272 | tree mask = NULL_TREE, len = NULL_TREE, bias = NULL_TREE;
3273 | switch (fn) |
3274 | { |
3275 | case IFN_MASK_STORE: |
3276 | mask = gimple_call_arg (call, internal_fn_mask_index (fn)); |
3277 | mask = vn_valueize (mask); |
3278 | if (TREE_CODE (mask) != VECTOR_CST)
3279 | return (void *)-1; |
3280 | break; |
3281 | case IFN_LEN_STORE: |
3282 | len = gimple_call_arg (call, 2); |
3283 | bias = gimple_call_arg (call, 4); |
3284 | if (!tree_fits_uhwi_p (len) || !tree_fits_shwi_p (bias)) |
3285 | return (void *)-1; |
3286 | break; |
3287 | default: |
3288 | return (void *)-1; |
3289 | } |
3290 | tree def_rhs = gimple_call_arg (call, |
3291 | internal_fn_stored_value_index (fn)); |
3292 | def_rhs = vn_valueize (def_rhs); |
3293 | if (TREE_CODE (def_rhs) != VECTOR_CST)
3294 | return (void *)-1; |
3295 | |
3296 | ao_ref_init_from_ptr_and_size (&lhs_ref, |
3297 | vn_valueize (gimple_call_arg (call, 0)), |
3298 | TYPE_SIZE_UNIT (TREE_TYPE (def_rhs)));
3299 | tree base2; |
3300 | poly_int64 offset2, size2, maxsize2; |
3301 | HOST_WIDE_INT offset2i, size2i, offseti;
3302 | base2 = ao_ref_base (&lhs_ref); |
3303 | offset2 = lhs_ref.offset; |
3304 | size2 = lhs_ref.size; |
3305 | maxsize2 = lhs_ref.max_size; |
3306 | if (known_size_p (maxsize2) |
3307 | && known_eq (maxsize2, size2)
3308 | && adjust_offsets_for_equal_base_address (base, &offset, |
3309 | base2, &offset2) |
3310 | && maxsize.is_constant (&maxsizei) |
3311 | && offset.is_constant (&offseti) |
3312 | && offset2.is_constant (&offset2i) |
3313 | && size2.is_constant (&size2i)) |
3314 | { |
3315 | if (!ranges_maybe_overlap_p (offset, maxsize, offset2, size2)) |
3316 | /* Poor-mans disambiguation. */ |
3317 | return NULL;
3318 | else if (ranges_known_overlap_p (offset, maxsize, offset2, size2)) |
3319 | { |
3320 | pd_data pd; |
3321 | pd.rhs = def_rhs; |
3322 | tree aa = gimple_call_arg (call, 1); |
3323 | alias_set_type set = get_deref_alias_set (TREE_TYPE (aa));
3324 | tree vectype = TREE_TYPE (def_rhs);
3325 | unsigned HOST_WIDE_INT elsz
3326 | = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (vectype)));
3327 | if (mask) |
3328 | { |
3329 | HOST_WIDE_INT start = 0, len = 0;
3330 | unsigned mask_idx = 0; |
3331 | do |
3332 | { |
3333 | if (integer_zerop (VECTOR_CST_ELT (mask, mask_idx)))
3334 | { |
3335 | if (len != 0) |
3336 | { |
3337 | pd.rhs_off = start; |
3338 | pd.offset = offset2i + start; |
3339 | pd.size = len; |
3340 | if (ranges_known_overlap_p |
3341 | (offset, maxsize, pd.offset, pd.size)) |
3342 | { |
3343 | void *res = data->push_partial_def |
3344 | (pd, set, set, offseti, maxsizei); |
3345 | if (res != NULL)
3346 | return res; |
3347 | } |
3348 | } |
3349 | start = (mask_idx + 1) * elsz; |
3350 | len = 0; |
3351 | } |
3352 | else |
3353 | len += elsz; |
3354 | mask_idx++; |
3355 | } |
3356 | while (known_lt (mask_idx, TYPE_VECTOR_SUBPARTS (vectype)));
3357 | if (len != 0) |
3358 | { |
3359 | pd.rhs_off = start; |
3360 | pd.offset = offset2i + start; |
3361 | pd.size = len; |
3362 | if (ranges_known_overlap_p (offset, maxsize, |
3363 | pd.offset, pd.size)) |
3364 | return data->push_partial_def (pd, set, set, |
3365 | offseti, maxsizei); |
3366 | } |
3367 | } |
3368 | else if (fn == IFN_LEN_STORE) |
3369 | { |
3370 | pd.offset = offset2i; |
3371 | pd.size = (tree_to_uhwi (len) |
3372 | + -tree_to_shwi (bias)) * BITS_PER_UNIT;
3373 | if (BYTES_BIG_ENDIAN)
3374 | pd.rhs_off = pd.size - tree_to_uhwi (TYPE_SIZE (vectype));
3375 | else |
3376 | pd.rhs_off = 0; |
3377 | if (ranges_known_overlap_p (offset, maxsize, |
3378 | pd.offset, pd.size)) |
3379 | return data->push_partial_def (pd, set, set, |
3380 | offseti, maxsizei); |
3381 | } |
3382 | else |
3383 | gcc_unreachable ();
3384 | return NULL;
3385 | } |
3386 | } |
3387 | } |
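     | /* Editor's example (not in the GCC source): for an IFN_MASK_STORE of a
     |    four-element vector with 32-bit lanes and mask {1,1,0,1}, the loop
     |    above emits two partial defs: lanes 0-1 as a 64-bit piece with
     |    rhs_off 0, and lane 3 as a 32-bit piece with rhs_off 96 bits.  */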
3388 | |
3389 | /* 5) For aggregate copies translate the reference through them if |
3390 | the copy kills ref. */ |
3391 | else if (data->vn_walk_kind == VN_WALKREWRITE |
3392 | && gimple_assign_single_p (def_stmt) |
3393 | && (DECL_P (gimple_assign_rhs1 (def_stmt))
3394 | || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
3395 | || handled_component_p (gimple_assign_rhs1 (def_stmt)))) |
3396 | { |
3397 | tree base2; |
3398 | int i, j, k; |
3399 | auto_vec<vn_reference_op_s> rhs; |
3400 | vn_reference_op_t vro; |
3401 | ao_ref r; |
3402 | |
3403 | gcc_assert (lhs_ref_ok);
3404 | |
3405 | /* See if the assignment kills REF. */ |
3406 | base2 = ao_ref_base (&lhs_ref); |
3407 | if (!lhs_ref.max_size_known_p () |
3408 | || (base != base2 |
3409 | && (TREE_CODE (base) != MEM_REF
3410 | || TREE_CODE (base2) != MEM_REF
3411 | || TREE_OPERAND (base, 0) != TREE_OPERAND (base2, 0)
3412 | || !tree_int_cst_equal (TREE_OPERAND (base, 1),
3413 | TREE_OPERAND (base2, 1))))
3414 | || !stmt_kills_ref_p (def_stmt, ref)) |
3415 | return (void *)-1; |
3416 | |
3417 | /* Find the common base of ref and the lhs. lhs_ops already |
3418 | contains valueized operands for the lhs. */ |
3419 | i = vr->operands.length () - 1; |
3420 | j = lhs_ops.length () - 1; |
3421 | while (j >= 0 && i >= 0 |
3422 | && vn_reference_op_eq (&vr->operands[i], &lhs_ops[j])) |
3423 | { |
3424 | i--; |
3425 | j--; |
3426 | } |
3427 | |
3428 | /* ??? The innermost op should always be a MEM_REF and we already |
3429 | checked that the assignment to the lhs kills vr. Thus for |
3430 | aggregate copies using char[] types the vn_reference_op_eq |
3431 | may fail when comparing types for compatibility. But we really |
3432 | don't care here - further lookups with the rewritten operands |
3433 | will simply fail if we messed up types too badly. */ |
3434 | poly_int64 extra_off = 0; |
3435 | if (j == 0 && i >= 0 |
3436 | && lhs_ops[0].opcode == MEM_REF |
3437 | && maybe_ne (lhs_ops[0].off, -1)) |
3438 | { |
3439 | if (known_eq (lhs_ops[0].off, vr->operands[i].off))
3440 | i--, j--; |
3441 | else if (vr->operands[i].opcode == MEM_REF |
3442 | && maybe_ne (vr->operands[i].off, -1)) |
3443 | { |
3444 | extra_off = vr->operands[i].off - lhs_ops[0].off; |
3445 | i--, j--; |
3446 | } |
3447 | } |
3448 | |
3449 | /* i now points to the first additional op. |
3450 | ??? LHS may not be completely contained in VR, one or more |
3451 | VIEW_CONVERT_EXPRs could be in its way. We could at least |
3452 | try handling outermost VIEW_CONVERT_EXPRs. */ |
3453 | if (j != -1) |
3454 | return (void *)-1; |
3455 | |
3456 | /* Punt if the additional ops contain a storage order barrier. */ |
3457 | for (k = i; k >= 0; k--) |
3458 | { |
3459 | vro = &vr->operands[k]; |
3460 | if (vro->opcode == VIEW_CONVERT_EXPR && vro->reverse) |
3461 | return (void *)-1; |
3462 | } |
3463 | |
3464 | /* Now re-write REF to be based on the rhs of the assignment. */ |
3465 | tree rhs1 = gimple_assign_rhs1 (def_stmt); |
3466 | copy_reference_ops_from_ref (rhs1, &rhs); |
3467 | |
3468 | /* Apply an extra offset to the inner MEM_REF of the RHS. */ |
3469 | bool force_no_tbaa = false; |
3470 | if (maybe_ne (extra_off, 0)) |
3471 | { |
3472 | if (rhs.length () < 2) |
3473 | return (void *)-1; |
3474 | int ix = rhs.length () - 2; |
3475 | if (rhs[ix].opcode != MEM_REF |
3476 | || known_eq (rhs[ix].off, -1))
3477 | return (void *)-1; |
3478 | rhs[ix].off += extra_off; |
3479 | rhs[ix].op0 = int_const_binop (PLUS_EXPR, rhs[ix].op0, |
3480 | build_int_cst (TREE_TYPE (rhs[ix].op0),
3481 | extra_off)); |
3482 | /* When we have offsetted the RHS, reading only parts of it, |
3483 | we can no longer use the original TBAA type, force alias-set |
3484 | zero. */ |
3485 | force_no_tbaa = true; |
3486 | } |
3487 | |
3488 | /* Save the operands since we need to use the original ones for |
3489 | the hash entry we use. */ |
3490 | if (!data->saved_operands.exists ()) |
3491 | data->saved_operands = vr->operands.copy (); |
3492 | |
3493 | /* We need to pre-pend vr->operands[0..i] to rhs. */ |
3494 | vec<vn_reference_op_s> old = vr->operands; |
3495 | if (i + 1 + rhs.length () > vr->operands.length ()) |
3496 | vr->operands.safe_grow (i + 1 + rhs.length (), true); |
3497 | else |
3498 | vr->operands.truncate (i + 1 + rhs.length ()); |
3499 | FOR_EACH_VEC_ELT (rhs, j, vro)
3500 | vr->operands[i + 1 + j] = *vro; |
3501 | valueize_refs (&vr->operands); |
3502 | if (old == shared_lookup_references) |
3503 | shared_lookup_references = vr->operands; |
3504 | vr->hashcode = vn_reference_compute_hash (vr); |
3505 | |
3506 | /* Try folding the new reference to a constant. */ |
3507 | tree val = fully_constant_vn_reference_p (vr); |
3508 | if (val) |
3509 | { |
3510 | if (data->partial_defs.is_empty ()) |
3511 | return data->finish (ao_ref_alias_set (&lhs_ref), |
3512 | ao_ref_base_alias_set (&lhs_ref), val); |
3513 | /* This is the only interesting case for partial-def handling |
3514 | coming from targets that like to gimplify init-ctors as |
3515 | aggregate copies from constant data like aarch64 for |
3516 | PR83518. */ |
3517 | if (maxsize.is_constant (&maxsizei) && known_eq (ref->size, maxsize))
3518 | { |
3519 | pd_data pd; |
3520 | pd.rhs = val; |
3521 | pd.rhs_off = 0; |
3522 | pd.offset = 0; |
3523 | pd.size = maxsizei; |
3524 | return data->push_partial_def (pd, ao_ref_alias_set (&lhs_ref), |
3525 | ao_ref_base_alias_set (&lhs_ref), |
3526 | 0, maxsizei); |
3527 | } |
3528 | } |
3529 | |
3530 | /* Continuing with partial defs isn't easily possible here, we |
3531 | have to find a full def from further lookups from here. Probably |
3532 | not worth special-casing everywhere. */
3533 | if (!data->partial_defs.is_empty ()) |
3534 | return (void *)-1; |
3535 | |
3536 | /* Adjust *ref from the new operands. */ |
3537 | ao_ref rhs1_ref; |
3538 | ao_ref_init (&rhs1_ref, rhs1); |
3539 | if (!ao_ref_init_from_vn_reference (&r, |
3540 | force_no_tbaa ? 0 |
3541 | : ao_ref_alias_set (&rhs1_ref), |
3542 | force_no_tbaa ? 0 |
3543 | : ao_ref_base_alias_set (&rhs1_ref), |
3544 | vr->type, vr->operands)) |
3545 | return (void *)-1; |
3546 | /* This can happen with bitfields. */ |
3547 | if (maybe_ne (ref->size, r.size)) |
3548 | { |
3549 | /* If the access lacks some subsetting simply apply that by |
3550 | shortening it. That in the end can only be successful |
3551 | if we can pun the lookup result which in turn requires |
3552 | exact offsets. */ |
3553 | if (known_eq (r.size, r.max_size)
3554 | && known_lt (ref->size, r.size))
3555 | r.size = r.max_size = ref->size; |
3556 | else |
3557 | return (void *)-1; |
3558 | } |
3559 | *ref = r; |
3560 | |
3561 | /* Do not update last seen VUSE after translating. */ |
3562 | data->last_vuse_ptr = NULL;
3563 | /* Invalidate the original access path since it now contains
3564 | the wrong base. */
3565 | data->orig_ref.ref = NULL_TREE;
3566 | /* Use the alias-set of this LHS for recording an eventual result. */ |
3567 | if (data->first_set == -2) |
3568 | { |
3569 | data->first_set = ao_ref_alias_set (&lhs_ref); |
3570 | data->first_base_set = ao_ref_base_alias_set (&lhs_ref); |
3571 | } |
3572 | |
3573 | /* Keep looking for the adjusted *REF / VR pair. */ |
3574 | return NULL;
3575 | } |
3576 | |
3577 | /* 6) For memcpy copies translate the reference through them if the copy |
3578 | kills ref. But we cannot (easily) do this translation if the memcpy is |
3579 | a storage order barrier, i.e. is equivalent to a VIEW_CONVERT_EXPR that |
3580 | can modify the storage order of objects (see storage_order_barrier_p). */ |
3581 | else if (data->vn_walk_kind == VN_WALKREWRITE |
3582 | && is_gimple_reg_type (vr->type) |
3583 | /* ??? Handle BCOPY as well. */ |
3584 | && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY) |
3585 | || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY_CHK) |
3586 | || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY) |
3587 | || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY_CHK) |
3588 | || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE) |
3589 | || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE_CHK)) |
3590 | && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
3591 | || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
3592 | && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
3593 | || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
3594 | && (poly_int_tree_p (gimple_call_arg (def_stmt, 2), &copy_size)
3595 | || (TREE_CODE (gimple_call_arg (def_stmt, 2)) == SSA_NAME
3596 | && poly_int_tree_p (SSA_VAL (gimple_call_arg (def_stmt, 2)),
3597 | &copy_size)))
3598 | /* Handling this is more complicated, give up for now. */ |
3599 | && data->partial_defs.is_empty ()) |
3600 | { |
3601 | tree lhs, rhs; |
3602 | ao_ref r; |
3603 | poly_int64 rhs_offset, lhs_offset; |
3604 | vn_reference_op_s op; |
3605 | poly_uint64 mem_offset; |
3606 | poly_int64 at, byte_maxsize; |
3607 | |
3608 | /* Only handle non-variable, addressable refs. */ |
3609 | if (maybe_ne (ref->size, maxsize) |
3610 | || !multiple_p (offset, BITS_PER_UNIT, &at)
3611 | || !multiple_p (maxsize, BITS_PER_UNIT, &byte_maxsize))
3612 | return (void *)-1; |
3613 | |
3614 | /* Extract a pointer base and an offset for the destination. */ |
3615 | lhs = gimple_call_arg (def_stmt, 0); |
3616 | lhs_offset = 0; |
3617 | if (TREE_CODE (lhs) == SSA_NAME)
3618 | {
3619 | lhs = vn_valueize (lhs);
3620 | if (TREE_CODE (lhs) == SSA_NAME)
3621 | {
3622 | gimple *def_stmt = SSA_NAME_DEF_STMT (lhs);
3623 | if (gimple_assign_single_p (def_stmt)
3624 | && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
3625 | lhs = gimple_assign_rhs1 (def_stmt);
3626 | }
3627 | }
3628 | if (TREE_CODE (lhs) == ADDR_EXPR)
3629 | {
3630 | if (AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (lhs)))
3631 | && TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (TREE_TYPE (lhs))))
3632 | return (void *)-1;
3633 | tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
3634 | &lhs_offset);
3635 | if (!tem)
3636 | return (void *)-1;
3637 | if (TREE_CODE (tem) == MEM_REF
3638 | && poly_int_tree_p (TREE_OPERAND (tem, 1), &mem_offset))
3639 | {
3640 | lhs = TREE_OPERAND (tem, 0);
3641 | if (TREE_CODE (lhs) == SSA_NAME)
3642 | lhs = vn_valueize (lhs);
3643 | lhs_offset += mem_offset;
3644 | }
3645 | else if (DECL_P (tem))
3646 | lhs = build_fold_addr_expr (tem);
3647 | else |
3648 | return (void *)-1; |
3649 | } |
3650 | if (TREE_CODE (lhs) != SSA_NAME
3651 | && TREE_CODE (lhs) != ADDR_EXPR)
3652 | return (void *)-1; |
3653 | |
3654 | /* Extract a pointer base and an offset for the source. */ |
3655 | rhs = gimple_call_arg (def_stmt, 1); |
3656 | rhs_offset = 0; |
3657 | if (TREE_CODE (rhs) == SSA_NAME)
3658 | rhs = vn_valueize (rhs);
3659 | if (TREE_CODE (rhs) == ADDR_EXPR)
3660 | {
3661 | if (AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (rhs)))
3662 | && TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (TREE_TYPE (rhs))))
3663 | return (void *)-1;
3664 | tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
3665 | &rhs_offset);
3666 | if (!tem)
3667 | return (void *)-1;
3668 | if (TREE_CODE (tem) == MEM_REF
3669 | && poly_int_tree_p (TREE_OPERAND (tem, 1), &mem_offset))
3670 | {
3671 | rhs = TREE_OPERAND (tem, 0);
3672 | rhs_offset += mem_offset;
3673 | }
3674 | else if (DECL_P (tem)
3675 | || TREE_CODE (tem) == STRING_CST)
3676 | rhs = build_fold_addr_expr (tem);
3677 | else
3678 | return (void *)-1;
3679 | }
3680 | if (TREE_CODE (rhs) == SSA_NAME)
3681 | rhs = SSA_VAL (rhs);
3682 | else if (TREE_CODE (rhs) != ADDR_EXPR)
3683 | return (void *)-1; |
3684 | |
3685 | /* The bases of the destination and the references have to agree. */ |
3686 | if (TREE_CODE (base) == MEM_REF)
3687 | {
3688 | if (TREE_OPERAND (base, 0) != lhs
3689 | || !poly_int_tree_p (TREE_OPERAND (base, 1), &mem_offset))
3690 | return (void *) -1;
3691 | at += mem_offset;
3692 | }
3693 | else if (!DECL_P (base)
3694 | || TREE_CODE (lhs) != ADDR_EXPR
3695 | || TREE_OPERAND (lhs, 0) != base)
3696 | return (void *)-1; |
3697 | |
3698 | /* If the access is completely outside of the memcpy destination |
3699 | area there is no aliasing. */ |
3700 | if (!ranges_maybe_overlap_p (lhs_offset, copy_size, at, byte_maxsize)) |
3701 | return NULL;
3702 | /* And the access has to be contained within the memcpy destination. */ |
3703 | if (!known_subrange_p (at, byte_maxsize, lhs_offset, copy_size)) |
3704 | return (void *)-1; |
3705 | |
3706 | /* Save the operands since we need to use the original ones for |
3707 | the hash entry we use. */ |
3708 | if (!data->saved_operands.exists ()) |
3709 | data->saved_operands = vr->operands.copy (); |
3710 | |
3711 | /* Make room for 2 operands in the new reference. */ |
3712 | if (vr->operands.length () < 2) |
3713 | { |
3714 | vec<vn_reference_op_s> old = vr->operands; |
3715 | vr->operands.safe_grow_cleared (2, true); |
3716 | if (old == shared_lookup_references) |
3717 | shared_lookup_references = vr->operands; |
3718 | } |
3719 | else |
3720 | vr->operands.truncate (2); |
3721 | |
3722 | /* The looked-through reference is a simple MEM_REF. */ |
3723 | memset (&op, 0, sizeof (op)); |
3724 | op.type = vr->type; |
3725 | op.opcode = MEM_REF; |
3726 | op.op0 = build_int_cst (ptr_type_node, at - lhs_offset + rhs_offset);
3727 | op.off = at - lhs_offset + rhs_offset; |
3728 | vr->operands[0] = op; |
3729 | op.type = TREE_TYPE (rhs);
3730 | op.opcode = TREE_CODE (rhs);
3731 | op.op0 = rhs; |
3732 | op.off = -1; |
3733 | vr->operands[1] = op; |
3734 | vr->hashcode = vn_reference_compute_hash (vr); |
3735 | |
3736 | /* Try folding the new reference to a constant. */ |
3737 | tree val = fully_constant_vn_reference_p (vr); |
3738 | if (val) |
3739 | return data->finish (0, 0, val); |
3740 | |
3741 | /* Adjust *ref from the new operands. */ |
3742 | if (!ao_ref_init_from_vn_reference (&r, 0, 0, vr->type, vr->operands)) |
3743 | return (void *)-1; |
3744 | /* This can happen with bitfields. */ |
3745 | if (maybe_ne (ref->size, r.size)) |
3746 | return (void *)-1; |
3747 | *ref = r; |
3748 | |
3749 | /* Do not update last seen VUSE after translating. */ |
3750 | data->last_vuse_ptr = NULL;
3751 | /* Invalidate the original access path since it now contains
3752 | the wrong base. */
3753 | data->orig_ref.ref = NULL_TREE;
3754 | /* Use the alias-set of this stmt for recording an eventual result. */ |
3755 | if (data->first_set == -2) |
3756 | { |
3757 | data->first_set = 0; |
3758 | data->first_base_set = 0; |
3759 | } |
3760 | |
3761 | /* Keep looking for the adjusted *REF / VR pair. */ |
3762 | return NULL;
3763 | } |
3764 | |
3765 | /* Bail out and stop walking. */ |
3766 | return (void *)-1; |
3767 | } |
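/* Illustrative sketch of case 6 (a hypothetical input, not code from this
   file): given

     struct S a, b;
     memcpy (&a, &b, sizeof (a));
     ... = a.f;

   the reference a.f is rewritten into a MEM_REF based on the copy source
   at the corresponding byte offset, so a later lookup can reuse a value
   recorded for b.f.  As checked above this only applies when the access
   is byte-aligned, not variable-sized and fully contained in the copied
   region; otherwise the walk stops with (void *)-1.  */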
3768 | |
3769 | /* Return a reference op vector from OP that can be used for |
3770 | vn_reference_lookup_pieces. The caller is responsible for releasing |
3771 | the vector. */ |
3772 | |
3773 | vec<vn_reference_op_s> |
3774 | vn_reference_operands_for_lookup (tree op) |
3775 | { |
3776 | bool valueized; |
3777 | return valueize_shared_reference_ops_from_ref (op, &valueized).copy (); |
3778 | } |
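/* Minimal usage sketch for a hypothetical caller; the alias-set arguments
   are placeholders.  The vector feeds vn_reference_lookup_pieces below and
   must be released afterwards:

     vec<vn_reference_op_s> ops = vn_reference_operands_for_lookup (op);
     vn_reference_t res;
     tree val = vn_reference_lookup_pieces (gimple_vuse (stmt),
                                            get_alias_set (op), 0,
                                            TREE_TYPE (op), ops,
                                            &res, VN_WALK);
     ops.release ();  */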
3779 | |
3780 | /* Lookup a reference operation by its parts, in the current hash table.
3781 | Returns the resulting value number if it exists in the hash table, |
3782 | NULL_TREE otherwise. VNRESULT will be filled in with the actual |
3783 | vn_reference_t stored in the hashtable if something is found. */ |
3784 | |
3785 | tree |
3786 | vn_reference_lookup_pieces (tree vuse, alias_set_type set, |
3787 | alias_set_type base_set, tree type, |
3788 | vec<vn_reference_op_s> operands, |
3789 | vn_reference_t *vnresult, vn_lookup_kind kind) |
3790 | { |
3791 | struct vn_reference_s vr1; |
3792 | vn_reference_t tmp; |
3793 | tree cst; |
3794 | |
3795 | if (!vnresult) |
3796 | vnresult = &tmp; |
3797 | *vnresult = NULL;
3798 | |
3799 | vr1.vuse = vuse_ssa_val (vuse); |
3800 | shared_lookup_references.truncate (0); |
3801 | shared_lookup_references.safe_grow (operands.length (), true); |
3802 | memcpy (shared_lookup_references.address (), |
3803 | operands.address (), |
3804 | sizeof (vn_reference_op_s) |
3805 | * operands.length ()); |
3806 | bool valueized_p; |
3807 | valueize_refs_1 (&shared_lookup_references, &valueized_p); |
3808 | vr1.operands = shared_lookup_references; |
3809 | vr1.type = type; |
3810 | vr1.set = set; |
3811 | vr1.base_set = base_set; |
3812 | vr1.hashcode = vn_reference_compute_hash (&vr1); |
3813 | if ((cst = fully_constant_vn_reference_p (&vr1))) |
3814 | return cst; |
3815 | |
3816 | vn_reference_lookup_1 (&vr1, vnresult); |
3817 | if (!*vnresult |
3818 | && kind != VN_NOWALK |
3819 | && vr1.vuse) |
3820 | { |
3821 | ao_ref r; |
3822 | unsigned limit = param_sccvn_max_alias_queries_per_access;
3823 | vn_walk_cb_data data (&vr1, NULL_TREE, NULL, kind, true, NULL_TREE,
3824 | false); |
3825 | vec<vn_reference_op_s> ops_for_ref; |
3826 | if (!valueized_p) |
3827 | ops_for_ref = vr1.operands; |
3828 | else |
3829 | { |
3830 | /* For ao_ref_from_mem we have to ensure only available SSA names |
3831 | end up in base and the only convenient way to make this work |
3832 | for PRE is to re-valueize with that in mind. */ |
3833 | ops_for_ref.create (operands.length ()); |
3834 | ops_for_ref.quick_grow (operands.length ()); |
3835 | memcpy (ops_for_ref.address (), |
3836 | operands.address (), |
3837 | sizeof (vn_reference_op_s) |
3838 | * operands.length ()); |
3839 | valueize_refs_1 (&ops_for_ref, &valueized_p, true); |
3840 | } |
3841 | if (ao_ref_init_from_vn_reference (&r, set, base_set, type, |
3842 | ops_for_ref)) |
3843 | *vnresult |
3844 | = ((vn_reference_t) |
3845 | walk_non_aliased_vuses (&r, vr1.vuse, true, vn_reference_lookup_2, |
3846 | vn_reference_lookup_3, vuse_valueize, |
3847 | limit, &data)); |
3848 | if (ops_for_ref != shared_lookup_references) |
3849 | ops_for_ref.release (); |
3850 | gcc_checking_assert (vr1.operands == shared_lookup_references);
3851 | if (*vnresult |
3852 | && data.same_val |
3853 | && (!(*vnresult)->result |
3854 | || !operand_equal_p ((*vnresult)->result, data.same_val))) |
3855 | { |
3856 | *vnresult = NULL;
3857 | return NULL_TREE;
3858 | } |
3859 | } |
3860 | |
3861 | if (*vnresult) |
3862 | return (*vnresult)->result; |
3863 | |
3864 | return NULL_TREE;
3865 | } |
3866 | |
3867 | /* Lookup OP in the current hash table, and return the resulting value |
3868 | number if it exists in the hash table. Return NULL_TREE if it does |
3869 | not exist in the hash table or if the result field of the structure |
3870 | was NULL.  VNRESULT will be filled in with the vn_reference_t
3871 | stored in the hashtable if one exists. When TBAA_P is false assume |
3872 | we are looking up a store and treat it as having alias-set zero. |
3873 | *LAST_VUSE_PTR will be updated with the VUSE with which the value lookup succeeded.
3874 | MASK is either NULL_TREE, or can be an INTEGER_CST if the result of the |
3875 | load is bitwise anded with MASK and so we are only interested in a subset |
3876 | of the bits and can ignore if the other bits are uninitialized or |
3877 | not initialized with constants. When doing redundant store removal |
3878 | the caller has to set REDUNDANT_STORE_REMOVAL_P. */ |
3879 | |
3880 | tree |
3881 | vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind, |
3882 | vn_reference_t *vnresult, bool tbaa_p, |
3883 | tree *last_vuse_ptr, tree mask, |
3884 | bool redundant_store_removal_p) |
3885 | { |
3886 | vec<vn_reference_op_s> operands; |
3887 | struct vn_reference_s vr1; |
3888 | bool valueized_anything; |
3889 | |
3890 | if (vnresult) |
3891 | *vnresult = NULL;
3892 | |
3893 | vr1.vuse = vuse_ssa_val (vuse); |
3894 | vr1.operands = operands |
3895 | = valueize_shared_reference_ops_from_ref (op, &valueized_anything); |
3896 | |
3897 | /* Handle &MEM[ptr + 5].b[1].c as POINTER_PLUS_EXPR. Avoid doing |
3898 | this before the pass folding __builtin_object_size had a chance to run. */ |
3899 | if ((cfun->curr_properties & PROP_objsz)
3900 | && operands[0].opcode == ADDR_EXPR |
3901 | && operands.last ().opcode == SSA_NAME) |
3902 | { |
3903 | poly_int64 off = 0; |
3904 | vn_reference_op_t vro; |
3905 | unsigned i; |
3906 | for (i = 1; operands.iterate (i, &vro); ++i) |
3907 | { |
3908 | if (vro->opcode == SSA_NAME) |
3909 | break; |
3910 | else if (known_eq (vro->off, -1))
3911 | break; |
3912 | off += vro->off; |
3913 | } |
3914 | if (i == operands.length () - 1 |
3915 | /* Make sure the offset we accumulated in a 64bit int
3916 | fits the address computation carried out in target
3917 | offset precision. */
3918 | && (off.coeffs[0]
3919 | == sext_hwi (off.coeffs[0], TYPE_PRECISION (sizetype))))
3920 | { |
3921 | gcc_assert (operands[i-1].opcode == MEM_REF);
3922 | tree ops[2];
3923 | ops[0] = operands[i].op0;
3924 | ops[1] = wide_int_to_tree (sizetype, off);
3925 | tree res = vn_nary_op_lookup_pieces (2, POINTER_PLUS_EXPR,
3926 | TREE_TYPE (op), ops, NULL);
3927 | if (res)
3928 | return res;
3929 | return NULL_TREE;
3930 | } |
3931 | } |
3932 | |
3933 | vr1.type = TREE_TYPE (op);
3934 | ao_ref op_ref; |
3935 | ao_ref_init (&op_ref, op); |
3936 | vr1.set = ao_ref_alias_set (&op_ref); |
3937 | vr1.base_set = ao_ref_base_alias_set (&op_ref); |
3938 | vr1.hashcode = vn_reference_compute_hash (&vr1); |
3939 | if (mask == NULL_TREE)
3940 | if (tree cst = fully_constant_vn_reference_p (&vr1)) |
3941 | return cst; |
3942 | |
3943 | if (kind != VN_NOWALK && vr1.vuse) |
3944 | { |
3945 | vn_reference_t wvnresult; |
3946 | ao_ref r; |
3947 | unsigned limit = param_sccvn_max_alias_queries_per_access;
3948 | auto_vec<vn_reference_op_s> ops_for_ref; |
3949 | if (valueized_anything) |
3950 | { |
3951 | copy_reference_ops_from_ref (op, &ops_for_ref); |
3952 | bool tem; |
3953 | valueize_refs_1 (&ops_for_ref, &tem, true); |
3954 | } |
3955 | /* Make sure to use a valueized reference if we valueized anything. |
3956 | Otherwise preserve the full reference for advanced TBAA. */ |
3957 | if (!valueized_anything |
3958 | || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.base_set, |
3959 | vr1.type, ops_for_ref)) |
3960 | ao_ref_init (&r, op); |
3961 | vn_walk_cb_data data (&vr1, r.ref ? NULL_TREE : op,
3962 | last_vuse_ptr, kind, tbaa_p, mask, |
3963 | redundant_store_removal_p); |
3964 | |
3965 | wvnresult |
3966 | = ((vn_reference_t) |
3967 | walk_non_aliased_vuses (&r, vr1.vuse, tbaa_p, vn_reference_lookup_2, |
3968 | vn_reference_lookup_3, vuse_valueize, limit, |
3969 | &data)); |
3970 | gcc_checking_assert (vr1.operands == shared_lookup_references);
3971 | if (wvnresult) |
3972 | { |
3973 | gcc_assert (mask == NULL_TREE);
3974 | if (data.same_val |
3975 | && (!wvnresult->result |
3976 | || !operand_equal_p (wvnresult->result, data.same_val))) |
3977 | return NULL_TREE;
3978 | if (vnresult) |
3979 | *vnresult = wvnresult; |
3980 | return wvnresult->result; |
3981 | } |
3982 | else if (mask) |
3983 | return data.masked_result; |
3984 | |
3985 | return NULL_TREE;
3986 | } |
3987 | |
3988 | if (last_vuse_ptr) |
3989 | *last_vuse_ptr = vr1.vuse; |
3990 | if (mask) |
3991 | return NULL_TREE;
3992 | return vn_reference_lookup_1 (&vr1, vnresult); |
3993 | } |
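/* Minimal usage sketch for a hypothetical caller visiting a load:

     vn_reference_t vnresult = NULL;
     tree val = vn_reference_lookup (gimple_assign_rhs1 (stmt),
                                     gimple_vuse (stmt), VN_WALKREWRITE,
                                     &vnresult, true);

   A non-NULL VAL is the value number of the load.  When a MASK is passed,
   a successful masked lookup instead returns the load value bitwise anded
   with MASK and leaves VNRESULT unset.  */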
3994 | |
3995 | /* Lookup CALL in the current hash table and return the entry in |
3996 | *VNRESULT if found. Populates *VR for the hashtable lookup. */ |
3997 | |
3998 | void |
3999 | vn_reference_lookup_call (gcall *call, vn_reference_t *vnresult, |
4000 | vn_reference_t vr) |
4001 | { |
4002 | if (vnresult) |
4003 | *vnresult = NULL;
4004 | |
4005 | tree vuse = gimple_vuse (call); |
4006 | |
4007 | vr->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
4008 | vr->operands = valueize_shared_reference_ops_from_call (call);
4009 | tree lhs = gimple_call_lhs (call);
4010 | /* For non-SSA return values the reference ops contain the LHS. */
4011 | vr->type = ((lhs && TREE_CODE (lhs) == SSA_NAME)
4012 | ? TREE_TYPE (lhs) : NULL_TREE);
4013 | vr->punned = false; |
4014 | vr->set = 0; |
4015 | vr->base_set = 0; |
4016 | vr->hashcode = vn_reference_compute_hash (vr); |
4017 | vn_reference_lookup_1 (vr, vnresult); |
4018 | } |
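/* Minimal usage sketch for a hypothetical caller doing CSE of a call:

     struct vn_reference_s vr1;
     vn_reference_t vnresult;
     vn_reference_lookup_call (call, &vnresult, &vr1);
     if (vnresult && vnresult->result)
       ... reuse the earlier equivalent call's value ...

   On a miss VR1 stays populated, so the same structure can be used to
   insert the call's result afterwards.  */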
4019 | |
4020 | /* Insert OP into the current hash table with a value number of RESULT. */ |
4021 | |
4022 | static void |
4023 | vn_reference_insert (tree op, tree result, tree vuse, tree vdef) |
4024 | { |
4025 | vn_reference_s **slot; |
4026 | vn_reference_t vr1; |
4027 | bool tem; |
4028 | |
4029 | vec<vn_reference_op_s> operands |
4030 | = valueize_shared_reference_ops_from_ref (op, &tem); |
4031 | /* Handle &MEM[ptr + 5].b[1].c as POINTER_PLUS_EXPR. Avoid doing this |
4032 | before the pass folding __builtin_object_size had a chance to run. */ |
4033 | if ((cfun->curr_properties & PROP_objsz)
4034 | && operands[0].opcode == ADDR_EXPR |
4035 | && operands.last ().opcode == SSA_NAME) |
4036 | { |
4037 | poly_int64 off = 0; |
4038 | vn_reference_op_t vro; |
4039 | unsigned i; |
4040 | for (i = 1; operands.iterate (i, &vro); ++i) |
4041 | { |
4042 | if (vro->opcode == SSA_NAME) |
4043 | break; |
4044 | else if (known_eq (vro->off, -1))
4045 | break; |
4046 | off += vro->off; |
4047 | } |
4048 | if (i == operands.length () - 1 |
4049 | /* Make sure the offset we accumulated in a 64bit int
4050 | fits the address computation carried out in target
4051 | offset precision. */
4052 | && (off.coeffs[0]
4053 | == sext_hwi (off.coeffs[0], TYPE_PRECISION (sizetype))))
4054 | { |
4055 | gcc_assert (operands[i-1].opcode == MEM_REF);
4056 | tree ops[2];
4057 | ops[0] = operands[i].op0;
4058 | ops[1] = wide_int_to_tree (sizetype, off);
4059 | vn_nary_op_insert_pieces (2, POINTER_PLUS_EXPR,
4060 | TREE_TYPE (op), ops, result,
4061 | VN_INFO (result)->value_id); |
4062 | return; |
4063 | } |
4064 | } |
4065 | |
4066 | vr1 = XOBNEW (&vn_tables_obstack, vn_reference_s);
4067 | if (TREE_CODE (result) == SSA_NAME)
4068 | vr1->value_id = VN_INFO (result)->value_id; |
4069 | else |
4070 | vr1->value_id = get_or_alloc_constant_value_id (result); |
4071 | vr1->vuse = vuse_ssa_val (vuse); |
4072 | vr1->operands = operands.copy (); |
4073 | vr1->type = TREE_TYPE (op);
4074 | vr1->punned = false; |
4075 | ao_ref op_ref; |
4076 | ao_ref_init (&op_ref, op); |
4077 | vr1->set = ao_ref_alias_set (&op_ref); |
4078 | vr1->base_set = ao_ref_base_alias_set (&op_ref); |
4079 | vr1->hashcode = vn_reference_compute_hash (vr1); |
4080 | vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
4081 | vr1->result_vdef = vdef; |
4082 | |
4083 | slot = valid_info->references->find_slot_with_hash (vr1, vr1->hashcode, |
4084 | INSERT); |
4085 | |
4086 | /* Because IL walking on reference lookup can end up visiting |
4087 | a def that is only to be visited later in iteration order |
4088 | when we are about to make an irreducible region reducible |
4089 | the def can be effectively processed and its ref being inserted |
4090 | by vn_reference_lookup_3 already. So we cannot assert (!*slot) |
4091 | but save a lookup if we deal with already inserted refs here. */ |
4092 | if (*slot) |
4093 | { |
4094 | /* We cannot assert that we have the same value either because |
4095 | when disentangling an irreducible region we may end up visiting |
4096 | a use before the corresponding def. That's a missed optimization |
4097 | only though. See gcc.dg/tree-ssa/pr87126.c for example. */ |
4098 | if (dump_file && (dump_flags & TDF_DETAILS) |
4099 | && !operand_equal_p ((*slot)->result, vr1->result, 0)) |
4100 | { |
4101 | fprintf (dump_file, "Keeping old value "); |
4102 | print_generic_expr (dump_file, (*slot)->result); |
4103 | fprintf (dump_file, " because of collision\n"); |
4104 | } |
4105 | free_reference (vr1); |
4106 | obstack_free (&vn_tables_obstack, vr1);
4107 | return; |
4108 | } |
4109 | |
4110 | *slot = vr1; |
4111 | vr1->next = last_inserted_ref; |
4112 | last_inserted_ref = vr1; |
4113 | } |
4114 | |
4115 | /* Insert a reference by its pieces into the current hash table with
4116 | a value number of RESULT. Return the resulting reference |
4117 | structure we created. */ |
4118 | |
4119 | vn_reference_t |
4120 | vn_reference_insert_pieces (tree vuse, alias_set_type set, |
4121 | alias_set_type base_set, tree type, |
4122 | vec<vn_reference_op_s> operands, |
4123 | tree result, unsigned int value_id) |
4124 | |
4125 | { |
4126 | vn_reference_s **slot; |
4127 | vn_reference_t vr1; |
4128 | |
4129 | vr1 = XOBNEW (&vn_tables_obstack, vn_reference_s);
4130 | vr1->value_id = value_id; |
4131 | vr1->vuse = vuse_ssa_val (vuse); |
4132 | vr1->operands = operands; |
4133 | valueize_refs (&vr1->operands); |
4134 | vr1->type = type; |
4135 | vr1->punned = false; |
4136 | vr1->set = set; |
4137 | vr1->base_set = base_set; |
4138 | vr1->hashcode = vn_reference_compute_hash (vr1); |
4139 | if (result && TREE_CODE (result) == SSA_NAME)
4140 | result = SSA_VAL (result); |
4141 | vr1->result = result; |
4142 | vr1->result_vdef = NULL_TREE;
4143 | |
4144 | slot = valid_info->references->find_slot_with_hash (vr1, vr1->hashcode, |
4145 | INSERT); |
4146 | |
4147 | /* At this point we should have all the things inserted that we have |
4148 | seen before, and we should never try inserting something that |
4149 | already exists. */ |
4150 | gcc_assert (!*slot);
4151 | |
4152 | *slot = vr1; |
4153 | vr1->next = last_inserted_ref; |
4154 | last_inserted_ref = vr1; |
4155 | return vr1; |
4156 | } |
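/* Minimal usage sketch for a hypothetical PRE-style caller pairing a
   failed piece-wise lookup with an insertion:

     vn_reference_t ref;
     tree val = vn_reference_lookup_pieces (vuse, set, base_set, type,
                                            operands, &ref, VN_WALK);
     if (!val)
       ref = vn_reference_insert_pieces (vuse, set, base_set, type,
                                         operands, result, value_id);

   Unlike vn_reference_insert above, which tolerates collisions, inserting
   an already-present reference here trips the assert.  */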
4157 | |
4158 | /* Compute and return the hash value for nary operation VBO1. */ |
4159 | |
4160 | hashval_t |
4161 | vn_nary_op_compute_hash (const vn_nary_op_t vno1) |
4162 | { |
4163 | inchash::hash hstate; |
4164 | unsigned i; |
4165 | |
4166 | if (((vno1->length == 2 |
4167 | && commutative_tree_code (vno1->opcode)) |
4168 | || (vno1->length == 3 |
4169 | && commutative_ternary_tree_code (vno1->opcode))) |
4170 | && tree_swap_operands_p (vno1->op[0], vno1->op[1])) |
4171 | std::swap (vno1->op[0], vno1->op[1]); |
4172 | else if (TREE_CODE_CLASS (vno1->opcode) == tcc_comparison
4173 | && tree_swap_operands_p (vno1->op[0], vno1->op[1])) |
4174 | { |
4175 | std::swap (vno1->op[0], vno1->op[1]); |
4176 | vno1->opcode = swap_tree_comparison (vno1->opcode); |
4177 | } |
4178 | |
4179 | hstate.add_int (vno1->opcode); |
4180 | for (i = 0; i < vno1->length; ++i) |
4181 | inchash::add_expr (vno1->op[i], hstate); |
4182 | |
4183 | return hstate.end (); |
4184 | } |
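/* Example of the canonicalization above (illustrative only): for
   commutative codes the operands are put into a canonical order before
   hashing, so

     tree ops1[2] = { a, b };
     tree ops2[2] = { b, a };

   probe the same slot when looked up as PLUS_EXPR, and a comparison like
   LT_EXPR (a, b) is rewritten to GT_EXPR (b, a) via swap_tree_comparison
   so both spellings get the same hashcode.  */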
4185 | |
4186 | /* Compare nary operations VNO1 and VNO2 and return true if they are |
4187 | equivalent. */ |
4188 | |
4189 | bool |
4190 | vn_nary_op_eq (const_vn_nary_op_t const vno1, const_vn_nary_op_t const vno2) |
4191 | { |
4192 | unsigned i; |
4193 | |
4194 | if (vno1->hashcode != vno2->hashcode) |
4195 | return false; |
4196 | |
4197 | if (vno1->length != vno2->length) |
4198 | return false; |
4199 | |
4200 | if (vno1->opcode != vno2->opcode |
4201 | || !types_compatible_p (vno1->type, vno2->type)) |
4202 | return false; |
4203 | |
4204 | for (i = 0; i < vno1->length; ++i) |
4205 | if (!expressions_equal_p (vno1->op[i], vno2->op[i])) |
4206 | return false; |
4207 | |
4208 | /* BIT_INSERT_EXPR has an implicit operand as the type precision
4209 | of op1. Need to check to make sure they are the same. */
4210 | if (vno1->opcode == BIT_INSERT_EXPR
4211 | && TREE_CODE (vno1->op[1]) == INTEGER_CST
4212 | && TYPE_PRECISION (TREE_TYPE (vno1->op[1]))
4213 | != TYPE_PRECISION (TREE_TYPE (vno2->op[1])))
4214 | return false; |
4215 | |
4216 | return true; |
4217 | } |
4218 | |
4219 | /* Initialize VNO from the pieces provided. */ |
4220 | |
4221 | static void |
4222 | init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length, |
4223 | enum tree_code code, tree type, tree *ops) |
4224 | { |
4225 | vno->opcode = code; |
4226 | vno->length = length; |
4227 | vno->type = type; |
4228 | memcpy (&vno->op[0], ops, sizeof (tree) * length); |
4229 | } |
4230 | |
4231 | /* Return the number of operands for a vn_nary ops structure from STMT. */ |
4232 | |
4233 | unsigned int |
4234 | vn_nary_length_from_stmt (gimple *stmt) |
4235 | { |
4236 | switch (gimple_assign_rhs_code (stmt)) |
4237 | { |
4238 | case REALPART_EXPR: |
4239 | case IMAGPART_EXPR: |
4240 | case VIEW_CONVERT_EXPR: |
4241 | return 1; |
4242 | |
4243 | case BIT_FIELD_REF: |
4244 | return 3; |
4245 | |
4246 | case CONSTRUCTOR: |
4247 | return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
4248 | |
4249 | default: |
4250 | return gimple_num_ops (stmt) - 1; |
4251 | } |
4252 | } |
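/* Examples (illustrative): "x = VIEW_CONVERT_EXPR<T>(y)" has length 1,
   "x = BIT_FIELD_REF <y, 8, 0>" has length 3, a CONSTRUCTOR contributes
   one operand per element, and a plain binary assignment "x = a + b"
   falls into the default case with gimple_num_ops (stmt) - 1 == 2.  */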
4253 | |
4254 | /* Initialize VNO from STMT. */ |
4255 | |
4256 | void |
4257 | init_vn_nary_op_from_stmt (vn_nary_op_t vno, gassign *stmt) |
4258 | { |
4259 | unsigned i; |
4260 | |
4261 | vno->opcode = gimple_assign_rhs_code (stmt); |
4262 | vno->type = TREE_TYPE (gimple_assign_lhs (stmt));
4263 | switch (vno->opcode) |
4264 | { |
4265 | case REALPART_EXPR: |
4266 | case IMAGPART_EXPR: |
4267 | case VIEW_CONVERT_EXPR: |
4268 | vno->length = 1; |
4269 | vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
4270 | break; |
4271 | |
4272 | case BIT_FIELD_REF: |
4273 | vno->length = 3; |
4274 | vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
4275 | vno->op[1] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 1);
4276 | vno->op[2] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 2);
4277 | break; |
4278 | |
4279 | case CONSTRUCTOR: |
4280 | vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
4281 | for (i = 0; i < vno->length; ++i)
4282 | vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
4283 | break; |
4284 | |
4285 | default: |
4286 | gcc_checking_assert (!gimple_assign_single_p (stmt));
4287 | vno->length = gimple_num_ops (stmt) - 1; |
4288 | for (i = 0; i < vno->length; ++i) |
4289 | vno->op[i] = gimple_op (stmt, i + 1); |
4290 | } |
4291 | } |
4292 | |
4293 | /* Compute the hashcode for VNO and look for it in the hash table; |
4294 | return the resulting value number if it exists in the hash table. |
4295 | Return NULL_TREE if it does not exist in the hash table or if the |
4296 | result field of the operation is NULL. VNRESULT will contain the |
4297 | vn_nary_op_t from the hashtable if it exists. */ |
4298 | |
4299 | static tree |
4300 | vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult) |
4301 | { |
4302 | vn_nary_op_s **slot; |
4303 | |
4304 | if (vnresult) |
4305 | *vnresult = NULL;
4306 | |
4307 | for (unsigned i = 0; i < vno->length; ++i) |
4308 | if (TREE_CODE (vno->op[i]) == SSA_NAME)
4309 | vno->op[i] = SSA_VAL (vno->op[i]); |
4310 | |
4311 | vno->hashcode = vn_nary_op_compute_hash (vno); |
4312 | slot = valid_info->nary->find_slot_with_hash (vno, vno->hashcode, NO_INSERT); |
4313 | if (!slot) |
4314 | return NULL_TREE;
4315 | if (vnresult) |
4316 | *vnresult = *slot; |
4317 | return (*slot)->predicated_values ? NULL_TREE : (*slot)->u.result;
4318 | } |
4319 | |
4320 | /* Lookup a n-ary operation by its pieces and return the resulting value |
4321 | number if it exists in the hash table. Return NULL_TREE if it does |
4322 | not exist in the hash table or if the result field of the operation |
4323 | is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable |
4324 | if it exists. */ |
4325 | |
4326 | tree |
4327 | vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code, |
4328 | tree type, tree *ops, vn_nary_op_t *vnresult) |
4329 | { |
4330 | vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
4331 | sizeof_vn_nary_op (length));
4332 | init_vn_nary_op_from_pieces (vno1, length, code, type, ops); |
4333 | return vn_nary_op_lookup_1 (vno1, vnresult); |
4334 | } |
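/* Minimal usage sketch for a hypothetical caller asking whether "a + b"
   already has a value number:

     tree ops[2] = { a, b };
     vn_nary_op_t vnresult;
     tree val = vn_nary_op_lookup_pieces (2, PLUS_EXPR, TREE_TYPE (a),
                                          ops, &vnresult);

   A non-NULL VAL is the recorded result; predicated entries yield
   NULL_TREE here (see vn_nary_op_lookup_1).  */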
4335 | |
4336 | /* Lookup the rhs of STMT in the current hash table, and return the resulting |
4337 | value number if it exists in the hash table. Return NULL_TREE if |
4338 | it does not exist in the hash table. VNRESULT will contain the |
4339 | vn_nary_op_t from the hashtable if it exists. */ |
4340 | |
4341 | tree |
4342 | vn_nary_op_lookup_stmt (gimple *stmt, vn_nary_op_t *vnresult) |
4343 | { |
4344 | vn_nary_op_t vno1 |
4345 | = XALLOCAVAR (struct vn_nary_op_s,
4346 | sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
4347 | init_vn_nary_op_from_stmt (vno1, as_a <gassign *> (stmt)); |
4348 | return vn_nary_op_lookup_1 (vno1, vnresult); |
4349 | } |
4350 | |
4351 | /* Allocate a vn_nary_op_t with LENGTH operands on STACK. */ |
4352 | |
4353 | vn_nary_op_t |
4354 | alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack) |
4355 | { |
4356 | return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
4357 | } |
4358 | |
4359 | /* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's |
4360 | obstack. */ |
4361 | |
4362 | static vn_nary_op_t |
4363 | alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id) |
4364 | { |
4365 | vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length, &vn_tables_obstack); |
4366 | |
4367 | vno1->value_id = value_id; |
4368 | vno1->length = length; |
4369 | vno1->predicated_values = 0; |
4370 | vno1->u.result = result; |
4371 | |
4372 | return vno1; |
4373 | } |
4374 | |
4375 | /* Insert VNO into TABLE. */ |
4376 | |
4377 | static vn_nary_op_t |
4378 | vn_nary_op_insert_into (vn_nary_op_t vno, vn_nary_op_table_type *table) |
4379 | { |
4380 | vn_nary_op_s **slot; |
4381 | |
4382 | gcc_assert (! vno->predicated_values
4383 | || (! vno->u.values->next
4384 | && vno->u.values->n == 1));
4385 | |
4386 | for (unsigned i = 0; i < vno->length; ++i) |
4387 | if (TREE_CODE (vno->op[i]) == SSA_NAME)
4388 | vno->op[i] = SSA_VAL (vno->op[i]); |
4389 | |
4390 | vno->hashcode = vn_nary_op_compute_hash (vno); |
4391 | slot = table->find_slot_with_hash (vno, vno->hashcode, INSERT); |
4392 | vno->unwind_to = *slot; |
4393 | if (*slot) |
4394 | { |
4395 | /* Prefer non-predicated values. |
4396 | ??? Only if those are constant, otherwise, with constant predicated |
4397 | value, turn them into predicated values with entry-block validity |
4398 | (??? but we always find the first valid result currently). */ |
4399 | if ((*slot)->predicated_values |
4400 | && ! vno->predicated_values) |
4401 | { |
4402 | /* ??? We cannot remove *slot from the unwind stack list. |
4403 | For the moment we deal with this by skipping not found |
4404 | entries but this isn't ideal ... */ |
4405 | *slot = vno; |
4406 | /* ??? Maintain a stack of states we can unwind in |
4407 | vn_nary_op_s? But how far do we unwind? In reality |
4408 | we need to push change records somewhere... Or not |
4409 | unwind vn_nary_op_s and linking them but instead |
4410 | unwind the results "list", linking that, which also |
4411 | doesn't move on hashtable resize. */ |
4412 | /* We can also have a ->unwind_to recording *slot there. |
4413 | That way we can make u.values a fixed size array with |
4414 | recording the number of entries but of course we then |
4415 | have always N copies for each unwind_to-state. Or we |
4416 | make sure to only ever append and each unwinding will |
4417 | pop off one entry (but how to deal with predicated |
4418 | replaced with non-predicated here?) */ |
4419 | vno->next = last_inserted_nary; |
4420 | last_inserted_nary = vno; |
4421 | return vno; |
4422 | } |
4423 | else if (vno->predicated_values |
4424 | && ! (*slot)->predicated_values) |
4425 | return *slot; |
4426 | else if (vno->predicated_values |
4427 | && (*slot)->predicated_values) |
4428 | { |
4429 | /* ??? Factor this all into an insert_single_predicated_value
4430 | routine. */ |
4431 | gcc_assert (!vno->u.values->next && vno->u.values->n == 1);
4432 | basic_block vno_bb
4433 | = BASIC_BLOCK_FOR_FN (cfun, vno->u.values->valid_dominated_by_p[0]);
4434 | vn_pval *nval = vno->u.values; |
4435 | vn_pval **next = &vno->u.values; |
4436 | bool found = false; |
4437 | for (vn_pval *val = (*slot)->u.values; val; val = val->next) |
4438 | { |
4439 | if (expressions_equal_p (val->result, nval->result)) |
4440 | { |
4441 | found = true; |
4442 | for (unsigned i = 0; i < val->n; ++i) |
4443 | { |
4444 | basic_block val_bb
4445 | = BASIC_BLOCK_FOR_FN (cfun,
4446 | val->valid_dominated_by_p[i]);
4447 | if (dominated_by_p (CDI_DOMINATORS, vno_bb, val_bb)) |
4448 | /* Value registered with more generic predicate. */ |
4449 | return *slot; |
4450 | else if (flag_checking)
4451 | /* Shouldn't happen, we insert in RPO order. */
4452 | gcc_assert (!dominated_by_p (CDI_DOMINATORS,
4453 | val_bb, vno_bb));
4454 | } |
4455 | /* Append value. */ |
4456 | *next = (vn_pval *) obstack_alloc (&vn_tables_obstack,__extension__ ({ struct obstack *__h = (&vn_tables_obstack ); __extension__ ({ struct obstack *__o = (__h); size_t __len = ((sizeof (vn_pval) + val->n * sizeof (int))); if (__extension__ ({ struct obstack const *__o1 = (__o); (size_t) (__o1->chunk_limit - __o1->next_free); }) < __len) _obstack_newchunk (__o , __len); ((void) ((__o)->next_free += (__len))); }); __extension__ ({ struct obstack *__o1 = (__h); void *__value = (void *) __o1 ->object_base; if (__o1->next_free == __value) __o1-> maybe_empty_object = 1; __o1->next_free = (sizeof (ptrdiff_t ) < sizeof (void *) ? ((__o1->object_base) + (((__o1-> next_free) - (__o1->object_base) + (__o1->alignment_mask )) & ~(__o1->alignment_mask))) : (char *) (((ptrdiff_t ) (__o1->next_free) + (__o1->alignment_mask)) & ~(__o1 ->alignment_mask))); if ((size_t) (__o1->next_free - (char *) __o1->chunk) > (size_t) (__o1->chunk_limit - (char *) __o1->chunk)) __o1->next_free = __o1->chunk_limit ; __o1->object_base = __o1->next_free; __value; }); }) |
4457 | sizeof (vn_pval)__extension__ ({ struct obstack *__h = (&vn_tables_obstack ); __extension__ ({ struct obstack *__o = (__h); size_t __len = ((sizeof (vn_pval) + val->n * sizeof (int))); if (__extension__ ({ struct obstack const *__o1 = (__o); (size_t) (__o1->chunk_limit - __o1->next_free); }) < __len) _obstack_newchunk (__o , __len); ((void) ((__o)->next_free += (__len))); }); __extension__ ({ struct obstack *__o1 = (__h); void *__value = (void *) __o1 ->object_base; if (__o1->next_free == __value) __o1-> maybe_empty_object = 1; __o1->next_free = (sizeof (ptrdiff_t ) < sizeof (void *) ? ((__o1->object_base) + (((__o1-> next_free) - (__o1->object_base) + (__o1->alignment_mask )) & ~(__o1->alignment_mask))) : (char *) (((ptrdiff_t ) (__o1->next_free) + (__o1->alignment_mask)) & ~(__o1 ->alignment_mask))); if ((size_t) (__o1->next_free - (char *) __o1->chunk) > (size_t) (__o1->chunk_limit - (char *) __o1->chunk)) __o1->next_free = __o1->chunk_limit ; __o1->object_base = __o1->next_free; __value; }); }) |
4458 | + val->n * sizeof (int))__extension__ ({ struct obstack *__h = (&vn_tables_obstack ); __extension__ ({ struct obstack *__o = (__h); size_t __len = ((sizeof (vn_pval) + val->n * sizeof (int))); if (__extension__ ({ struct obstack const *__o1 = (__o); (size_t) (__o1->chunk_limit - __o1->next_free); }) < __len) _obstack_newchunk (__o , __len); ((void) ((__o)->next_free += (__len))); }); __extension__ ({ struct obstack *__o1 = (__h); void *__value = (void *) __o1 ->object_base; if (__o1->next_free == __value) __o1-> maybe_empty_object = 1; __o1->next_free = (sizeof (ptrdiff_t ) < sizeof (void *) ? ((__o1->object_base) + (((__o1-> next_free) - (__o1->object_base) + (__o1->alignment_mask )) & ~(__o1->alignment_mask))) : (char *) (((ptrdiff_t ) (__o1->next_free) + (__o1->alignment_mask)) & ~(__o1 ->alignment_mask))); if ((size_t) (__o1->next_free - (char *) __o1->chunk) > (size_t) (__o1->chunk_limit - (char *) __o1->chunk)) __o1->next_free = __o1->chunk_limit ; __o1->object_base = __o1->next_free; __value; }); }); |
4459 | (*next)->next = NULLnullptr; |
4460 | (*next)->result = val->result; |
4461 | (*next)->n = val->n + 1; |
4462 | memcpy ((*next)->valid_dominated_by_p, |
4463 | val->valid_dominated_by_p, |
4464 | val->n * sizeof (int)); |
4465 | (*next)->valid_dominated_by_p[val->n] = vno_bb->index; |
4466 | next = &(*next)->next; |
4467 | if (dump_file && (dump_flags & TDF_DETAILS)) |
4468 | fprintf (dump_file, "Appending predicate to value.\n"); |
4469 | continue; |
4470 | } |
4471 | /* Copy other predicated values. */ |
4472 | *next = (vn_pval *) obstack_alloc (&vn_tables_obstack,__extension__ ({ struct obstack *__h = (&vn_tables_obstack ); __extension__ ({ struct obstack *__o = (__h); size_t __len = ((sizeof (vn_pval) + (val->n-1) * sizeof (int))); if (__extension__ ({ struct obstack const *__o1 = (__o); (size_t) (__o1->chunk_limit - __o1->next_free); }) < __len) _obstack_newchunk (__o , __len); ((void) ((__o)->next_free += (__len))); }); __extension__ ({ struct obstack *__o1 = (__h); void *__value = (void *) __o1 ->object_base; if (__o1->next_free == __value) __o1-> maybe_empty_object = 1; __o1->next_free = (sizeof (ptrdiff_t ) < sizeof (void *) ? ((__o1->object_base) + (((__o1-> next_free) - (__o1->object_base) + (__o1->alignment_mask )) & ~(__o1->alignment_mask))) : (char *) (((ptrdiff_t ) (__o1->next_free) + (__o1->alignment_mask)) & ~(__o1 ->alignment_mask))); if ((size_t) (__o1->next_free - (char *) __o1->chunk) > (size_t) (__o1->chunk_limit - (char *) __o1->chunk)) __o1->next_free = __o1->chunk_limit ; __o1->object_base = __o1->next_free; __value; }); }) |
4473 | sizeof (vn_pval)__extension__ ({ struct obstack *__h = (&vn_tables_obstack ); __extension__ ({ struct obstack *__o = (__h); size_t __len = ((sizeof (vn_pval) + (val->n-1) * sizeof (int))); if (__extension__ ({ struct obstack const *__o1 = (__o); (size_t) (__o1->chunk_limit - __o1->next_free); }) < __len) _obstack_newchunk (__o , __len); ((void) ((__o)->next_free += (__len))); }); __extension__ ({ struct obstack *__o1 = (__h); void *__value = (void *) __o1 ->object_base; if (__o1->next_free == __value) __o1-> maybe_empty_object = 1; __o1->next_free = (sizeof (ptrdiff_t ) < sizeof (void *) ? ((__o1->object_base) + (((__o1-> next_free) - (__o1->object_base) + (__o1->alignment_mask )) & ~(__o1->alignment_mask))) : (char *) (((ptrdiff_t ) (__o1->next_free) + (__o1->alignment_mask)) & ~(__o1 ->alignment_mask))); if ((size_t) (__o1->next_free - (char *) __o1->chunk) > (size_t) (__o1->chunk_limit - (char *) __o1->chunk)) __o1->next_free = __o1->chunk_limit ; __o1->object_base = __o1->next_free; __value; }); }) |
4474 | + (val->n-1) * sizeof (int))__extension__ ({ struct obstack *__h = (&vn_tables_obstack ); __extension__ ({ struct obstack *__o = (__h); size_t __len = ((sizeof (vn_pval) + (val->n-1) * sizeof (int))); if (__extension__ ({ struct obstack const *__o1 = (__o); (size_t) (__o1->chunk_limit - __o1->next_free); }) < __len) _obstack_newchunk (__o , __len); ((void) ((__o)->next_free += (__len))); }); __extension__ ({ struct obstack *__o1 = (__h); void *__value = (void *) __o1 ->object_base; if (__o1->next_free == __value) __o1-> maybe_empty_object = 1; __o1->next_free = (sizeof (ptrdiff_t ) < sizeof (void *) ? ((__o1->object_base) + (((__o1-> next_free) - (__o1->object_base) + (__o1->alignment_mask )) & ~(__o1->alignment_mask))) : (char *) (((ptrdiff_t ) (__o1->next_free) + (__o1->alignment_mask)) & ~(__o1 ->alignment_mask))); if ((size_t) (__o1->next_free - (char *) __o1->chunk) > (size_t) (__o1->chunk_limit - (char *) __o1->chunk)) __o1->next_free = __o1->chunk_limit ; __o1->object_base = __o1->next_free; __value; }); }); |
4475 | memcpy (*next, val, sizeof (vn_pval) + (val->n-1) * sizeof (int)); |
4476 | (*next)->next = NULLnullptr; |
4477 | next = &(*next)->next; |
4478 | } |
4479 | if (!found) |
4480 | *next = nval; |
4481 | |
4482 | *slot = vno; |
4483 | vno->next = last_inserted_nary; |
4484 | last_inserted_nary = vno; |
4485 | return vno; |
4486 | } |
4487 | |
4488 | /* While we do not want to insert things twice it's awkward to |
4489 | avoid it in the case where visit_nary_op pattern-matches stuff |
4490 | and ends up simplifying the replacement to itself. We then |
4491 | get two inserts, one from visit_nary_op and one from |
4492 | vn_nary_build_or_lookup. |
4493 | So allow inserts with the same value number. */ |
4494 | if ((*slot)->u.result == vno->u.result) |
4495 | return *slot; |
4496 | } |
4497 | |
4498 | /* ??? There's also optimistic vs. previous commited state merging |
4499 | that is problematic for the case of unwinding. */ |
4500 | |
4501 | /* ??? We should return NULL if we do not use 'vno' and have the |
4502 | caller release it. */ |
4503 | gcc_assert (!*slot)((void)(!(!*slot) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-sccvn.cc" , 4503, __FUNCTION__), 0 : 0)); |
4504 | |
4505 | *slot = vno; |
4506 | vno->next = last_inserted_nary; |
4507 | last_inserted_nary = vno; |
4508 | return vno; |
4509 | } |
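/* [Editorial sketch -- not part of tree-ssa-sccvn.cc.]  The merge above
   grows a vn_pval chain in which each node carries one result plus the
   block indices whose dominance makes that result valid.  A minimal walk
   over such a chain, mirroring the loops above (names taken from the
   surrounding code):

     for (vn_pval *val = vno->u.values; val; val = val->next)
       for (unsigned i = 0; i < val->n; ++i)
         {
           basic_block valid_bb
             = BASIC_BLOCK_FOR_FN (cfun, val->valid_dominated_by_p[i]);
           // val->result may be used in any block dominated by valid_bb.
         }

   Appending a predicate re-allocates the node with room for n + 1
   indices because valid_dominated_by_p is a trailing array allocated on
   vn_tables_obstack.  */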
4510 | |
4511 | /* Insert an n-ary operation into the current hash table using its
4512 |    pieces.  Return the vn_nary_op_t structure we created and put in
4513 |    the hashtable.  */
4514 |
4515 | vn_nary_op_t
4516 | vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
4517 |                           tree type, tree *ops,
4518 |                           tree result, unsigned int value_id)
4519 | {
4520 |   vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
4521 |   init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
4522 |   return vn_nary_op_insert_into (vno1, valid_info->nary);
4523 | }
4524 | |
4525 | /* Return whether we can track a predicate valid when PRED_E is executed.  */
4526 |
4527 | static bool
4528 | can_track_predicate_on_edge (edge pred_e)
4529 | {
4530 |   /* ??? As we are currently recording the destination basic-block index in
4531 |      vn_pval.valid_dominated_by_p and using dominance for the
4532 |      validity check we cannot track predicates on all edges.  */
4533 |   if (single_pred_p (pred_e->dest))
4534 |     return true;
4535 |   /* Never record for backedges.  */
4536 |   if (pred_e->flags & EDGE_DFS_BACK)
4537 |     return false;
4538 |   /* When there's more than one predecessor we cannot track
4539 |      predicate validity based on the destination block.  The
4540 |      exception is when all other incoming edges' sources are
4541 |      dominated by the destination block.  */
4542 |   edge_iterator ei;
4543 |   edge e;
4544 |   FOR_EACH_EDGE (e, ei, pred_e->dest->preds)
4545 |     if (e != pred_e && ! dominated_by_p (CDI_DOMINATORS, e->src, e->dest))
4546 |       return false;
4547 |   return true;
4548 | }
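/* [Editorial sketch -- not part of tree-ssa-sccvn.cc.]  A shape the
   check above accepts (block numbers made up for illustration):

     bb2:  if (a_1 != 0) goto bb3; else goto bb4;

   The edge bb2->bb3 qualifies because single_pred_p (bb3) holds, so a
   predicate recorded with bb3's index is valid exactly in the blocks
   dominated by bb3.  A forward merge block with two predecessors is
   rejected: being dominated by it would no longer imply the recording
   edge was taken.  */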
4549 | |
4550 | static vn_nary_op_t
4551 | vn_nary_op_insert_pieces_predicated (unsigned int length, enum tree_code code,
4552 |                                      tree type, tree *ops,
4553 |                                      tree result, unsigned int value_id,
4554 |                                      edge pred_e)
4555 | {
4556 |   gcc_assert (can_track_predicate_on_edge (pred_e));
4557 |
4558 |   if (dump_file && (dump_flags & TDF_DETAILS)
4559 |       /* ??? Fix dumping, but currently we only get comparisons.  */
4560 |       && TREE_CODE_CLASS (code) == tcc_comparison)
4561 |     {
4562 |       fprintf (dump_file, "Recording on edge %d->%d ", pred_e->src->index,
4563 |                pred_e->dest->index);
4564 |       print_generic_expr (dump_file, ops[0], TDF_SLIM);
4565 |       fprintf (dump_file, " %s ", get_tree_code_name (code));
4566 |       print_generic_expr (dump_file, ops[1], TDF_SLIM);
4567 |       fprintf (dump_file, " == %s\n",
4568 |                integer_zerop (result) ? "false" : "true");
4569 |     }
4570 |   vn_nary_op_t vno1 = alloc_vn_nary_op (length, NULL_TREE, value_id);
4571 |   init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
4572 |   vno1->predicated_values = 1;
4573 |   vno1->u.values = (vn_pval *) obstack_alloc (&vn_tables_obstack,
4574 |                                               sizeof (vn_pval));
4575 |   vno1->u.values->next = NULL;
4576 |   vno1->u.values->result = result;
4577 |   vno1->u.values->n = 1;
4578 |   vno1->u.values->valid_dominated_by_p[0] = pred_e->dest->index;
4579 |   return vn_nary_op_insert_into (vno1, valid_info->nary);
4580 | }
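/* [Editorial sketch -- not part of tree-ssa-sccvn.cc.]  A hypothetical
   caller recording that "x_1 == 0" is known true when edge TE is taken
   (the operand and value_id choices here are illustrative only):

     tree ops[2] = { x_1, build_zero_cst (TREE_TYPE (x_1)) };
     vn_nary_op_insert_pieces_predicated (2, EQ_EXPR, boolean_type_node,
                                          ops, boolean_true_node,
                                          0 /* value_id */, te);

   The single vn_pval allocated above then maps the EQ_EXPR expression
   to boolean_true_node, valid in blocks dominated by te->dest.  */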
4581 | |
4582 | static bool
4583 | dominated_by_p_w_unex (basic_block bb1, basic_block bb2, bool);
4584 |
4585 | static tree
4586 | vn_nary_op_get_predicated_value (vn_nary_op_t vno, basic_block bb)
4587 | {
4588 |   if (! vno->predicated_values)
4589 |     return vno->u.result;
4590 |   for (vn_pval *val = vno->u.values; val; val = val->next)
4591 |     for (unsigned i = 0; i < val->n; ++i)
4592 |       /* Do not handle backedge executability optimistically since
4593 |          when figuring out whether to iterate we do not consider
4594 |          changed predication.  */
4595 |       if (dominated_by_p_w_unex
4596 |             (bb, BASIC_BLOCK_FOR_FN (cfun, val->valid_dominated_by_p[i]),
4597 |              false))
4598 |         return val->result;
4599 |   return NULL_TREE;
4600 | }
4601 | |
4602 | /* Insert the rhs of STMT into the current hash table with a value number of
4603 |    RESULT.  */
4604 |
4605 | static vn_nary_op_t
4606 | vn_nary_op_insert_stmt (gimple *stmt, tree result)
4607 | {
4608 |   vn_nary_op_t vno1
4609 |     = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
4610 |                         result, VN_INFO (result)->value_id);
4611 |   init_vn_nary_op_from_stmt (vno1, as_a <gassign *> (stmt));
4612 |   return vn_nary_op_insert_into (vno1, valid_info->nary);
4613 | }
4614 | |
4615 | /* Compute a hashcode for PHI operation VP1 and return it.  */
4616 |
4617 | static inline hashval_t
4618 | vn_phi_compute_hash (vn_phi_t vp1)
4619 | {
4620 |   inchash::hash hstate;
4621 |   tree phi1op;
4622 |   tree type;
4623 |   edge e;
4624 |   edge_iterator ei;
4625 |
4626 |   hstate.add_int (EDGE_COUNT (vp1->block->preds));
4627 |   switch (EDGE_COUNT (vp1->block->preds))
4628 |     {
4629 |     case 1:
4630 |       break;
4631 |     case 2:
4632 |       /* When this is a PHI node subject to CSE for different blocks
4633 |          avoid hashing the block index.  */
4634 |       if (vp1->cclhs)
4635 |         break;
4636 |       /* Fallthru.  */
4637 |     default:
4638 |       hstate.add_int (vp1->block->index);
4639 |     }
4640 |
4641 |   /* If all PHI arguments are constants we need to distinguish
4642 |      the PHI node via its type.  */
4643 |   type = vp1->type;
4644 |   hstate.merge_hash (vn_hash_type (type));
4645 |
4646 |   FOR_EACH_EDGE (e, ei, vp1->block->preds)
4647 |     {
4648 |       /* Don't hash backedge values; they need to be handled as VN_TOP
4649 |          for optimistic value-numbering.  */
4650 |       if (e->flags & EDGE_DFS_BACK)
4651 |         continue;
4652 |
4653 |       phi1op = vp1->phiargs[e->dest_idx];
4654 |       if (phi1op == VN_TOP)
4655 |         continue;
4656 |       inchash::add_expr (phi1op, hstate);
4657 |     }
4658 |
4659 |   return hstate.end ();
4660 | }
4661 | |
4662 | |
4663 | /* Return true if COND1 and COND2 represent the same condition; set
4664 |    *INVERTED_P if one needs to be inverted to make it the same as
4665 |    the other.  */
4666 |
4667 | static bool
4668 | cond_stmts_equal_p (gcond *cond1, tree lhs1, tree rhs1,
4669 |                     gcond *cond2, tree lhs2, tree rhs2, bool *inverted_p)
4670 | {
4671 |   enum tree_code code1 = gimple_cond_code (cond1);
4672 |   enum tree_code code2 = gimple_cond_code (cond2);
4673 |
4674 |   *inverted_p = false;
4675 |   if (code1 == code2)
4676 |     ;
4677 |   else if (code1 == swap_tree_comparison (code2))
4678 |     std::swap (lhs2, rhs2);
4679 |   else if (code1 == invert_tree_comparison (code2, HONOR_NANS (lhs2)))
4680 |     *inverted_p = true;
4681 |   else if (code1 == invert_tree_comparison
4682 |              (swap_tree_comparison (code2), HONOR_NANS (lhs2)))
4683 |     {
4684 |       std::swap (lhs2, rhs2);
4685 |       *inverted_p = true;
4686 |     }
4687 |   else
4688 |     return false;
4689 |
4690 |   return ((expressions_equal_p (lhs1, lhs2)
4691 |            && expressions_equal_p (rhs1, rhs2))
4692 |           || (commutative_tree_code (code1)
4693 |               && expressions_equal_p (lhs1, rhs2)
4694 |               && expressions_equal_p (rhs1, lhs2)));
4695 | }
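/* [Editorial sketch -- not part of tree-ssa-sccvn.cc.]  With COND1
   being "a_1 < b_2", the cases accepted above are:

     b_2 > a_1    // swap_tree_comparison: operands swapped
     a_1 >= b_2   // invert_tree_comparison: *INVERTED_P set
     b_2 <= a_1   // swapped and inverted

   (modulo HONOR_NANS for floating-point operands), while e.g.
   "a_1 < c_3" fails the final expressions_equal_p checks.  */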
4696 | |
4697 | /* Compare two phi entries for equality, ignoring VN_TOP arguments.  */
4698 |
4699 | static int
4700 | vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2)
4701 | {
4702 |   if (vp1->hashcode != vp2->hashcode)
4703 |     return false;
4704 |
4705 |   if (vp1->block != vp2->block)
4706 |     {
4707 |       if (EDGE_COUNT (vp1->block->preds) != EDGE_COUNT (vp2->block->preds))
4708 |         return false;
4709 |
4710 |       switch (EDGE_COUNT (vp1->block->preds))
4711 |         {
4712 |         case 1:
4713 |           /* Single-arg PHIs are just copies.  */
4714 |           break;
4715 |
4716 |         case 2:
4717 |           {
4718 |             /* Make sure both PHIs are classified as CSEable.  */
4719 |             if (! vp1->cclhs || ! vp2->cclhs)
4720 |               return false;
4721 |
4722 |             /* Rule out backedges into the PHI.  */
4723 |             gcc_checking_assert
4724 |               (vp1->block->loop_father->header != vp1->block
4725 |                && vp2->block->loop_father->header != vp2->block);
4726 |
4727 |             /* If the PHI nodes do not have compatible types
4728 |                they are not the same.  */
4729 |             if (!types_compatible_p (vp1->type, vp2->type))
4730 |               return false;
4731 |
4732 |             /* If the immediate dominators end in switch stmts, multiple
4733 |                values may end up in the same PHI arg via intermediate
4734 |                CFG merges.  */
4735 |             basic_block idom1
4736 |               = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
4737 |             basic_block idom2
4738 |               = get_immediate_dominator (CDI_DOMINATORS, vp2->block);
4739 |             gcc_checking_assert (EDGE_COUNT (idom1->succs) == 2
4740 |                                  && EDGE_COUNT (idom2->succs) == 2);
4741 |
4742 |             /* Verify the controlling stmt is the same.  */
4743 |             gcond *last1 = as_a <gcond *> (last_stmt (idom1));
4744 |             gcond *last2 = as_a <gcond *> (last_stmt (idom2));
4745 |             bool inverted_p;
4746 |             if (! cond_stmts_equal_p (last1, vp1->cclhs, vp1->ccrhs,
4747 |                                       last2, vp2->cclhs, vp2->ccrhs,
4748 |                                       &inverted_p))
4749 |               return false;
4750 |
4751 |             /* Get at true/false controlled edges into the PHI.  */
4752 |             edge te1, te2, fe1, fe2;
4753 |             if (! extract_true_false_controlled_edges (idom1, vp1->block,
4754 |                                                        &te1, &fe1)
4755 |                 || ! extract_true_false_controlled_edges (idom2, vp2->block,
4756 |                                                           &te2, &fe2))
4757 |               return false;
4758 |
4759 |             /* Swap edges if the second condition is the inverted of the
4760 |                first.  */
4761 |             if (inverted_p)
4762 |               std::swap (te2, fe2);
4763 |
4764 |             /* Since we do not know which edge will be executed we have
4765 |                to be careful when matching VN_TOP.  Be conservative and
4766 |                only match VN_TOP == VN_TOP for now; we could allow
4767 |                VN_TOP on the not prevailing PHI though.  See for example
4768 |                PR102920.  */
4769 |             if (! expressions_equal_p (vp1->phiargs[te1->dest_idx],
4770 |                                        vp2->phiargs[te2->dest_idx], false)
4771 |                 || ! expressions_equal_p (vp1->phiargs[fe1->dest_idx],
4772 |                                           vp2->phiargs[fe2->dest_idx], false))
4773 |               return false;
4774 |
4775 |             return true;
4776 |           }
4777 |
4778 |         default:
4779 |           return false;
4780 |         }
4781 |     }
4782 |
4783 |   /* If the PHI nodes do not have compatible types
4784 |      they are not the same.  */
4785 |   if (!types_compatible_p (vp1->type, vp2->type))
4786 |     return false;
4787 |
4788 |   /* Any phi in the same block will have its arguments in the
4789 |      same edge order, because of how we store phi nodes.  */
4790 |   unsigned nargs = EDGE_COUNT (vp1->block->preds);
4791 |   for (unsigned i = 0; i < nargs; ++i)
4792 |     {
4793 |       tree phi1op = vp1->phiargs[i];
4794 |       tree phi2op = vp2->phiargs[i];
4795 |       if (phi1op == phi2op)
4796 |         continue;
4797 |       if (!expressions_equal_p (phi1op, phi2op, false))
4798 |         return false;
4799 |     }
4800 |
4801 |   return true;
4802 | }
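/* [Editorial sketch -- not part of tree-ssa-sccvn.cc.]  The
   two-predecessor case above lets PHIs in different merge blocks get
   the same value number when they are controlled by matching
   conditions, e.g. (made-up block and SSA numbers):

     bb3: if (a_1 < 0) goto bb4; else goto bb5;
     bb6: x_2 = PHI <c_7(bb4), d_8(bb5)>

     bb13: if (0 > a_1) goto bb14; else goto bb15;
     bb16: y_3 = PHI <c_7(bb14), d_8(bb15)>

   cond_stmts_equal_p matches the two conditions via
   swap_tree_comparison, the true/false controlled edges are extracted
   from each immediate dominator, and the arguments reached over
   corresponding edges are compared, so y_3 can reuse x_2's value
   number.  */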
4803 | |
4804 | /* Lookup PHI in the current hash table, and return the resulting
4805 |    value number if it exists in the hash table.  Return NULL_TREE if
4806 |    it does not exist in the hash table.  */
4807 |
4808 | static tree
4809 | vn_phi_lookup (gimple *phi, bool backedges_varying_p)
4810 | {
4811 |   vn_phi_s **slot;
4812 |   struct vn_phi_s *vp1;
4813 |   edge e;
4814 |   edge_iterator ei;
4815 |
4816 |   vp1 = XALLOCAVAR (struct vn_phi_s,
4817 |                     sizeof (struct vn_phi_s)
4818 |                     + (gimple_phi_num_args (phi) - 1) * sizeof (tree));
4819 |
4820 |   /* Canonicalize the SSA_NAMEs to their value number.  */
4821 |   FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
4822 |     {
4823 |       tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
4824 |       if (TREE_CODE (def) == SSA_NAME
4825 |           && (!backedges_varying_p || !(e->flags & EDGE_DFS_BACK)))
4826 |         {
4827 |           if (!virtual_operand_p (def)
4828 |               && ssa_undefined_value_p (def, false))
4829 |             def = VN_TOP;
4830 |           else
4831 |             def = SSA_VAL (def);
4832 |         }
4833 |       vp1->phiargs[e->dest_idx] = def;
4834 |     }
4835 |   vp1->type = TREE_TYPE (gimple_phi_result (phi));
4836 |   vp1->block = gimple_bb (phi);
4837 |   /* Extract values of the controlling condition.  */
4838 |   vp1->cclhs = NULL_TREE;
4839 |   vp1->ccrhs = NULL_TREE;
4840 |   if (EDGE_COUNT (vp1->block->preds) == 2
4841 |       && vp1->block->loop_father->header != vp1->block)
4842 |     {
4843 |       basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
4844 |       if (EDGE_COUNT (idom1->succs) == 2)
4845 |         if (gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1)))
4846 |           {
4847 |             /* ??? We want to use SSA_VAL here.  But possibly not
4848 |                allow VN_TOP.  */
4849 |             vp1->cclhs = vn_valueize (gimple_cond_lhs (last1));
4850 |             vp1->ccrhs = vn_valueize (gimple_cond_rhs (last1));
4851 |           }
4852 |     }
4853 |   vp1->hashcode = vn_phi_compute_hash (vp1);
4854 |   slot = valid_info->phis->find_slot_with_hash (vp1, vp1->hashcode, NO_INSERT);
4855 |   if (!slot)
4856 |     return NULL_TREE;
4857 |   return (*slot)->result;
4858 | }
4859 | |
4860 | /* Insert PHI into the current hash table with a value number of
4861 |    RESULT.  */
4862 |
4863 | static vn_phi_t
4864 | vn_phi_insert (gimple *phi, tree result, bool backedges_varying_p)
4865 | {
4866 |   vn_phi_s **slot;
4867 |   vn_phi_t vp1 = (vn_phi_t) obstack_alloc (&vn_tables_obstack,
4868 |                                            sizeof (vn_phi_s)
4869 |                                            + ((gimple_phi_num_args (phi) - 1)
4870 |                                               * sizeof (tree)));
4871 |   edge e;
4872 |   edge_iterator ei;
4873 |
4874 |   /* Canonicalize the SSA_NAMEs to their value number.  */
4875 |   FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
4876 |     {
4877 |       tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
4878 |       if (TREE_CODE (def) == SSA_NAME
4879 |           && (!backedges_varying_p || !(e->flags & EDGE_DFS_BACK)))
4880 |         {
4881 |           if (!virtual_operand_p (def)
4882 |               && ssa_undefined_value_p (def, false))
4883 |             def = VN_TOP;
4884 |           else
4885 |             def = SSA_VAL (def);
4886 |         }
4887 |       vp1->phiargs[e->dest_idx] = def;
4888 |     }
4889 |   vp1->value_id = VN_INFO (result)->value_id;
4890 |   vp1->type = TREE_TYPE (gimple_phi_result (phi));
4891 |   vp1->block = gimple_bb (phi);
4892 |   /* Extract values of the controlling condition.  */
4893 |   vp1->cclhs = NULL_TREE;
4894 |   vp1->ccrhs = NULL_TREE;
4895 |   if (EDGE_COUNT (vp1->block->preds) == 2
4896 |       && vp1->block->loop_father->header != vp1->block)
4897 |     {
4898 |       basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
4899 |       if (EDGE_COUNT (idom1->succs) == 2)
4900 |         if (gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1)))
4901 |           {
4902 |             /* ??? We want to use SSA_VAL here.  But possibly not
4903 |                allow VN_TOP.  */
4904 |             vp1->cclhs = vn_valueize (gimple_cond_lhs (last1));
4905 |             vp1->ccrhs = vn_valueize (gimple_cond_rhs (last1));
4906 |           }
4907 |     }
4908 |   vp1->result = result;
4909 |   vp1->hashcode = vn_phi_compute_hash (vp1);
4910 |
4911 |   slot = valid_info->phis->find_slot_with_hash (vp1, vp1->hashcode, INSERT);
4912 |   gcc_assert (!*slot);
4913 |
4914 |   *slot = vp1;
4915 |   vp1->next = last_inserted_phi;
4916 |   last_inserted_phi = vp1;
4917 |   return vp1;
4918 | }
4919 | |
4920 | |
4921 | /* Return true if BB1 is dominated by BB2 taking into account edges
4922 |    that are not executable.  When ALLOW_BACK is false consider
4923 |    non-executable backedges as executable.  */
4924 |
4925 | static bool
4926 | dominated_by_p_w_unex (basic_block bb1, basic_block bb2, bool allow_back)
4927 | {
4928 |   edge_iterator ei;
4929 |   edge e;
4930 |
4931 |   if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
4932 |     return true;
4933 |
4934 |   /* Before iterating we'd like to know if there exists a
4935 |      (executable) path from bb2 to bb1 at all; if not we can
4936 |      directly return false.  For now simply iterate once.  */
4937 |
4938 |   /* Iterate to the single executable bb1 predecessor.  */
4939 |   if (EDGE_COUNT (bb1->preds) > 1)
4940 |     {
4941 |       edge prede = NULL;
4942 |       FOR_EACH_EDGE (e, ei, bb1->preds)
4943 |         if ((e->flags & EDGE_EXECUTABLE)
4944 |             || (!allow_back && (e->flags & EDGE_DFS_BACK)))
4945 |           {
4946 |             if (prede)
4947 |               {
4948 |                 prede = NULL;
4949 |                 break;
4950 |               }
4951 |             prede = e;
4952 |           }
4953 |       if (prede)
4954 |         {
4955 |           bb1 = prede->src;
4956 |
4957 |           /* Re-do the dominance check with changed bb1.  */
4958 |           if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
4959 |             return true;
4960 |         }
4961 |     }
4962 |
4963 |   /* Iterate to the single executable bb2 successor.  */
4964 |   if (EDGE_COUNT (bb2->succs) > 1)
4965 |     {
4966 |       edge succe = NULL;
4967 |       FOR_EACH_EDGE (e, ei, bb2->succs)
4968 |         if ((e->flags & EDGE_EXECUTABLE)
4969 |             || (!allow_back && (e->flags & EDGE_DFS_BACK)))
4970 |           {
4971 |             if (succe)
4972 |               {
4973 |                 succe = NULL;
4974 |                 break;
4975 |               }
4976 |             succe = e;
4977 |           }
4978 |       if (succe)
4979 |         {
4980 |           /* Verify the reached block is only reached through succe.
4981 |              If there is only one edge we can spare us the dominator
4982 |              check and iterate directly.  */
4983 |           if (EDGE_COUNT (succe->dest->preds) > 1)
4984 |             {
4985 |               FOR_EACH_EDGE (e, ei, succe->dest->preds)
4986 |                 if (e != succe
4987 |                     && ((e->flags & EDGE_EXECUTABLE)
4988 |                         || (!allow_back && (e->flags & EDGE_DFS_BACK))))
4989 |                   {
4990 |                     succe = NULL;
4991 |                     break;
4992 |                   }
4993 |             }
4994 |           if (succe)
4995 |             {
4996 |               bb2 = succe->dest;
4997 |
4998 |               /* Re-do the dominance check with changed bb2.  */
4999 |               if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
5000 |                 return true;
5001 |             }
5002 |         }
5003 |     }
5004 |
5005 |   /* We could now iterate updating bb1 / bb2.  */
5006 |   return false;
5007 | }
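/* [Editorial sketch -- not part of tree-ssa-sccvn.cc.]  Example of the
   refinement above (block numbers made up):

            bb2
           /    \
         bb3    bb4   <- edge bb2->bb4 not EDGE_EXECUTABLE
           \    /
            bb5

   With BB1 = bb5 and BB2 = bb3 the plain dominance check fails, but
   bb5's only executable predecessor edge is bb3->bb5, so the first
   loop above replaces BB1 by bb3 and the re-done dominance check
   succeeds.  */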
5008 | |
5009 | /* Set the value number of FROM to TO, return true if it has changed
5010 |    as a result.  */
5011 |
5012 | static inline bool
5013 | set_ssa_val_to (tree from, tree to)
5014 | {
5015 |   vn_ssa_aux_t from_info = VN_INFO (from);
5016 |   tree currval = from_info->valnum; // SSA_VAL (from)
5017 |   poly_int64 toff, coff;
5018 |   bool curr_undefined = false;
5019 |   bool curr_invariant = false;
5020 |
5021 |   /* The only thing we allow as value numbers are ssa_names
5022 |      and invariants.  So assert that here.  We don't allow VN_TOP
5023 |      as visiting a stmt should produce a value-number other than
5024 |      that.
5025 |      ??? Still VN_TOP can happen for unreachable code, so force
5026 |      it to varying in that case.  Not all code is prepared to
5027 |      get VN_TOP on valueization.  */
5028 |   if (to == VN_TOP)
5029 |     {
5030 |       /* ??? When iterating and visiting PHI <undef, backedge-value>
5031 |          for the first time we rightfully get VN_TOP and we need to
5032 |          preserve that to optimize for example gcc.dg/tree-ssa/ssa-sccvn-2.c.
5033 |          With SCCVN we were simply lucky we iterated the other PHI
5034 |          cycles first and thus visited the backedge-value DEF.  */
5035 |       if (currval == VN_TOP)
5036 |         goto set_and_exit;
5037 |       if (dump_file && (dump_flags & TDF_DETAILS))
5038 |         fprintf (dump_file, "Forcing value number to varying on "
5039 |                  "receiving VN_TOP\n");
5040 |       to = from;
5041 |     }
5042 |
5043 |   gcc_checking_assert (to != NULL_TREE
5044 |                        && ((TREE_CODE (to) == SSA_NAME
5045 |                             && (to == from || SSA_VAL (to) == to))
5046 |                            || is_gimple_min_invariant (to)));
5047 |
5048 |   if (from != to)
5049 |     {
5050 |       if (currval == from)
5051 |         {
5052 |           if (dump_file && (dump_flags & TDF_DETAILS))
5053 |             {
5054 |               fprintf (dump_file, "Not changing value number of ");
5055 |               print_generic_expr (dump_file, from);
5056 |               fprintf (dump_file, " from VARYING to ");
5057 |               print_generic_expr (dump_file, to);
5058 |               fprintf (dump_file, "\n");
5059 |             }
5060 |           return false;
5061 |         }
5062 |       curr_invariant = is_gimple_min_invariant (currval);
5063 |       curr_undefined = (TREE_CODE (currval) == SSA_NAME
5064 |                         && !virtual_operand_p (currval)
5065 |                         && ssa_undefined_value_p (currval, false));
5066 |       if (currval != VN_TOP
5067 |           && !curr_invariant
5068 |           && !curr_undefined
5069 |           && is_gimple_min_invariant (to))
5070 |         {
5071 |           if (dump_file && (dump_flags & TDF_DETAILS))
5072 |             {
5073 |               fprintf (dump_file, "Forcing VARYING instead of changing "
5074 |                        "value number of ");
5075 |               print_generic_expr (dump_file, from);
5076 |               fprintf (dump_file, " from ");
5077 |               print_generic_expr (dump_file, currval);
5078 |               fprintf (dump_file, " (non-constant) to ");
5079 |               print_generic_expr (dump_file, to);
5080 |               fprintf (dump_file, " (constant)\n");
5081 |             }
5082 |           to = from;
5083 |         }
5084 |       else if (currval != VN_TOP
5085 |                && !curr_undefined
5086 |                && TREE_CODE (to) == SSA_NAME
5087 |                && !virtual_operand_p (to)
5088 |                && ssa_undefined_value_p (to, false))
5089 |         {
5090 |           if (dump_file && (dump_flags & TDF_DETAILS))
5091 |             {
5092 |               fprintf (dump_file, "Forcing VARYING instead of changing "
5093 |                        "value number of ");
5094 |               print_generic_expr (dump_file, from);
5095 |               fprintf (dump_file, " from ");
5096 |               print_generic_expr (dump_file, currval);
5097 |               fprintf (dump_file, " (non-undefined) to ");
5098 |               print_generic_expr (dump_file, to);
5099 |               fprintf (dump_file, " (undefined)\n");
5100 |             }
5101 |           to = from;
5102 |         }
5103 |       else if (TREE_CODE (to) == SSA_NAME
5104 |                && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
5105 |         to = from;
5106 |     }
5107 |
5108 |  set_and_exit:
5109 |   if (dump_file && (dump_flags & TDF_DETAILS))
5110 |     {
5111 |       fprintf (dump_file, "Setting value number of ");
5112 |       print_generic_expr (dump_file, from);
5113 |       fprintf (dump_file, " to ");
5114 |       print_generic_expr (dump_file, to);
5115 |     }
5116 |
5117 |   if (currval != to
5118 |       && !operand_equal_p (currval, to, 0)
5119 |       /* Different undefined SSA names are not actually different.  See
5120 |          PR82320 for a testcase where we'd otherwise not terminate iteration.  */
5121 |       && !(curr_undefined
5122 |            && TREE_CODE (to) == SSA_NAME
5123 |            && !virtual_operand_p (to)
5124 |            && ssa_undefined_value_p (to, false))
5125 |       /* ??? For addresses involving volatile objects or types operand_equal_p
5126 |          does not reliably detect ADDR_EXPRs as equal.  We know we are only
5127 |          getting invariant gimple addresses here, so can use
5128 |          get_addr_base_and_unit_offset to do this comparison.  */
5129 |       && !(TREE_CODE (currval) == ADDR_EXPR
5130 |            && TREE_CODE (to) == ADDR_EXPR
5131 |            && (get_addr_base_and_unit_offset (TREE_OPERAND (currval, 0), &coff)
5132 |                == get_addr_base_and_unit_offset (TREE_OPERAND (to, 0), &toff))
5133 |            && known_eq (coff, toff)))
5134 |     {
5135 |       if (to != from
5136 |           && currval != VN_TOP
5137 |           && !curr_undefined
5138 |           /* We do not want to allow lattice transitions from one value
5139 |              to another since that may lead to not terminating iteration
5140 |              (see PR95049).  Since there's no convenient way to check
5141 |              for the allowed transition of VAL -> PHI (loop entry value,
5142 |              same on two PHIs, to same PHI result) we restrict the check
5143 |              to invariants.  */
5144 |           && curr_invariant
5145 |           && is_gimple_min_invariant (to))
5146 |         {
5147 |           if (dump_file && (dump_flags & TDF_DETAILS))
5148 |             fprintf (dump_file, " forced VARYING");
5149 |           to = from;
5150 |         }
5151 |       if (dump_file && (dump_flags & TDF_DETAILS))
5152 |         fprintf (dump_file, " (changed)\n");
5153 |       from_info->valnum = to;
5154 |       return true;
5155 |     }
5156 |   if (dump_file && (dump_flags & TDF_DETAILS))
5157 |     fprintf (dump_file, "\n");
5158 |   return false;
5159 | }
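/* [Editorial sketch -- not part of tree-ssa-sccvn.cc.]  The lattice
   transitions the function above allows, summarized:

     VN_TOP        -> anything          first visit, always OK
     undefined SSA -> anything          undefined names are interchangeable
     non-constant  -> constant          forced to VARYING (to = from)
     non-undefined -> undefined SSA     forced to VARYING
     constant C1   -> constant C2       forced to VARYING (PR95049)

   Only downward (or sideways-to-equal) moves are permitted, which is
   what guarantees the RPO iteration terminates.  */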
5160 | |
5161 | /* Set the value number of each definition in STMT to the definition
5162 |    itself.  Return true if a value number changed.  */
5163 |
5164 | static bool
5165 | defs_to_varying (gimple *stmt)
5166 | {
5167 |   bool changed = false;
5168 |   ssa_op_iter iter;
5169 |   def_operand_p defp;
5170 |
5171 |   FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
5172 |     {
5173 |       tree def = DEF_FROM_PTR (defp);
5174 |       changed |= set_ssa_val_to (def, def);
5175 |     }
5176 |   return changed;
5177 | }
5178 | |
5179 | /* Visit a copy between LHS and RHS, return true if the value number
5180 |    changed.  */
5181 |
5182 | static bool
5183 | visit_copy (tree lhs, tree rhs)
5184 | {
5185 |   /* Valueize.  */
5186 |   rhs = SSA_VAL (rhs);
5187 |
5188 |   return set_ssa_val_to (lhs, rhs);
5189 | }
5190 | |
5191 | /* Lookup a value for OP in type WIDE_TYPE where the value in the type
5192 |    of OP is the same.  */
5193 |
5194 | static tree
5195 | valueized_wider_op (tree wide_type, tree op, bool allow_truncate)
5196 | {
5197 |   if (TREE_CODE (op) == SSA_NAME)
5198 |     op = vn_valueize (op);
5199 |
5200 |   /* Either we have the op widened available.  */
5201 |   tree ops[3] = {};
5202 |   ops[0] = op;
5203 |   tree tem = vn_nary_op_lookup_pieces (1, NOP_EXPR,
5204 |                                        wide_type, ops, NULL);
5205 |   if (tem)
5206 |     return tem;
5207 |
5208 |   /* Or the op is truncated from some existing value.  */
5209 |   if (allow_truncate && TREE_CODE (op) == SSA_NAME)
5210 |     {
5211 |       gimple *def = SSA_NAME_DEF_STMT (op);
5212 |       if (is_gimple_assign (def)
5213 |           && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
5214 |         {
5215 |           tem = gimple_assign_rhs1 (def);
5216 |           if (useless_type_conversion_p (wide_type, TREE_TYPE (tem)))
5217 |             {
5218 |               if (TREE_CODE (tem) == SSA_NAME)
5219 |                 tem = vn_valueize (tem);
5220 |               return tem;
5221 |             }
5222 |         }
5223 |     }
5224 |
5225 |   /* For constants simply extend it.  */
5226 |   if (TREE_CODE (op) == INTEGER_CST)
5227 |     return wide_int_to_tree (wide_type, wi::to_widest (op));
5228 |
5229 |   return NULL_TREE;
5230 | }
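/* [Editorial sketch -- not part of tree-ssa-sccvn.cc.]  For
   WIDE_TYPE = unsigned int and OP = s_1 of type unsigned short, the
   three ways a value is found above, in order:

     u_2 = (unsigned int) s_1;    // NOP_EXPR lookup succeeds -> u_2
     s_1 = (unsigned short) u_3;  // allow_truncate: peel the truncation -> u_3
     OP == 42                     // INTEGER_CST: extend the constant

   Otherwise NULL_TREE is returned and the caller abandons the
   wider-operation match.  */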
5231 | |
5232 | /* Visit a nary operator RHS, value number it, and return true if the
5233 |    value number of LHS has changed as a result.  */
5234 |
5235 | static bool
5236 | visit_nary_op (tree lhs, gassign *stmt)
5237 | {
5238 |   vn_nary_op_t vnresult;
5239 |   tree result = vn_nary_op_lookup_stmt (stmt, &vnresult);
5240 |   if (! result && vnresult)
5241 |     result = vn_nary_op_get_predicated_value (vnresult, gimple_bb (stmt));
5242 |   if (result)
5243 |     return set_ssa_val_to (lhs, result);
5244 |
5245 |   /* Do some special pattern matching for redundancies of operations
5246 |      in different types.  */
5247 |   enum tree_code code = gimple_assign_rhs_code (stmt);
5248 |   tree type = TREE_TYPE (lhs);
5249 |   tree rhs1 = gimple_assign_rhs1 (stmt);
5250 |   switch (code)
5251 |     {
5252 |     CASE_CONVERT:
5253 |       /* Match arithmetic done in a different type where we can easily
5254 |          substitute the result from some earlier sign-changed or widened
5255 |          operation.  */
5256 |       if (INTEGRAL_TYPE_P (type)
5257 |           && TREE_CODE (rhs1) == SSA_NAME
5258 |           /* We only handle sign-changes, zero-extension -> & mask or
5259 |              sign-extension if we know the inner operation doesn't
5260 |              overflow.  */
5261 |           && (((TYPE_UNSIGNED (TREE_TYPE (rhs1))
5262 |                 || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
5263 |                     && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (rhs1))))
5264 |                && TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (rhs1)))
5265 |               || TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (rhs1))))
5266 |         {
5267 |           gassign *def = dyn_cast <gassign *> (SSA_NAME_DEF_STMT (rhs1));
5268 |           if (def
5269 |               && (gimple_assign_rhs_code (def) == PLUS_EXPR
5270 |                   || gimple_assign_rhs_code (def) == MINUS_EXPR
5271 |                   || gimple_assign_rhs_code (def) == MULT_EXPR))
5272 |             {
5273 |               tree ops[3] = {};
5274 |               /* When requiring a sign-extension we cannot model a
5275 |                  previous truncation with a single op so don't bother.  */
5276 |               bool allow_truncate = TYPE_UNSIGNED (TREE_TYPE (rhs1));
5277 |               /* Either we have the op widened available.  */
5278 |               ops[0] = valueized_wider_op (type, gimple_assign_rhs1 (def),
5279 |                                            allow_truncate);
5280 |               if (ops[0])
5281 |                 ops[1] = valueized_wider_op (type, gimple_assign_rhs2 (def),
5282 |                                              allow_truncate);
5283 |               if (ops[0] && ops[1])
5284 |                 {
5285 |                   ops[0] = vn_nary_op_lookup_pieces
5286 |                       (2, gimple_assign_rhs_code (def), type, ops, NULL);
5287 |                   /* We have wider operation available.  */
5288 |                   if (ops[0]
5289 |                       /* If the leader is a wrapping operation we can
5290 |                          insert it for code hoisting w/o introducing
5291 |                          undefined overflow.  If it is not it has to
5292 |                          be available.  See PR86554.  */
5293 |                       && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (ops[0]))
5294 |                           || (rpo_avail && vn_context_bb
5295 |                               && rpo_avail->eliminate_avail (vn_context_bb,
5296 |                                                              ops[0]))))
5297 |                     {
5298 |                       unsigned lhs_prec = TYPE_PRECISION (type);
5299 |                       unsigned rhs_prec = TYPE_PRECISION (TREE_TYPE (rhs1));
5300 |                       if (lhs_prec == rhs_prec
5301 |                           || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
5302 |                               && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (rhs1))))
5303 |                         {
5304 |                           gimple_match_op match_op (gimple_match_cond::UNCOND,
5305 |                                                     NOP_EXPR, type, ops[0]);
5306 |                           result = vn_nary_build_or_lookup (&match_op);
5307 |                           if (result)
5308 |                             {
5309 |                               bool changed = set_ssa_val_to (lhs, result);
5310 |                               vn_nary_op_insert_stmt (stmt, result);
5311 |                               return changed;
5312 |                             }
5313 |                         }
5314 |                       else
5315 |                         {
5316 |                           tree mask = wide_int_to_tree
5317 |                               (type, wi::mask (rhs_prec, false, lhs_prec));
5318 |                           gimple_match_op match_op (gimple_match_cond::UNCOND,
5319 |                                                     BIT_AND_EXPR,
5320 |                                                     TREE_TYPE (lhs),
5321 |                                                     ops[0], mask);
5322 |                           result = vn_nary_build_or_lookup (&match_op);
5323 |                           if (result)
5324 |                             {
5325 |                               bool changed = set_ssa_val_to (lhs, result);
5326 |                               vn_nary_op_insert_stmt (stmt, result);
5327 |                               return changed;
5328 |                             }
5329 |                         }
5330 |                     }
5331 |                 }
5332 |             }
5333 |         }
5334 |       break;
5335 | case BIT_AND_EXPR: |
5336 | if (INTEGRAL_TYPE_P (type)
5337 | && TREE_CODE (rhs1) == SSA_NAME
5338 | && TREE_CODE (gimple_assign_rhs2 (stmt)) == INTEGER_CST
5339 | && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)
5340 | && default_vn_walk_kind != VN_NOWALK
5341 | && CHAR_BIT == 8
5342 | && BITS_PER_UNIT == 8
5343 | && BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN
5344 | && !integer_all_onesp (gimple_assign_rhs2 (stmt)) |
5345 | && !integer_zerop (gimple_assign_rhs2 (stmt))) |
5346 | { |
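/* If RHS1 is defined by a non-volatile load, redo the reference
   lookup with the constant mask passed as last argument, which lets
   the lookup succeed when only the bits covered by the mask are
   known. */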
5347 | gassign *ass = dyn_cast <gassign *> (SSA_NAME_DEF_STMT (rhs1));
5348 | if (ass |
5349 | && !gimple_has_volatile_ops (ass) |
5350 | && vn_get_stmt_kind (ass) == VN_REFERENCE) |
5351 | { |
5352 | tree last_vuse = gimple_vuse (ass); |
5353 | tree op = gimple_assign_rhs1 (ass); |
5354 | tree result = vn_reference_lookup (op, gimple_vuse (ass), |
5355 | default_vn_walk_kind, |
5356 | NULL, true, &last_vuse,
5357 | gimple_assign_rhs2 (stmt)); |
5358 | if (result |
5359 | && useless_type_conversion_p (TREE_TYPE (result),
5360 | TREE_TYPE (op)))
5361 | return set_ssa_val_to (lhs, result); |
5362 | } |
5363 | } |
5364 | break; |
5365 | case TRUNC_DIV_EXPR: |
5366 | if (TYPE_UNSIGNED (type))
5367 | break; |
5368 | /* Fallthru. */ |
5369 | case RDIV_EXPR: |
5370 | case MULT_EXPR: |
5371 | /* Match up ([-]a){/,*}([-])b with v=a{/,*}b, replacing it with -v. */ |
5372 | if (! HONOR_SIGN_DEPENDENT_ROUNDING (type)) |
5373 | { |
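/* Try both operand positions: if -rhs[i] already has a value number
   and CODE applied to it and rhs[j] is in the table as V, this
   statement computes -V; e.g. a / b matches an existing v = -a / b
   and value numbers to -v. */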
5374 | tree rhs[2]; |
5375 | rhs[0] = rhs1; |
5376 | rhs[1] = gimple_assign_rhs2 (stmt); |
5377 | for (unsigned i = 0; i <= 1; ++i) |
5378 | { |
5379 | unsigned j = i == 0 ? 1 : 0; |
5380 | tree ops[2]; |
5381 | gimple_match_op match_op (gimple_match_cond::UNCOND, |
5382 | NEGATE_EXPR, type, rhs[i]); |
5383 | ops[i] = vn_nary_build_or_lookup_1 (&match_op, false, true); |
5384 | ops[j] = rhs[j]; |
5385 | if (ops[i] |
5386 | && (ops[0] = vn_nary_op_lookup_pieces (2, code, |
5387 | type, ops, NULL)))
5388 | { |
5389 | gimple_match_op match_op (gimple_match_cond::UNCOND, |
5390 | NEGATE_EXPR, type, ops[0]); |
5391 | result = vn_nary_build_or_lookup_1 (&match_op, true, false); |
5392 | if (result) |
5393 | { |
5394 | bool changed = set_ssa_val_to (lhs, result); |
5395 | vn_nary_op_insert_stmt (stmt, result); |
5396 | return changed; |
5397 | } |
5398 | } |
5399 | } |
5400 | } |
5401 | break; |
5402 | case LSHIFT_EXPR: |
5403 | /* For X << C, use the value number of X * (1 << C). */ |
5404 | if (INTEGRAL_TYPE_P (type)
5405 | && TYPE_OVERFLOW_WRAPS (type)
5406 | && !TYPE_SATURATING (type))
5407 | { |
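/* E.g. x << 3 obtains the value number of x * 8; this is safe since
   the type wraps on overflow and the shift count is verified below to
   be a constant less than the precision. */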
5408 | tree rhs2 = gimple_assign_rhs2 (stmt); |
5409 | if (TREE_CODE (rhs2) == INTEGER_CST
5410 | && tree_fits_uhwi_p (rhs2)
5411 | && tree_to_uhwi (rhs2) < TYPE_PRECISION (type))
5412 | { |
5413 | wide_int w = wi::set_bit_in_zero (tree_to_uhwi (rhs2),
5414 | TYPE_PRECISION (type));
5415 | gimple_match_op match_op (gimple_match_cond::UNCOND, |
5416 | MULT_EXPR, type, rhs1, |
5417 | wide_int_to_tree (type, w)); |
5418 | result = vn_nary_build_or_lookup (&match_op); |
5419 | if (result) |
5420 | { |
5421 | bool changed = set_ssa_val_to (lhs, result); |
5422 | if (TREE_CODE (result) == SSA_NAME)
5423 | vn_nary_op_insert_stmt (stmt, result); |
5424 | return changed; |
5425 | } |
5426 | } |
5427 | } |
5428 | break; |
5429 | default: |
5430 | break; |
5431 | } |
5432 | |
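/* Nothing above matched: give the LHS its own value number and record
   the statement for later lookups. */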
5433 | bool changed = set_ssa_val_to (lhs, lhs); |
5434 | vn_nary_op_insert_stmt (stmt, lhs); |
5435 | return changed; |
5436 | } |
5437 | |
5438 | /* Visit a call STMT storing into LHS. Return true if the value number |
5439 | of the LHS has changed as a result. */ |
5440 | |
5441 | static bool |
5442 | visit_reference_op_call (tree lhs, gcall *stmt) |
5443 | { |
5444 | bool changed = false; |
5445 | struct vn_reference_s vr1; |
5446 | vn_reference_t vnresult = NULL;
5447 | tree vdef = gimple_vdef (stmt); |
5448 | modref_summary *summary; |
5449 | |
5450 | /* Non-ssa lhs is handled in copy_reference_ops_from_call. */ |
5451 | if (lhs && TREE_CODE (lhs) != SSA_NAME)
5452 | lhs = NULL_TREE;
5453 | |
5454 | vn_reference_lookup_call (stmt, &vnresult, &vr1); |
5455 | |
5456 | /* If the lookup did not succeed for pure functions try to use |
5457 | modref info to find a candidate to CSE to. */ |
5458 | const unsigned accesses_limit = 8; |
5459 | if (!vnresult |
5460 | && !vdef |
5461 | && lhs |
5462 | && gimple_vuse (stmt) |
5463 | && (((summary = get_modref_function_summary (stmt, NULL))
5464 | && !summary->global_memory_read |
5465 | && summary->load_accesses < accesses_limit) |
5466 | || gimple_call_flags (stmt) & ECF_CONST))
5467 | { |
5468 | /* First search if we can do something useful and build a
5469 | vector of all loads we have to check. */ |
5470 | bool unknown_memory_access = false; |
5471 | auto_vec<ao_ref, accesses_limit> accesses; |
5472 | unsigned load_accesses = summary ? summary->load_accesses : 0; |
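/* The modref-recorded loads are added after the argument loads below;
   reserve room for them so the total stays within ACCESSES_LIMIT. */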
5473 | if (!unknown_memory_access) |
5474 | /* Add loads done as part of setting up the call arguments. |
5475 | That's also necessary for CONST functions which will |
5476 | not have a modref summary. */ |
5477 | for (unsigned i = 0; i < gimple_call_num_args (stmt); ++i) |
5478 | { |
5479 | tree arg = gimple_call_arg (stmt, i); |
5480 | if (TREE_CODE (arg) != SSA_NAME
5481 | && !is_gimple_min_invariant (arg)) |
5482 | { |
5483 | if (accesses.length () >= accesses_limit - load_accesses) |
5484 | { |
5485 | unknown_memory_access = true; |
5486 | break; |
5487 | } |
5488 | accesses.quick_grow (accesses.length () + 1); |
5489 | ao_ref_init (&accesses.last (), arg); |
5490 | } |
5491 | } |
5492 | if (summary && !unknown_memory_access) |
5493 | { |
5494 | /* Add loads as analyzed by IPA modref. */ |
5495 | for (auto base_node : summary->loads->bases) |
5496 | if (unknown_memory_access) |
5497 | break; |
5498 | else for (auto ref_node : base_node->refs) |
5499 | if (unknown_memory_access) |
5500 | break; |
5501 | else for (auto access_node : ref_node->accesses) |
5502 | { |
5503 | accesses.quick_grow (accesses.length () + 1); |
5504 | ao_ref *r = &accesses.last (); |
5505 | if (!access_node.get_ao_ref (stmt, r)) |
5506 | { |
5507 | /* Initialize a ref based on the argument and |
5508 | unknown offset if possible. */ |
5509 | tree arg = access_node.get_call_arg (stmt); |
5510 | if (arg && TREE_CODE (arg) == SSA_NAME)
5511 | arg = SSA_VAL (arg); |
5512 | if (arg |
5513 | && TREE_CODE (arg) == ADDR_EXPR
5514 | && (arg = get_base_address (arg))
5515 | && DECL_P (arg))
5516 | { |
5517 | ao_ref_init (r, arg); |
5518 | r->ref = NULL_TREE;
5519 | r->base = arg; |
5520 | } |
5521 | else |
5522 | { |
5523 | unknown_memory_access = true; |
5524 | break; |
5525 | } |
5526 | } |
5527 | r->base_alias_set = base_node->base; |
5528 | r->ref_alias_set = ref_node->ref; |
5529 | } |
5530 | } |
5531 | |
5532 | /* Walk the VUSE->VDEF chain optimistically trying to find an entry |
5533 | for the call in the hashtable. */ |
5534 | unsigned limit = (unknown_memory_access |
5535 | ? 0 |
5536 | : (param_sccvn_max_alias_queries_per_access
5537 | / (accesses.length () + 1))); |
5538 | tree saved_vuse = vr1.vuse; |
5539 | hashval_t saved_hashcode = vr1.hashcode; |
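/* Only the vuse varies along the walk, so VR1.HASHCODE is updated
   incrementally by subtracting the old SSA name version and adding
   the new one rather than rehashing the whole reference. */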
5540 | while (limit > 0 && !vnresult && !SSA_NAME_IS_DEFAULT_DEF (vr1.vuse))
5541 | { |
5542 | vr1.hashcode = vr1.hashcode - SSA_NAME_VERSION (vr1.vuse);
5543 | gimple *def = SSA_NAME_DEF_STMT (vr1.vuse);
5544 | /* ??? We could use fancy stuff like in walk_non_aliased_vuses, but |
5545 | do not bother for now. */ |
5546 | if (is_a <gphi *> (def)) |
5547 | break; |
5548 | vr1.vuse = vuse_ssa_val (gimple_vuse (def)); |
5549 | vr1.hashcode = vr1.hashcode + SSA_NAME_VERSION (vr1.vuse);
5550 | vn_reference_lookup_1 (&vr1, &vnresult); |
5551 | limit--; |
5552 | } |
5553 | |
5554 | /* If we found a candidate to CSE to, verify it is valid. */
5555 | if (vnresult && !accesses.is_empty ()) |
5556 | { |
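/* Re-walk from the call's own VUSE down to the VUSE the candidate was
   found at; the CSE is invalid if any intervening statement may
   clobber one of the recorded loads. */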
5557 | tree vuse = vuse_ssa_val (gimple_vuse (stmt)); |
5558 | while (vnresult && vuse != vr1.vuse) |
5559 | { |
5560 | gimple *def = SSA_NAME_DEF_STMT (vuse);
5561 | for (auto &ref : accesses) |
5562 | { |
5563 | /* ??? stmt_may_clobber_ref_p_1 does per-stmt constant
5564 | analysis work that we might be able to cache. */
5565 | if (stmt_may_clobber_ref_p_1 (def, &ref, true)) |
5566 | { |
5567 | vnresult = NULL;
5568 | break; |
5569 | } |
5570 | } |
5571 | vuse = vuse_ssa_val (gimple_vuse (def)); |
5572 | } |
5573 | } |
5574 | vr1.vuse = saved_vuse; |
5575 | vr1.hashcode = saved_hashcode; |
5576 | } |
5577 | |
5578 | if (vnresult) |
5579 | { |
5580 | if (vdef) |
5581 | { |
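/* Value number the VDEF: prefer the value of the matched call's VDEF;
   the remaining cases are a non-SSA lhs store and calls found to be
   pure or const. */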
5582 | if (vnresult->result_vdef) |
5583 | changed |= set_ssa_val_to (vdef, vnresult->result_vdef); |
5584 | else if (!lhs && gimple_call_lhs (stmt)) |
5585 | /* If stmt has non-SSA_NAME lhs, value number the vdef to itself, |
5586 | as the call still acts as a lhs store. */ |
5587 | changed |= set_ssa_val_to (vdef, vdef); |
5588 | else |
5589 | /* If the call was discovered to be pure or const, reflect
5590 | that as far as possible. */
5591 | changed |= set_ssa_val_to (vdef, |
5592 | vuse_ssa_val (gimple_vuse (stmt))); |
5593 | } |
5594 | |
5595 | if (!vnresult->result && lhs) |
5596 | vnresult->result = lhs; |
5597 | |
5598 | if (vnresult->result && lhs) |
5599 | changed |= set_ssa_val_to (lhs, vnresult->result); |
5600 | } |
5601 | else |
5602 | { |
5603 | vn_reference_t vr2; |
5604 | vn_reference_s **slot; |
5605 | tree vdef_val = vdef; |
5606 | if (vdef) |
5607 | { |
5608 | /* If we value numbered an indirect function's callee to
5609 | one not clobbering memory, value number its VDEF to its
5610 | VUSE. */
5611 | tree fn = gimple_call_fn (stmt); |
5612 | if (fn && TREE_CODE (fn) == SSA_NAME)
5613 | { |
5614 | fn = SSA_VAL (fn); |
5615 | if (TREE_CODE (fn) == ADDR_EXPR
5616 | && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
5617 | && (flags_from_decl_or_type (TREE_OPERAND (fn, 0))
5 |