Bug Summary

File: build/gcc/tree-ssa-sccvn.c
Warning: line 2807, column 7
Value stored to 'ref2' is never read
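
The deadcode.DeadStores checker behind this warning flags a store whose value is never read before being overwritten or going out of scope. A minimal, hypothetical C sketch of the pattern it reports (illustrative names only, not the actual code at line 2807):

extern void use (int);

void
example (int *p)
{
  int ref2 = *p; /* flagged: the value stored to 'ref2' is never read ...  */
  ref2 = 0;      /* ... because it is overwritten here before any use.  */
  use (ref2);
}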

Annotated Source Code

clang -cc1 -cc1 -triple x86_64-unknown-linux-gnu -analyze -disable-free -disable-llvm-verifier -discard-value-names -main-file-name tree-ssa-sccvn.c -analyzer-store=region -analyzer-opt-analyze-nested-blocks -analyzer-checker=core -analyzer-checker=apiModeling -analyzer-checker=unix -analyzer-checker=deadcode -analyzer-checker=cplusplus -analyzer-checker=security.insecureAPI.UncheckedReturn -analyzer-checker=security.insecureAPI.getpw -analyzer-checker=security.insecureAPI.gets -analyzer-checker=security.insecureAPI.mktemp -analyzer-checker=security.insecureAPI.mkstemp -analyzer-checker=security.insecureAPI.vfork -analyzer-checker=nullability.NullPassedToNonnull -analyzer-checker=nullability.NullReturnedFromNonnull -analyzer-output plist -w -setup-static-analyzer -analyzer-config-compatibility-mode=true -mrelocation-model static -mframe-pointer=none -fmath-errno -fno-rounding-math -mconstructor-aliases -munwind-tables -target-cpu x86-64 -tune-cpu generic -debugger-tuning=gdb -fcoverage-compilation-dir=/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/objdir/gcc -resource-dir /usr/lib64/clang/13.0.0 -D IN_GCC -D HAVE_CONFIG_H -I . -I . -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/. -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../include -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libcpp/include -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libcody -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libdecnumber -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libdecnumber/bid -I ../libdecnumber -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libbacktrace -internal-isystem /usr/bin/../lib64/gcc/x86_64-suse-linux/11/../../../../include/c++/11 -internal-isystem /usr/bin/../lib64/gcc/x86_64-suse-linux/11/../../../../include/c++/11/x86_64-suse-linux -internal-isystem /usr/bin/../lib64/gcc/x86_64-suse-linux/11/../../../../include/c++/11/backward -internal-isystem /usr/lib64/clang/13.0.0/include -internal-isystem /usr/local/include -internal-isystem /usr/bin/../lib64/gcc/x86_64-suse-linux/11/../../../../x86_64-suse-linux/include -internal-externc-isystem /include -internal-externc-isystem /usr/include -O2 -Wno-narrowing -Wwrite-strings -Wno-error=format-diag -Wno-long-long -Wno-variadic-macros -Wno-overlength-strings -fdeprecated-macro -fdebug-compilation-dir=/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/objdir/gcc -ferror-limit 19 -fno-rtti -fgnuc-version=4.2.1 -vectorize-loops -vectorize-slp -analyzer-output=plist-html -analyzer-config silence-checkers=core.NullDereference -faddrsig -D__GCC_HAVE_DWARF2_CFI_ASM=1 -o /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/objdir/clang-static-analyzer/2021-11-20-133755-20252-1/report-53F3OG.plist -x c++ /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-sccvn.c
1/* SCC value numbering for trees
2 Copyright (C) 2006-2021 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dan@dberlin.org>
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify
8it under the terms of the GNU General Public License as published by
9the Free Software Foundation; either version 3, or (at your option)
10any later version.
11
12GCC is distributed in the hope that it will be useful,
13but WITHOUT ANY WARRANTY; without even the implied warranty of
14MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15GNU General Public License for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING3. If not see
19<http://www.gnu.org/licenses/>. */
20
21#include "config.h"
22#include "system.h"
23#include "coretypes.h"
24#include "splay-tree.h"
25#include "backend.h"
26#include "rtl.h"
27#include "tree.h"
28#include "gimple.h"
29#include "ssa.h"
30#include "expmed.h"
31#include "insn-config.h"
32#include "memmodel.h"
33#include "emit-rtl.h"
34#include "cgraph.h"
35#include "gimple-pretty-print.h"
36#include "alias.h"
37#include "fold-const.h"
38#include "stor-layout.h"
39#include "cfganal.h"
40#include "tree-inline.h"
41#include "internal-fn.h"
42#include "gimple-fold.h"
43#include "tree-eh.h"
44#include "gimplify.h"
45#include "flags.h"
46#include "dojump.h"
47#include "explow.h"
48#include "calls.h"
49#include "varasm.h"
50#include "stmt.h"
51#include "expr.h"
52#include "tree-dfa.h"
53#include "tree-ssa.h"
54#include "dumpfile.h"
55#include "cfgloop.h"
56#include "tree-ssa-propagate.h"
57#include "tree-cfg.h"
58#include "domwalk.h"
59#include "gimple-iterator.h"
60#include "gimple-match.h"
61#include "stringpool.h"
62#include "attribs.h"
63#include "tree-pass.h"
64#include "statistics.h"
65#include "langhooks.h"
66#include "ipa-utils.h"
67#include "dbgcnt.h"
68#include "tree-cfgcleanup.h"
69#include "tree-ssa-loop.h"
70#include "tree-scalar-evolution.h"
71#include "tree-ssa-loop-niter.h"
72#include "builtins.h"
73#include "fold-const-call.h"
74#include "tree-ssa-sccvn.h"
75
76/* This algorithm is based on the SCC algorithm presented by Keith
77 Cooper and L. Taylor Simpson in "SCC-Based Value numbering"
78 (http://citeseer.ist.psu.edu/41805.html). In
79 straight line code, it is equivalent to a regular hash based value
80 numbering that is performed in reverse postorder.
81
82 For code with cycles, there are two alternatives, both of which
83 require keeping the hashtables separate from the actual list of
84 value numbers for SSA names.
85
86 1. Iterate value numbering in an RPO walk of the blocks, removing
87 all the entries from the hashtable after each iteration (but
88 keeping the SSA name->value number mapping between iterations).
89 Iterate until it does not change.
90
91 2. Perform value numbering as part of an SCC walk on the SSA graph,
92 iterating only the cycles in the SSA graph until they do not change
93 (using a separate, optimistic hashtable for value numbering the SCC
94 operands).
95
96 The second is not just faster in practice (because most SSA graph
97 cycles do not involve all the variables in the graph), it also has
98 some nice properties.
99
100 One of these nice properties is that when we pop an SCC off the
101 stack, we are guaranteed to have processed all the operands coming from
102 *outside of that SCC*, so we do not need to do anything special to
103 ensure they have value numbers.
104
105 Another nice property is that the SCC walk is done as part of a DFS
106 of the SSA graph, which makes it easy to perform combining and
107 simplifying operations at the same time.
108
109 The code below is deliberately written in a way that makes it easy
110 to separate the SCC walk from the other work it does.
111
112 In order to propagate constants through the code, we track which
113 expressions contain constants, and use those while folding. In
114 theory, we could also track expressions whose value numbers are
115 replaced, in case we end up folding based on expression
116 identities.
117
118 In order to value number memory, we assign value numbers to vuses.
119 This enables us to note that, for example, stores to the same
120 address of the same value from the same starting memory states are
121 equivalent.
122 TODO:
123
124 1. We can iterate only the changing portions of the SCC's, but
125 I have not seen an SCC big enough for this to be a win.
126 2. If you differentiate between phi nodes for loops and phi nodes
127 for if-then-else, you can properly consider phi nodes in different
128 blocks for equivalence.
129 3. We could value number vuses in more cases, particularly, whole
130 structure copies.
131*/
132
133/* There's no BB_EXECUTABLE but we can use BB_VISITED. */
134#define BB_EXECUTABLE BB_VISITED
135
136static vn_lookup_kind default_vn_walk_kind;
137
138/* vn_nary_op hashtable helpers. */
139
140struct vn_nary_op_hasher : nofree_ptr_hash <vn_nary_op_s>
141{
142 typedef vn_nary_op_s *compare_type;
143 static inline hashval_t hash (const vn_nary_op_s *);
144 static inline bool equal (const vn_nary_op_s *, const vn_nary_op_s *);
145};
146
147/* Return the computed hashcode for nary operation P1. */
148
149inline hashval_t
150vn_nary_op_hasher::hash (const vn_nary_op_s *vno1)
151{
152 return vno1->hashcode;
153}
154
155/* Compare nary operations P1 and P2 and return true if they are
156 equivalent. */
157
158inline bool
159vn_nary_op_hasher::equal (const vn_nary_op_s *vno1, const vn_nary_op_s *vno2)
160{
161 return vno1 == vno2 || vn_nary_op_eq (vno1, vno2);
162}
163
164typedef hash_table<vn_nary_op_hasher> vn_nary_op_table_type;
165typedef vn_nary_op_table_type::iterator vn_nary_op_iterator_type;
166
167
168/* vn_phi hashtable helpers. */
169
170static int
171vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2);
172
173struct vn_phi_hasher : nofree_ptr_hash <vn_phi_s>
174{
175 static inline hashval_t hash (const vn_phi_s *);
176 static inline bool equal (const vn_phi_s *, const vn_phi_s *);
177};
178
179/* Return the computed hashcode for phi operation P1. */
180
181inline hashval_t
182vn_phi_hasher::hash (const vn_phi_s *vp1)
183{
184 return vp1->hashcode;
185}
186
187/* Compare two phi entries for equality, ignoring VN_TOP arguments. */
188
189inline bool
190vn_phi_hasher::equal (const vn_phi_s *vp1, const vn_phi_s *vp2)
191{
192 return vp1 == vp2 || vn_phi_eq (vp1, vp2);
193}
194
195typedef hash_table<vn_phi_hasher> vn_phi_table_type;
196typedef vn_phi_table_type::iterator vn_phi_iterator_type;
197
198
199/* Compare two reference operands P1 and P2 for equality. Return true if
200 they are equal, and false otherwise. */
201
202static int
203vn_reference_op_eq (const void *p1, const void *p2)
204{
205 const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
206 const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;
207
208 return (vro1->opcode == vro2->opcode
209 /* We do not care for differences in type qualification. */
210 && (vro1->type == vro2->type
211 || (vro1->type && vro2->type
212 && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
213 TYPE_MAIN_VARIANT (vro2->type))))
214 && expressions_equal_p (vro1->op0, vro2->op0)
215 && expressions_equal_p (vro1->op1, vro2->op1)
216 && expressions_equal_p (vro1->op2, vro2->op2)
217 && (vro1->opcode != CALL_EXPR || vro1->clique == vro2->clique));
218}
219
220/* Free a reference operation structure VP. */
221
222static inline void
223free_reference (vn_reference_s *vr)
224{
225 vr->operands.release ();
226}
227
228
229/* vn_reference hashtable helpers. */
230
231struct vn_reference_hasher : nofree_ptr_hash <vn_reference_s>
232{
233 static inline hashval_t hash (const vn_reference_s *);
234 static inline bool equal (const vn_reference_s *, const vn_reference_s *);
235};
236
237/* Return the hashcode for a given reference operation P1. */
238
239inline hashval_t
240vn_reference_hasher::hash (const vn_reference_s *vr1)
241{
242 return vr1->hashcode;
243}
244
245inline bool
246vn_reference_hasher::equal (const vn_reference_s *v, const vn_reference_s *c)
247{
248 return v == c || vn_reference_eq (v, c);
249}
250
251typedef hash_table<vn_reference_hasher> vn_reference_table_type;
252typedef vn_reference_table_type::iterator vn_reference_iterator_type;
253
254/* Pretty-print OPS to OUTFILE. */
255
256void
257print_vn_reference_ops (FILE *outfile, const vec<vn_reference_op_s> ops)
258{
259 vn_reference_op_t vro;
260 unsigned int i;
261 fprintf (outfile, "{");
262 for (i = 0; ops.iterate (i, &vro); i++)
263 {
264 bool closebrace = false;
265 if (vro->opcode != SSA_NAME
266 && TREE_CODE_CLASS (vro->opcode) != tcc_declaration)
267 {
268 fprintf (outfile, "%s", get_tree_code_name (vro->opcode));
269 if (vro->op0 || vro->opcode == CALL_EXPR)
270 {
271 fprintf (outfile, "<");
272 closebrace = true;
273 }
274 }
275 if (vro->op0 || vro->opcode == CALL_EXPR)
276 {
277 if (!vro->op0)
278 fprintf (outfile, internal_fn_name ((internal_fn)vro->clique));
279 else
280 print_generic_expr (outfile, vro->op0);
281 if (vro->op1)
282 {
283 fprintf (outfile, ",");
284 print_generic_expr (outfile, vro->op1);
285 }
286 if (vro->op2)
287 {
288 fprintf (outfile, ",");
289 print_generic_expr (outfile, vro->op2);
290 }
291 }
292 if (closebrace)
293 fprintf (outfile, ">");
294 if (i != ops.length () - 1)
295 fprintf (outfile, ",");
296 }
297 fprintf (outfile, "}");
298}
299
300DEBUG_FUNCTION void
301debug_vn_reference_ops (const vec<vn_reference_op_s> ops)
302{
303 print_vn_reference_ops (stderr, ops);
304 fputc ('\n', stderr);
305}
306
307/* The set of VN hashtables. */
308
309typedef struct vn_tables_s
310{
311 vn_nary_op_table_type *nary;
312 vn_phi_table_type *phis;
313 vn_reference_table_type *references;
314} *vn_tables_t;
315
316
317/* vn_constant hashtable helpers. */
318
319struct vn_constant_hasher : free_ptr_hash <vn_constant_s>
320{
321 static inline hashval_t hash (const vn_constant_s *);
322 static inline bool equal (const vn_constant_s *, const vn_constant_s *);
323};
324
325/* Hash table hash function for vn_constant_t. */
326
327inline hashval_t
328vn_constant_hasher::hash (const vn_constant_s *vc1)
329{
330 return vc1->hashcode;
331}
332
333/* Hash table equality function for vn_constant_t. */
334
335inline bool
336vn_constant_hasher::equal (const vn_constant_s *vc1, const vn_constant_s *vc2)
337{
338 if (vc1->hashcode != vc2->hashcode)
339 return false;
340
341 return vn_constant_eq_with_type (vc1->constant, vc2->constant);
342}
343
344static hash_table<vn_constant_hasher> *constant_to_value_id;
345
346
347/* Obstack we allocate the vn-tables elements from. */
348static obstack vn_tables_obstack;
349/* Special obstack we never unwind. */
350static obstack vn_tables_insert_obstack;
351
352static vn_reference_t last_inserted_ref;
353static vn_phi_t last_inserted_phi;
354static vn_nary_op_t last_inserted_nary;
355static vn_ssa_aux_t last_pushed_avail;
356
357/* Valid hashtables storing information we have proven to be
358 correct. */
359static vn_tables_t valid_info;
360
361
362/* Valueization hook for simplify_replace_tree. Valueize NAME if it is
363 an SSA name, otherwise just return it. */
364tree (*vn_valueize) (tree);
365static tree
366vn_valueize_for_srt (tree t, void* context ATTRIBUTE_UNUSED)
367{
368 basic_block saved_vn_context_bb = vn_context_bb;
369 /* Look for sth available at the definition block of the argument.
370 This avoids inconsistencies between availability there which
371 decides if the stmt can be removed and availability at the
372 use site. The SSA property ensures that things available
373 at the definition are also available at uses. */
374 if (!SSA_NAME_IS_DEFAULT_DEF (t))
375 vn_context_bb = gimple_bb (SSA_NAME_DEF_STMT (t));
376 tree res = vn_valueize (t);
377 vn_context_bb = saved_vn_context_bb;
378 return res;
379}
380
381
382/* This represents the top of the VN lattice, which is the universal
383 value. */
384
385tree VN_TOP;
386
387/* Unique counter for our value ids. */
388
389static unsigned int next_value_id;
390static int next_constant_value_id;
391
392
393/* Table of vn_ssa_aux_t's, one per ssa_name. The vn_ssa_aux_t objects
394 are allocated on an obstack for locality reasons, and to free them
395 without looping over the vec. */
396
397struct vn_ssa_aux_hasher : typed_noop_remove <vn_ssa_aux_t>
398{
399 typedef vn_ssa_aux_t value_type;
400 typedef tree compare_type;
401 static inline hashval_t hash (const value_type &);
402 static inline bool equal (const value_type &, const compare_type &);
403 static inline void mark_deleted (value_type &) {}
404 static const bool empty_zero_p = true;
405 static inline void mark_empty (value_type &e) { e = NULL; }
406 static inline bool is_deleted (value_type &) { return false; }
407 static inline bool is_empty (value_type &e) { return e == NULL; }
408};
409
410hashval_t
411vn_ssa_aux_hasher::hash (const value_type &entry)
412{
413 return SSA_NAME_VERSION (entry->name);
414}
415
416bool
417vn_ssa_aux_hasher::equal (const value_type &entry, const compare_type &name)
418{
419 return name == entry->name;
420}
421
422static hash_table<vn_ssa_aux_hasher> *vn_ssa_aux_hash;
423typedef hash_table<vn_ssa_aux_hasher>::iterator vn_ssa_aux_iterator_type;
424static struct obstack vn_ssa_aux_obstack;
425
426static vn_nary_op_t vn_nary_op_insert_stmt (gimple *, tree);
427static unsigned int vn_nary_length_from_stmt (gimple *);
428static vn_nary_op_t alloc_vn_nary_op_noinit (unsigned int, obstack *);
429static vn_nary_op_t vn_nary_op_insert_into (vn_nary_op_t,
430 vn_nary_op_table_type *);
431static void init_vn_nary_op_from_stmt (vn_nary_op_t, gassign *);
432static void init_vn_nary_op_from_pieces (vn_nary_op_t, unsigned int,
433 enum tree_code, tree, tree *);
434static tree vn_lookup_simplify_result (gimple_match_op *);
435static vn_reference_t vn_reference_lookup_or_insert_for_pieces
436 (tree, alias_set_type, alias_set_type, tree,
437 vec<vn_reference_op_s, va_heap>, tree);
438
439/* Return whether there is value numbering information for a given SSA name. */
440
441bool
442has_VN_INFO (tree name)
443{
444 return vn_ssa_aux_hash->find_with_hash (name, SSA_NAME_VERSION (name));
445}
446
447vn_ssa_aux_t
448VN_INFO (tree name)
449{
450 vn_ssa_aux_t *res
451 = vn_ssa_aux_hash->find_slot_with_hash (name, SSA_NAME_VERSION (name),
452 INSERT);
453 if (*res != NULL)
454 return *res;
455
456 vn_ssa_aux_t newinfo = *res = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
457 memset (newinfo, 0, sizeof (struct vn_ssa_aux));
458 newinfo->name = name;
459 newinfo->valnum = VN_TOP;
460 /* We are using the visited flag to handle uses with defs not within the
461 region being value-numbered. */
462 newinfo->visited = false;
463
464 /* Given we create the VN_INFOs on-demand now we have to do initialization
465 different than VN_TOP here. */
466 if (SSA_NAME_IS_DEFAULT_DEF (name))
467 switch (TREE_CODE (SSA_NAME_VAR (name)))
468 {
469 case VAR_DECL:
470 /* All undefined vars are VARYING. */
471 newinfo->valnum = name;
472 newinfo->visited = true;
473 break;
474
475 case PARM_DECL:
476 /* Parameters are VARYING but we can record a condition
477 if we know it is a non-NULL pointer. */
478 newinfo->visited = true;
479 newinfo->valnum = name;
480 if (POINTER_TYPE_P (TREE_TYPE (name))
481 && nonnull_arg_p (SSA_NAME_VAR (name)))
482 {
483 tree ops[2];
484 ops[0] = name;
485 ops[1] = build_int_cst (TREE_TYPE (name), 0);
486 vn_nary_op_t nary;
487 /* Allocate from non-unwinding stack. */
488 nary = alloc_vn_nary_op_noinit (2, &vn_tables_insert_obstack);
489 init_vn_nary_op_from_pieces (nary, 2, NE_EXPR,
490 boolean_type_node, ops);
491 nary->predicated_values = 0;
492 nary->u.result = boolean_true_node;
493 vn_nary_op_insert_into (nary, valid_info->nary);
494 gcc_assert (nary->unwind_to == NULL);
495 /* Also do not link it into the undo chain. */
496 last_inserted_nary = nary->next;
497 nary->next = (vn_nary_op_t)(void *)-1;
498 nary = alloc_vn_nary_op_noinit (2, &vn_tables_insert_obstack);
499 init_vn_nary_op_from_pieces (nary, 2, EQ_EXPR,
500 boolean_type_node, ops);
501 nary->predicated_values = 0;
502 nary->u.result = boolean_false_node;
503 vn_nary_op_insert_into (nary, valid_info->nary);
504 gcc_assert (nary->unwind_to == NULL);
505 last_inserted_nary = nary->next;
506 nary->next = (vn_nary_op_t)(void *)-1;
507 if (dump_file && (dump_flags & TDF_DETAILS))
508 {
509 fprintf (dump_file, "Recording ");
510 print_generic_expr (dump_file, name, TDF_SLIM);
511 fprintf (dump_file, " != 0\n");
512 }
513 }
514 break;
515
516 case RESULT_DECL:
517 /* If the result is passed by invisible reference the default
518 def is initialized, otherwise it's uninitialized. Still
519 undefined is varying. */
520 newinfo->visited = true;
521 newinfo->valnum = name;
522 break;
523
524 default:
525 gcc_unreachable ();
526 }
527 return newinfo;
528}
529
530/* Return the SSA value of X. */
531
532inline tree
533SSA_VAL (tree x, bool *visited = NULL)
534{
535 vn_ssa_aux_t tem = vn_ssa_aux_hash->find_with_hash (x, SSA_NAME_VERSION (x));
536 if (visited)
537 *visited = tem && tem->visited;
538 return tem && tem->visited ? tem->valnum : x;
539}
540
541/* Return the SSA value of the VUSE x, supporting released VDEFs
542 during elimination which will value-number the VDEF to the
543 associated VUSE (but not substitute in the whole lattice). */
544
545static inline tree
546vuse_ssa_val (tree x)
547{
548 if (!x)
549 return NULL_TREE;
550
551 do
552 {
553 x = SSA_VAL (x);
554 gcc_assert (x != VN_TOP);
555 }
556 while (SSA_NAME_IN_FREE_LIST (x));
557
558 return x;
559}
560
561/* Similar to the above but used as callback for walk_non_aliased_vuses
562 and thus should stop at unvisited VUSE to not walk across region
563 boundaries. */
564
565static tree
566vuse_valueize (tree vuse)
567{
568 do
569 {
570 bool visited;
571 vuse = SSA_VAL (vuse, &visited);
572 if (!visited)
573 return NULL_TREE;
574 gcc_assert (vuse != VN_TOP);
575 }
576 while (SSA_NAME_IN_FREE_LIST (vuse));
577 return vuse;
578}
579
580
581/* Return the vn_kind the expression computed by the stmt should be
582 associated with. */
583
584enum vn_kind
585vn_get_stmt_kind (gimple *stmt)
586{
587 switch (gimple_code (stmt))
588 {
589 case GIMPLE_CALL:
590 return VN_REFERENCE;
591 case GIMPLE_PHI:
592 return VN_PHI;
593 case GIMPLE_ASSIGN:
594 {
595 enum tree_code code = gimple_assign_rhs_code (stmt);
596 tree rhs1 = gimple_assign_rhs1 (stmt);
597 switch (get_gimple_rhs_class (code))
598 {
599 case GIMPLE_UNARY_RHS:
600 case GIMPLE_BINARY_RHS:
601 case GIMPLE_TERNARY_RHS:
602 return VN_NARY;
603 case GIMPLE_SINGLE_RHS:
604 switch (TREE_CODE_CLASS (code))
605 {
606 case tcc_reference:
607 /* VOP-less references can go through unary case. */
608 if ((code == REALPART_EXPR
609 || code == IMAGPART_EXPR
610 || code == VIEW_CONVERT_EXPR
611 || code == BIT_FIELD_REF)
612 && (TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME
613 || is_gimple_min_invariant (TREE_OPERAND (rhs1, 0))))
614 return VN_NARY;
615
616 /* Fallthrough. */
617 case tcc_declaration:
618 return VN_REFERENCE;
619
620 case tcc_constant:
621 return VN_CONSTANT;
622
623 default:
624 if (code == ADDR_EXPR)
625 return (is_gimple_min_invariant (rhs1)
626 ? VN_CONSTANT : VN_REFERENCE);
627 else if (code == CONSTRUCTOR)
628 return VN_NARY;
629 return VN_NONE;
630 }
631 default:
632 return VN_NONE;
633 }
634 }
635 default:
636 return VN_NONE;
637 }
638}
639
640/* Lookup a value id for CONSTANT and return it. If it does not
641 exist returns 0. */
642
643unsigned int
644get_constant_value_id (tree constant)
645{
646 vn_constant_s **slot;
647 struct vn_constant_s vc;
648
649 vc.hashcode = vn_hash_constant_with_type (constant);
650 vc.constant = constant;
651 slot = constant_to_value_id->find_slot (&vc, NO_INSERT);
652 if (slot)
653 return (*slot)->value_id;
654 return 0;
655}
656
657/* Lookup a value id for CONSTANT, and if it does not exist, create a
658 new one and return it. If it does exist, return it. */
659
660unsigned int
661get_or_alloc_constant_value_id (tree constant)
662{
663 vn_constant_s **slot;
664 struct vn_constant_s vc;
665 vn_constant_t vcp;
666
667 /* If the hashtable isn't initialized we're not running from PRE and thus
668 do not need value-ids. */
669 if (!constant_to_value_id)
670 return 0;
671
672 vc.hashcode = vn_hash_constant_with_type (constant);
673 vc.constant = constant;
674 slot = constant_to_value_id->find_slot (&vc, INSERT);
675 if (*slot)
676 return (*slot)->value_id;
677
678 vcp = XNEW (struct vn_constant_s);
679 vcp->hashcode = vc.hashcode;
680 vcp->constant = constant;
681 vcp->value_id = get_next_constant_value_id ();
682 *slot = vcp;
683 return vcp->value_id;
684}
685
686/* Compute the hash for a reference operand VRO1. */
687
688static void
689vn_reference_op_compute_hash (const vn_reference_op_t vro1, inchash::hash &hstate)
690{
691 hstate.add_int (vro1->opcode);
692 if (vro1->opcode == CALL_EXPR && !vro1->op0)
693 hstate.add_int (vro1->clique);
694 if (vro1->op0)
695 inchash::add_expr (vro1->op0, hstate);
696 if (vro1->op1)
697 inchash::add_expr (vro1->op1, hstate);
698 if (vro1->op2)
699 inchash::add_expr (vro1->op2, hstate);
700}
701
702/* Compute a hash for the reference operation VR1 and return it. */
703
704static hashval_t
705vn_reference_compute_hash (const vn_reference_t vr1)
706{
707 inchash::hash hstate;
708 hashval_t result;
709 int i;
710 vn_reference_op_t vro;
711 poly_int64 off = -1;
712 bool deref = false;
713
714 FOR_EACH_VEC_ELT (vr1->operands, i, vro)
715 {
716 if (vro->opcode == MEM_REF)
717 deref = true;
718 else if (vro->opcode != ADDR_EXPR)
719 deref = false;
720 if (maybe_ne (vro->off, -1))
721 {
722 if (known_eq (off, -1))
723 off = 0;
724 off += vro->off;
725 }
726 else
727 {
728 if (maybe_ne (off, -1)
729 && maybe_ne (off, 0))
730 hstate.add_poly_int (off);
731 off = -1;
732 if (deref
733 && vro->opcode == ADDR_EXPR)
734 {
735 if (vro->op0)
736 {
737 tree op = TREE_OPERAND (vro->op0, 0);
738 hstate.add_int (TREE_CODE (op));
739 inchash::add_expr (op, hstate);
740 }
741 }
742 else
743 vn_reference_op_compute_hash (vro, hstate);
744 }
745 }
746 result = hstate.end ();
747 /* ??? We would ICE later if we hash instead of adding that in. */
748 if (vr1->vuse)
749 result += SSA_NAME_VERSION (vr1->vuse);
750
751 return result;
752}
753
754/* Return true if reference operations VR1 and VR2 are equivalent. This
755 means they have the same set of operands and vuses. */
756
757bool
758vn_reference_eq (const_vn_reference_t const vr1, const_vn_reference_t const vr2)
759{
760 unsigned i, j;
761
762 /* Early out if this is not a hash collision. */
763 if (vr1->hashcode != vr2->hashcode)
764 return false;
765
766 /* The VOP needs to be the same. */
767 if (vr1->vuse != vr2->vuse)
768 return false;
769
770 /* If the operands are the same we are done. */
771 if (vr1->operands == vr2->operands)
772 return true;
773
774 if (!vr1->type || !vr2->type)
775 {
776 if (vr1->type != vr2->type)
777 return false;
778 }
779 else if (vr1->type == vr2->type)
780 ;
781 else if (COMPLETE_TYPE_P (vr1->type) != COMPLETE_TYPE_P (vr2->type)
782 || (COMPLETE_TYPE_P (vr1->type)
783 && !expressions_equal_p (TYPE_SIZE (vr1->type),
784 TYPE_SIZE (vr2->type))))
785 return false;
786 else if (vr1->operands[0].opcode == CALL_EXPR
787 && !types_compatible_p (vr1->type, vr2->type))
788 return false;
789 else if (INTEGRAL_TYPE_P (vr1->type)
790 && INTEGRAL_TYPE_P (vr2->type))
791 {
792 if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
794 }
795 else if (INTEGRAL_TYPE_P (vr1->type)
796 && (TYPE_PRECISION (vr1->type)
797 != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
798 return false;
799 else if (INTEGRAL_TYPE_P (vr2->type)
800 && (TYPE_PRECISION (vr2->type)
801 != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
802 return false;
803
804 i = 0;
805 j = 0;
806 do
807 {
808 poly_int64 off1 = 0, off2 = 0;
809 vn_reference_op_t vro1, vro2;
810 vn_reference_op_s tem1, tem2;
811 bool deref1 = false, deref2 = false;
812 bool reverse1 = false, reverse2 = false;
813 for (; vr1->operands.iterate (i, &vro1); i++)
814 {
815 if (vro1->opcode == MEM_REF)
816 deref1 = true;
817 /* Do not look through a storage order barrier. */
818 else if (vro1->opcode == VIEW_CONVERT_EXPR && vro1->reverse)
819 return false;
820 reverse1 |= vro1->reverse;
821 if (known_eq (vro1->off, -1))
822 break;
823 off1 += vro1->off;
824 }
825 for (; vr2->operands.iterate (j, &vro2); j++)
826 {
827 if (vro2->opcode == MEM_REF)
828 deref2 = true;
829 /* Do not look through a storage order barrier. */
830 else if (vro2->opcode == VIEW_CONVERT_EXPR && vro2->reverse)
831 return false;
832 reverse2 |= vro2->reverse;
833 if (known_eq (vro2->off, -1))
834 break;
835 off2 += vro2->off;
836 }
837 if (maybe_ne (off1, off2) || reverse1 != reverse2)
838 return false;
839 if (deref1 && vro1->opcode == ADDR_EXPR)
840 {
841 memset (&tem1, 0, sizeof (tem1));
842 tem1.op0 = TREE_OPERAND (vro1->op0, 0);
843 tem1.type = TREE_TYPE (tem1.op0);
844 tem1.opcode = TREE_CODE (tem1.op0);
845 vro1 = &tem1;
846 deref1 = false;
847 }
848 if (deref2 && vro2->opcode == ADDR_EXPR)
849 {
850 memset (&tem2, 0, sizeof (tem2));
851 tem2.op0 = TREE_OPERAND (vro2->op0, 0);
852 tem2.type = TREE_TYPE (tem2.op0);
853 tem2.opcode = TREE_CODE (tem2.op0);
854 vro2 = &tem2;
855 deref2 = false;
856 }
857 if (deref1 != deref2)
858 return false;
859 if (!vn_reference_op_eq (vro1, vro2))
860 return false;
861 ++j;
862 ++i;
863 }
864 while (vr1->operands.length () != i
865 || vr2->operands.length () != j);
866
867 return true;
868}
869
870/* Copy the operations present in load/store REF into RESULT, a vector of
871 vn_reference_op_s's. */
872
873static void
874copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
875{
876 /* For non-calls, store the information that makes up the address. */
877 tree orig = ref;
878 while (ref)
879 {
880 vn_reference_op_s temp;
881
882 memset (&temp, 0, sizeof (temp));
883 temp.type = TREE_TYPE (ref);
884 temp.opcode = TREE_CODE (ref);
885 temp.off = -1;
886
887 switch (temp.opcode)
888 {
889 case MODIFY_EXPR:
890 temp.op0 = TREE_OPERAND (ref, 1);
891 break;
892 case WITH_SIZE_EXPR:
893 temp.op0 = TREE_OPERAND (ref, 1);
894 temp.off = 0;
895 break;
896 case MEM_REF:
897 /* The base address gets its own vn_reference_op_s structure. */
898 temp.op0 = TREE_OPERAND (ref, 1);
899 if (!mem_ref_offset (ref).to_shwi (&temp.off))
900 temp.off = -1;
901 temp.clique = MR_DEPENDENCE_CLIQUE (ref);
902 temp.base = MR_DEPENDENCE_BASE (ref);
903 temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
904 break;
905 case TARGET_MEM_REF:
906 /* The base address gets its own vn_reference_op_s structure. */
907 temp.op0 = TMR_INDEX (ref);
908 temp.op1 = TMR_STEP (ref);
909 temp.op2 = TMR_OFFSET (ref);
910 temp.clique = MR_DEPENDENCE_CLIQUE (ref);
911 temp.base = MR_DEPENDENCE_BASE (ref);
912 result->safe_push (temp);
913 memset (&temp, 0, sizeof (temp));
914 temp.type = NULL_TREE;
915 temp.opcode = ERROR_MARK;
916 temp.op0 = TMR_INDEX2 (ref);
917 temp.off = -1;
918 break;
919 case BIT_FIELD_REF:
920 /* Record bits, position and storage order. */
921 temp.op0 = TREE_OPERAND (ref, 1);
922 temp.op1 = TREE_OPERAND (ref, 2);
923 if (!multiple_p (bit_field_offset (ref), BITS_PER_UNIT, &temp.off))
924 temp.off = -1;
925 temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
926 break;
927 case COMPONENT_REF:
928 /* The field decl is enough to unambiguously specify the field,
929 so use its type here. */
930 temp.type = TREE_TYPE (TREE_OPERAND (ref, 1));
931 temp.op0 = TREE_OPERAND (ref, 1);
932 temp.op1 = TREE_OPERAND (ref, 2);
933 temp.reverse = (AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (ref, 0)))
934 && TYPE_REVERSE_STORAGE_ORDER
935 (TREE_TYPE (TREE_OPERAND (ref, 0))));
936 {
937 tree this_offset = component_ref_field_offset (ref);
938 if (this_offset
939 && poly_int_tree_p (this_offset))
940 {
941 tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
942 if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
943 {
944 poly_offset_int off
945 = (wi::to_poly_offset (this_offset)
946 + (wi::to_offset (bit_offset) >> LOG2_BITS_PER_UNIT));
947 /* Prohibit value-numbering zero offset components
948 of addresses the same before the pass folding
949 __builtin_object_size had a chance to run. */
950 if (TREE_CODE (orig) != ADDR_EXPR
951 || maybe_ne (off, 0)
952 || (cfun->curr_properties & PROP_objsz))
953 off.to_shwi (&temp.off);
954 }
955 }
956 }
957 break;
958 case ARRAY_RANGE_REF:
959 case ARRAY_REF:
960 {
961 tree eltype = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref, 0)));
962 /* Record index as operand. */
963 temp.op0 = TREE_OPERAND (ref, 1);
964 /* Always record lower bounds and element size. */
965 temp.op1 = array_ref_low_bound (ref);
966 /* But record element size in units of the type alignment. */
967 temp.op2 = TREE_OPERAND (ref, 3);
968 temp.align = eltype->type_common.align;
969 if (! temp.op2)
970 temp.op2 = size_binop (EXACT_DIV_EXPR, TYPE_SIZE_UNIT (eltype),
971 size_int (TYPE_ALIGN_UNIT (eltype)));
972 if (poly_int_tree_p (temp.op0)
973 && poly_int_tree_p (temp.op1)
974 && TREE_CODE (temp.op2) == INTEGER_CST)
975 {
976 poly_offset_int off = ((wi::to_poly_offset (temp.op0)
977 - wi::to_poly_offset (temp.op1))
978 * wi::to_offset (temp.op2)
979 * vn_ref_op_align_unit (&temp));
980 off.to_shwi (&temp.off);
981 }
982 temp.reverse = (AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (ref, 0)))
983 && TYPE_REVERSE_STORAGE_ORDER
984 (TREE_TYPE (TREE_OPERAND (ref, 0))));
985 }
986 break;
987 case VAR_DECL:
988 if (DECL_HARD_REGISTER (ref))
989 {
990 temp.op0 = ref;
991 break;
992 }
993 /* Fallthru. */
994 case PARM_DECL:
995 case CONST_DECL:
996 case RESULT_DECL:
997 /* Canonicalize decls to MEM[&decl] which is what we end up with
998 when valueizing MEM[ptr] with ptr = &decl. */
999 temp.opcode = MEM_REF;
1000 temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
1001 temp.off = 0;
1002 result->safe_push (temp);
1003 temp.opcode = ADDR_EXPR;
1004 temp.op0 = build1 (ADDR_EXPR, TREE_TYPE (temp.op0), ref);
1005 temp.type = TREE_TYPE (temp.op0);
1006 temp.off = -1;
1007 break;
1008 case STRING_CST:
1009 case INTEGER_CST:
1010 case POLY_INT_CST:
1011 case COMPLEX_CST:
1012 case VECTOR_CST:
1013 case REAL_CST:
1014 case FIXED_CST:
1015 case CONSTRUCTOR:
1016 case SSA_NAME:
1017 temp.op0 = ref;
1018 break;
1019 case ADDR_EXPR:
1020 if (is_gimple_min_invariant (ref))
1021 {
1022 temp.op0 = ref;
1023 break;
1024 }
1025 break;
1026 /* These are only interesting for their operands, their
1027 existence, and their type. They will never be the last
1028 ref in the chain of references (IE they require an
1029 operand), so we don't have to put anything
1030 for op* as it will be handled by the iteration */
1031 case REALPART_EXPR:
1032 temp.off = 0;
1033 break;
1034 case VIEW_CONVERT_EXPR:
1035 temp.off = 0;
1036 temp.reverse = storage_order_barrier_p (ref);
1037 break;
1038 case IMAGPART_EXPR:
1039 /* This is only interesting for its constant offset. */
1040 temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
1041 break;
1042 default:
1043 gcc_unreachable ();
1044 }
1045 result->safe_push (temp);
1046
1047 if (REFERENCE_CLASS_P (ref)
1048 || TREE_CODE (ref) == MODIFY_EXPR
1049 || TREE_CODE (ref) == WITH_SIZE_EXPR
1050 || (TREE_CODE (ref) == ADDR_EXPR
1051 && !is_gimple_min_invariant (ref)))
1052 ref = TREE_OPERAND (ref, 0);
1053 else
1054 ref = NULL_TREE;
1055 }
1056}
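As a standalone aside (not part of the analyzed source): the MEM[&decl] canonicalization above can be modeled with a toy op record. The struct and values below are hypothetical stand-ins for vn_reference_op_s, chosen only to show the two ops that get pushed.

#include <stdio.h>

struct op { const char *opcode; const char *operand; long off; };

int main (void)
{
  /* Valueizing a bare decl 'x' yields the same ops as MEM[ptr] with
     ptr = &x: a MEM_REF at offset 0 over an ADDR_EXPR of the decl.  */
  struct op ops[2] = {
    { "MEM_REF",   "(int *) 0", 0 },  /* temp.op0 = build_int_cst (...), off 0 */
    { "ADDR_EXPR", "&x",       -1 },  /* temp.op0 = build1 (ADDR_EXPR, ...), off -1 */
  };
  for (int i = 0; i < 2; i++)
    printf ("%s %s off=%ld\n", ops[i].opcode, ops[i].operand, ops[i].off);
  return 0;
}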
1057
1058/* Build an alias-oracle reference abstraction in *REF from the vn_reference
1059 operands in *OPS, the reference alias set SET and the reference type TYPE.
1060 Return true if something useful was produced. */
1061
1062bool
1063ao_ref_init_from_vn_reference (ao_ref *ref,
1064 alias_set_type set, alias_set_type base_set,
1065 tree type, const vec<vn_reference_op_s> &ops)
1066{
1067 unsigned i;
1068 tree base = NULL_TREE;
1069 tree *op0_p = &base;
1070 poly_offset_int offset = 0;
1071 poly_offset_int max_size;
1072 poly_offset_int size = -1;
1073 tree size_tree = NULL_TREE;
1074
1075 /* We don't handle calls. */
1076 if (!type)
1077 return false;
1078
1079 machine_mode mode = TYPE_MODE (type);
1080 if (mode == BLKmode)
1081 size_tree = TYPE_SIZE (type);
1082 else
1083 size = GET_MODE_BITSIZE (mode);
1084 if (size_tree != NULL_TREE
1085 && poly_int_tree_p (size_tree))
1086 size = wi::to_poly_offset (size_tree);
1087
1088 /* Lower the final access size from the outermost expression. */
1089 const_vn_reference_op_t cst_op = &ops[0];
1090 /* Cast away constness for the sake of the const-unsafe
1091 FOR_EACH_VEC_ELT(). */
1092 vn_reference_op_t op = const_cast<vn_reference_op_t>(cst_op);
1093 size_tree = NULL_TREE;
1094 if (op->opcode == COMPONENT_REF)
1095 size_tree = DECL_SIZE (op->op0);
1096 else if (op->opcode == BIT_FIELD_REF)
1097 size_tree = op->op0;
1098 if (size_tree != NULL_TREE
1099 && poly_int_tree_p (size_tree)
1100 && (!known_size_p (size)
1101 || known_lt (wi::to_poly_offset (size_tree), size)))
1102 size = wi::to_poly_offset (size_tree);
1103
1104 /* Initially, maxsize is the same as the accessed element size.
1105 In the following it will only grow (or become -1). */
1106 max_size = size;
1107
1108 /* Compute cumulative bit-offset for nested component-refs and array-refs,
1109 and find the ultimate containing object. */
1110 FOR_EACH_VEC_ELT (ops, i, op)
1111 {
1112 switch (op->opcode)
1113 {
1114 /* These may be in the reference ops, but we cannot do anything
1115 sensible with them here. */
1116 case ADDR_EXPR:
1117 /* Apart from ADDR_EXPR arguments to MEM_REF. */
1118 if (base != NULL_TREE
1119 && TREE_CODE (base) == MEM_REF
1120 && op->op0
1121 && DECL_P (TREE_OPERAND (op->op0, 0)))
1122 {
1123 const_vn_reference_op_t pop = &ops[i-1];
1124 base = TREE_OPERAND (op->op0, 0);
1125 if (known_eq (pop->off, -1))
1126 {
1127 max_size = -1;
1128 offset = 0;
1129 }
1130 else
1131 offset += pop->off * BITS_PER_UNIT;
1132 op0_p = NULL;
1133 break;
1134 }
1135 /* Fallthru. */
1136 case CALL_EXPR:
1137 return false;
1138
1139 /* Record the base objects. */
1140 case MEM_REF:
1141 *op0_p = build2 (MEM_REF, op->type,
1142 NULL_TREE, op->op0);
1143 MR_DEPENDENCE_CLIQUE (*op0_p) = op->clique;
1144 MR_DEPENDENCE_BASE (*op0_p) = op->base;
1145 op0_p = &TREE_OPERAND (*op0_p, 0);
1146 break;
1147
1148 case VAR_DECL:
1149 case PARM_DECL:
1150 case RESULT_DECL:
1151 case SSA_NAME:
1152 *op0_p = op->op0;
1153 op0_p = NULL;
1154 break;
1155
1156 /* And now the usual component-reference style ops. */
1157 case BIT_FIELD_REF:
1158 offset += wi::to_poly_offset (op->op1);
1159 break;
1160
1161 case COMPONENT_REF:
1162 {
1163 tree field = op->op0;
1164 /* We do not have a complete COMPONENT_REF tree here so we
1165 cannot use component_ref_field_offset. Do the interesting
1166 parts manually. */
1167 tree this_offset = DECL_FIELD_OFFSET (field);
1168
1169 if (op->op1 || !poly_int_tree_p (this_offset))
1170 max_size = -1;
1171 else
1172 {
1173 poly_offset_int woffset = (wi::to_poly_offset (this_offset)
1174 << LOG2_BITS_PER_UNIT);
1175 woffset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
1176 offset += woffset;
1177 }
1178 break;
1179 }
1180
1181 case ARRAY_RANGE_REF:
1182 case ARRAY_REF:
1183 /* We recorded the lower bound and the element size. */
1184 if (!poly_int_tree_p (op->op0)
1185 || !poly_int_tree_p (op->op1)
1186 || TREE_CODE (op->op2) != INTEGER_CST)
1187 max_size = -1;
1188 else
1189 {
1190 poly_offset_int woffset
1191 = wi::sext (wi::to_poly_offset (op->op0)
1192 - wi::to_poly_offset (op->op1),
1193 TYPE_PRECISION (sizetype));
1194 woffset *= wi::to_offset (op->op2) * vn_ref_op_align_unit (op);
1195 woffset <<= LOG2_BITS_PER_UNIT;
1196 offset += woffset;
1197 }
1198 break;
1199
1200 case REALPART_EXPR:
1201 break;
1202
1203 case IMAGPART_EXPR:
1204 offset += size;
1205 break;
1206
1207 case VIEW_CONVERT_EXPR:
1208 break;
1209
1210 case STRING_CST:
1211 case INTEGER_CST:
1212 case COMPLEX_CST:
1213 case VECTOR_CST:
1214 case REAL_CST:
1215 case CONSTRUCTOR:
1216 case CONST_DECL:
1217 return false;
1218
1219 default:
1220 return false;
1221 }
1222 }
1223
1224 if (base == NULL_TREE)
1225 return false;
1226
1227 ref->ref = NULL_TREE;
1228 ref->base = base;
1229 ref->ref_alias_set = set;
1230 ref->base_alias_set = base_set;
1231 /* We discount volatiles from value-numbering elsewhere. */
1232 ref->volatile_p = false;
1233
1234 if (!size.to_shwi (&ref->size) || maybe_lt (ref->size, 0))
1235 {
1236 ref->offset = 0;
1237 ref->size = -1;
1238 ref->max_size = -1;
1239 return true;
1240 }
1241
1242 if (!offset.to_shwi (&ref->offset))
1243 {
1244 ref->offset = 0;
1245 ref->max_size = -1;
1246 return true;
1247 }
1248
1249 if (!max_size.to_shwi (&ref->max_size) || maybe_lt (ref->max_size, 0))
1250 ref->max_size = -1;
1251
1252 return true;
1253}
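A standalone aside on the ARRAY_REF arithmetic above: the byte offset is (index - low bound) * op2 * alignment unit, shifted by LOG2_BITS_PER_UNIT into bits. The numbers below are hypothetical, with plain longs standing in for poly_offset_int.

#include <stdio.h>

int main (void)
{
  long index = 7, low_bound = 2;            /* op->op0, op->op1 */
  long elt_size_units = 1, align_unit = 4;  /* op->op2, vn_ref_op_align_unit */
  long woffset = (index - low_bound) * elt_size_units * align_unit; /* bytes */
  woffset <<= 3;                            /* LOG2_BITS_PER_UNIT: bytes to bits */
  printf ("bit offset of a[7] over 4-byte elements: %ld\n", woffset); /* 160 */
  return 0;
}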
1254
1255/* Copy the operations present in load/store/call REF into RESULT, a vector of
1256 vn_reference_op_s's. */
1257
1258static void
1259copy_reference_ops_from_call (gcall *call,
1260 vec<vn_reference_op_s> *result)
1261{
1262 vn_reference_op_s temp;
1263 unsigned i;
1264 tree lhs = gimple_call_lhs (call);
1265 int lr;
1266
1267 /* If 2 calls have a different non-ssa lhs, vdef value numbers should be
1268 different. By adding the lhs here in the vector, we ensure that the
1269 hashcode is different, guaranteeing a different value number. */
1270 if (lhs && TREE_CODE (lhs) != SSA_NAME)
1271 {
1272 memset (&temp, 0, sizeof (temp));
1273 temp.opcode = MODIFY_EXPR;
1274 temp.type = TREE_TYPE (lhs);
1275 temp.op0 = lhs;
1276 temp.off = -1;
1277 result->safe_push (temp);
1278 }
1279
1280 /* Copy the type, opcode, function, static chain and EH region, if any. */
1281 memset (&temp, 0, sizeof (temp));
1282 temp.type = gimple_call_fntype (call);
1283 temp.opcode = CALL_EXPR;
1284 temp.op0 = gimple_call_fn (call);
1285 if (gimple_call_internal_p (call))
1286 temp.clique = gimple_call_internal_fn (call);
1287 temp.op1 = gimple_call_chain (call);
1288 if (stmt_could_throw_p (cfun, call) && (lr = lookup_stmt_eh_lp (call)) > 0)
1289 temp.op2 = size_int (lr);
1290 temp.off = -1;
1291 result->safe_push (temp);
1292
1293 /* Copy the call arguments. As they can be references as well,
1294 just chain them together. */
1295 for (i = 0; i < gimple_call_num_args (call); ++i)
1296 {
1297 tree callarg = gimple_call_arg (call, i);
1298 copy_reference_ops_from_ref (callarg, result);
1299 }
1300}
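A standalone aside on why a non-SSA lhs is pushed first: two textually identical calls that store to different objects must hash differently. The djb2-style hash below is an illustrative stand-in, not vn_reference_compute_hash.

#include <stdio.h>

static unsigned hash_ops (const char *ops[], int n)
{
  unsigned h = 5381;
  for (int i = 0; i < n; i++)
    for (const char *p = ops[i]; *p; p++)
      h = h * 33 + (unsigned char) *p;
  return h;
}

int main (void)
{
  /* Same callee, different non-SSA lhs: the leading MODIFY_EXPR op keeps
     the op vectors, and hence the hashcodes, distinct.  */
  const char *call_a[] = { "MODIFY_EXPR g1", "CALL_EXPR foo" };
  const char *call_b[] = { "MODIFY_EXPR g2", "CALL_EXPR foo" };
  printf ("%u vs %u\n", hash_ops (call_a, 2), hash_ops (call_b, 2));
  return 0;
}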
1301
1302/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1303 *I_P to point to the last element of the replacement. */
1304static bool
1305vn_reference_fold_indirect (vec<vn_reference_op_s> *ops,
1306 unsigned int *i_p)
1307{
1308 unsigned int i = *i_p;
1309 vn_reference_op_t op = &(*ops)[i];
1310 vn_reference_op_t mem_op = &(*ops)[i - 1];
1311 tree addr_base;
1312 poly_int64 addr_offset = 0;
1313
1314 /* The only thing we have to do is from &OBJ.foo.bar add the offset
1315 from .foo.bar to the preceding MEM_REF offset and replace the
1316 address with &OBJ. */
1317 addr_base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (op->op0, 0),
1318 &addr_offset, vn_valueize);
1319 gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
1320 if (addr_base != TREE_OPERAND (op->op0, 0))
1322 poly_offset_int off
1323 = (poly_offset_int::from (wi::to_poly_wide (mem_op->op0),
1324 SIGNED)
1325 + addr_offset);
1326 mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1327 op->op0 = build_fold_addr_expr (addr_base);
1328 if (tree_fits_shwi_p (mem_op->op0))
1329 mem_op->off = tree_to_shwi (mem_op->op0);
1330 else
1331 mem_op->off = -1;
1332 return true;
1333 }
1334 return false;
1335}
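A standalone aside: in plain C terms the fold rewrites MEM[&obj.field] as MEM[&obj + offsetof (field)], moving the byte offset into the preceding MEM_REF. offsetof stands in for get_addr_base_and_unit_offset_1 here; struct S is hypothetical.

#include <stdio.h>
#include <stddef.h>

struct S { int a; int b; };

int main (void)
{
  struct S s = { 1, 2 };
  long mem_ref_off = 0;                      /* mem_op->op0 before the fold */
  long addr_offset = offsetof (struct S, b);
  mem_ref_off += addr_offset;                /* folded into the MEM_REF */
  /* Both sides name the same memory.  */
  int v = *(int *) ((char *) &s + mem_ref_off);
  printf ("%d == %d\n", v, s.b);
  return 0;
}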
1336
1337/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1338 *I_P to point to the last element of the replacement. */
1339static bool
1340vn_reference_maybe_forwprop_address (vec<vn_reference_op_s> *ops,
1341 unsigned int *i_p)
1342{
1343 bool changed = false;
1344 vn_reference_op_t op;
1345
1346 do
1347 {
1348 unsigned int i = *i_p;
1349 op = &(*ops)[i];
1350 vn_reference_op_t mem_op = &(*ops)[i - 1];
1351 gimple *def_stmt;
1352 enum tree_code code;
1353 poly_offset_int off;
1354
1355 def_stmt = SSA_NAME_DEF_STMT (op->op0);
1356 if (!is_gimple_assign (def_stmt))
1357 return changed;
1358
1359 code = gimple_assign_rhs_code (def_stmt);
1360 if (code != ADDR_EXPR
1361 && code != POINTER_PLUS_EXPR)
1362 return changed;
1363
1364 off = poly_offset_int::from (wi::to_poly_wide (mem_op->op0), SIGNED);
1365
1366 /* The only thing we have to do is from &OBJ.foo.bar add the offset
1367 from .foo.bar to the preceding MEM_REF offset and replace the
1368 address with &OBJ. */
1369 if (code == ADDR_EXPR)
1370 {
1371 tree addr, addr_base;
1372 poly_int64 addr_offset;
1373
1374 addr = gimple_assign_rhs1 (def_stmt);
1375 addr_base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (addr, 0),
1376 &addr_offset,
1377 vn_valueize);
1378 /* If that didn't work because the address isn't invariant propagate
1379 the reference tree from the address operation in case the current
1380 dereference isn't offsetted. */
1381 if (!addr_base
1382 && *i_p == ops->length () - 1
1383 && known_eq (off, 0)
1384 /* This makes us disable this transform for PRE where the
1385 reference ops might be also used for code insertion which
1386 is invalid. */
1387 && default_vn_walk_kind == VN_WALKREWRITE)
1388 {
1389 auto_vec<vn_reference_op_s, 32> tem;
1390 copy_reference_ops_from_ref (TREE_OPERAND (addr, 0), &tem);
1391 /* Make sure to preserve TBAA info. The only objects not
1392 wrapped in MEM_REFs that can have their address taken are
1393 STRING_CSTs. */
1394 if (tem.length () >= 2
1395 && tem[tem.length () - 2].opcode == MEM_REF)
1396 {
1397 vn_reference_op_t new_mem_op = &tem[tem.length () - 2];
1398 new_mem_op->op0
1399 = wide_int_to_tree (TREE_TYPE (mem_op->op0),
1400 wi::to_poly_wide (new_mem_op->op0));
1401 }
1402 else
1403 gcc_assert (tem.last ().opcode == STRING_CST);
1404 ops->pop ();
1405 ops->pop ();
1406 ops->safe_splice (tem);
1407 --*i_p;
1408 return true;
1409 }
1410 if (!addr_base
1411 || TREE_CODE (addr_base) != MEM_REF
1412 || (TREE_CODE (TREE_OPERAND (addr_base, 0)) == SSA_NAME
1413 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (addr_base,
1414 0))))
1415 return changed;
1416
1417 off += addr_offset;
1418 off += mem_ref_offset (addr_base);
1419 op->op0 = TREE_OPERAND (addr_base, 0);
1420 }
1421 else
1422 {
1423 tree ptr, ptroff;
1424 ptr = gimple_assign_rhs1 (def_stmt);
1425 ptroff = gimple_assign_rhs2 (def_stmt);
1426 if (TREE_CODE (ptr) != SSA_NAME
1427 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ptr)
1428 /* Make sure to not endlessly recurse.
1429 See gcc.dg/tree-ssa/20040408-1.c for an example. Can easily
1430 happen when we value-number a PHI to its backedge value. */
1431 || SSA_VAL (ptr) == op->op0
1432 || !poly_int_tree_p (ptroff))
1433 return changed;
1434
1435 off += wi::to_poly_offset (ptroff);
1436 op->op0 = ptr;
1437 }
1438
1439 mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1440 if (tree_fits_shwi_p (mem_op->op0))
1441 mem_op->off = tree_to_shwi (mem_op->op0);
1442 else
1443 mem_op->off = -1;
1444 /* ??? Can end up with endless recursion here!?
1445 gcc.c-torture/execute/strcmp-1.c */
1446 if (TREE_CODE (op->op0) == SSA_NAME)
1447 op->op0 = SSA_VAL (op->op0);
1448 if (TREE_CODE (op->op0) != SSA_NAME)
1449 op->opcode = TREE_CODE (op->op0);
1450
1451 changed = true;
1452 }
1453 /* Tail-recurse. */
1454 while (TREE_CODE (op->op0) == SSA_NAME);
1455
1456 /* Fold a remaining *&. */
1457 if (TREE_CODE (op->op0) == ADDR_EXPR)
1458 vn_reference_fold_indirect (ops, i_p);
1459
1460 return changed;
1461}
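A standalone aside on the POINTER_PLUS_EXPR arm: following the pointer's defining statement accumulates its constant offset into the MEM_REF, so MEM[p2 + 4] with p2 = p + 8 becomes MEM[p + 12]. Ordinary pointers stand in for SSA names below.

#include <stdio.h>

int main (void)
{
  char buf[32] = { 0 };
  buf[12] = 42;
  char *p = buf;
  char *p2 = p + 8;   /* def_stmt: p2 = p + 8 */
  long off = 4;       /* current MEM_REF offset */
  off += 8;           /* off += wi::to_poly_offset (ptroff) */
  printf ("%d == %d\n", p2[4], p[off]);  /* the same access */
  return 0;
}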
1462
1463/* Optimize the reference REF to a constant if possible or return
1464 NULL_TREE if not. */
1465
1466tree
1467fully_constant_vn_reference_p (vn_reference_t ref)
1468{
1469 vec<vn_reference_op_s> operands = ref->operands;
1470 vn_reference_op_t op;
1471
1472 /* Try to simplify the translated expression if it is
1473 a call to a builtin function with at most two arguments. */
1474 op = &operands[0];
1475 if (op->opcode == CALL_EXPR
1476 && (!op->op0
1477 || (TREE_CODE (op->op0) == ADDR_EXPR
1478 && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
1479 && fndecl_built_in_p (TREE_OPERAND (op->op0, 0),
1480 BUILT_IN_NORMAL)))
1481 && operands.length () >= 2
1482 && operands.length () <= 3)
1483 {
1484 vn_reference_op_t arg0, arg1 = NULL;
1485 bool anyconst = false;
1486 arg0 = &operands[1];
1487 if (operands.length () > 2)
1488 arg1 = &operands[2];
1489 if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
1490 || (arg0->opcode == ADDR_EXPR
1491 && is_gimple_min_invariant (arg0->op0)))
1492 anyconst = true;
1493 if (arg1
1494 && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
1495 || (arg1->opcode == ADDR_EXPR
1496 && is_gimple_min_invariant (arg1->op0))))
1497 anyconst = true;
1498 if (anyconst)
1499 {
1500 combined_fn fn;
1501 if (op->op0)
1502 fn = as_combined_fn (DECL_FUNCTION_CODE
1503 (TREE_OPERAND (op->op0, 0)));
1504 else
1505 fn = as_combined_fn ((internal_fn) op->clique);
1506 tree folded;
1507 if (arg1)
1508 folded = fold_const_call (fn, ref->type, arg0->op0, arg1->op0);
1509 else
1510 folded = fold_const_call (fn, ref->type, arg0->op0);
1511 if (folded
1512 && is_gimple_min_invariant (folded))
1513 return folded;
1514 }
1515 }
1516
1517 /* Simplify reads from constants or constant initializers. */
1518 else if (BITS_PER_UNIT == 8
1519 && ref->type
1520 && COMPLETE_TYPE_P (ref->type)
1521 && is_gimple_reg_type (ref->type))
1522 {
1523 poly_int64 off = 0;
1524 HOST_WIDE_INT size;
1525 if (INTEGRAL_TYPE_P (ref->type))
1526 size = TYPE_PRECISION (ref->type);
1527 else if (tree_fits_shwi_p (TYPE_SIZE (ref->type)))
1528 size = tree_to_shwi (TYPE_SIZE (ref->type));
1529 else
1530 return NULL_TREE;
1531 if (size % BITS_PER_UNIT != 0
1532 || size > MAX_BITSIZE_MODE_ANY_MODE)
1533 return NULL_TREE;
1534 size /= BITS_PER_UNIT;
1535 unsigned i;
1536 for (i = 0; i < operands.length (); ++i)
1537 {
1538 if (TREE_CODE_CLASS (operands[i].opcode) == tcc_constant)
1539 {
1540 ++i;
1541 break;
1542 }
1543 if (known_eq (operands[i].off, -1))
1544 return NULL_TREE;
1545 off += operands[i].off;
1546 if (operands[i].opcode == MEM_REF)
1547 {
1548 ++i;
1549 break;
1550 }
1551 }
1552 vn_reference_op_t base = &operands[--i];
1553 tree ctor = error_mark_node;
1554 tree decl = NULL_TREE;
1555 if (TREE_CODE_CLASS (base->opcode) == tcc_constant)
1556 ctor = base->op0;
1557 else if (base->opcode == MEM_REF
1558 && base[1].opcode == ADDR_EXPR
1559 && (TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == VAR_DECL
1560 || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == CONST_DECL
1561 || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == STRING_CST))
1562 {
1563 decl = TREE_OPERAND (base[1].op0, 0);
1564 if (TREE_CODE (decl) == STRING_CST)
1565 ctor = decl;
1566 else
1567 ctor = ctor_for_folding (decl);
1568 }
1569 if (ctor == NULL_TREE)
1570 return build_zero_cst (ref->type);
1571 else if (ctor != error_mark_node)
1572 {
1573 HOST_WIDE_INT const_off;
1574 if (decl)
1575 {
1576 tree res = fold_ctor_reference (ref->type, ctor,
1577 off * BITS_PER_UNIT,
1578 size * BITS_PER_UNIT, decl);
1579 if (res)
1580 {
1581 STRIP_USELESS_TYPE_CONVERSION (res);
1582 if (is_gimple_min_invariant (res))
1583 return res;
1584 }
1585 }
1586 else if (off.is_constant (&const_off))
1587 {
1588 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
1589 int len = native_encode_expr (ctor, buf, size, const_off);
1590 if (len > 0)
1591 return native_interpret_expr (ref->type, buf, len);
1592 }
1593 }
1594 }
1595
1596 return NULL_TREE;
1597}
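A standalone aside on the constant-initializer path: native_encode_expr produces the target byte image of the constant, and native_interpret_expr reads SIZE bytes at a constant offset back in the access type. The host-endian sketch below mirrors that, minus the target awareness of the real routines.

#include <stdio.h>
#include <string.h>

static const int ctor[4] = { 10, 20, 30, 40 };  /* hypothetical initializer */

int main (void)
{
  unsigned char buf[sizeof ctor];
  memcpy (buf, ctor, sizeof ctor);             /* "native_encode_expr" */
  long const_off = 2 * (long) sizeof (int);    /* constant byte offset */
  int val;
  memcpy (&val, buf + const_off, sizeof val);  /* "native_interpret_expr" */
  printf ("folded read: %d\n", val);           /* 30 */
  return 0;
}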
1598
1599/* Return true if OPS contain a storage order barrier. */
1600
1601static bool
1602contains_storage_order_barrier_p (vec<vn_reference_op_s> ops)
1603{
1604 vn_reference_op_t op;
1605 unsigned i;
1606
1607 FOR_EACH_VEC_ELT (ops, i, op)
1608 if (op->opcode == VIEW_CONVERT_EXPR && op->reverse)
1609 return true;
1610
1611 return false;
1612}
1613
1614/* Return true if OPS represent an access with reverse storage order. */
1615
1616static bool
1617reverse_storage_order_for_component_p (vec<vn_reference_op_s> ops)
1618{
1619 unsigned i = 0;
1620 if (ops[i].opcode == REALPART_EXPR || ops[i].opcode == IMAGPART_EXPR)
1621 ++i;
1622 switch (ops[i].opcode)
1623 {
1624 case ARRAY_REF:
1625 case COMPONENT_REF:
1626 case BIT_FIELD_REF:
1627 case MEM_REF:
1628 return ops[i].reverse;
1629 default:
1630 return false;
1631 }
1632}
1633
1634/* Transform any SSA_NAME's in a vector of vn_reference_op_s
1635 structures into their value numbers. This is done in-place, and
1636 the vector passed in is returned. *VALUEIZED_ANYTHING will specify
1637 whether any operands were valueized. */
1638
1639static void
1640valueize_refs_1 (vec<vn_reference_op_s> *orig, bool *valueized_anything,
1641 bool with_avail = false)
1642{
1643 *valueized_anything = false;
1644
1645 for (unsigned i = 0; i < orig->length (); ++i)
1646 {
1647re_valueize:
1648 vn_reference_op_t vro = &(*orig)[i];
1649 if (vro->opcode == SSA_NAME
1650 || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
1651 {
1652 tree tem = with_avail ? vn_valueize (vro->op0) : SSA_VAL (vro->op0);
1653 if (tem != vro->op0)
1654 {
1655 *valueized_anything = true;
1656 vro->op0 = tem;
1657 }
1658 /* If it transforms from an SSA_NAME to a constant, update
1659 the opcode. */
1660 if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
1661 vro->opcode = TREE_CODE (vro->op0);
1662 }
1663 if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
1664 {
1665 tree tem = with_avail ? vn_valueize (vro->op1) : SSA_VAL (vro->op1);
1666 if (tem != vro->op1)
1667 {
1668 *valueized_anything = true;
1669 vro->op1 = tem;
1670 }
1671 }
1672 if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
1673 {
1674 tree tem = with_avail ? vn_valueize (vro->op2) : SSA_VAL (vro->op2);
1675 if (tem != vro->op2)
1676 {
1677 *valueized_anything = true;
1678 vro->op2 = tem;
1679 }
1680 }
1681 /* If it transforms from an SSA_NAME to an address, fold with
1682 a preceding indirect reference. */
1683 if (i > 0
1684 && vro->op0
1685 && TREE_CODE (vro->op0) == ADDR_EXPR
1686 && (*orig)[i - 1].opcode == MEM_REF)
1687 {
1688 if (vn_reference_fold_indirect (orig, &i))
1689 *valueized_anything = true;
1690 }
1691 else if (i > 0
1692 && vro->opcode == SSA_NAME
1693 && (*orig)[i - 1].opcode == MEM_REF)
1694 {
1695 if (vn_reference_maybe_forwprop_address (orig, &i))
1696 {
1697 *valueized_anything = true;
1698 /* Re-valueize the current operand. */
1699 goto re_valueize;
1700 }
1701 }
1702 /* If it transforms a non-constant ARRAY_REF into a constant
1703 one, adjust the constant offset. */
1704 else if (vro->opcode == ARRAY_REF
1705 && known_eq (vro->off, -1)
1706 && poly_int_tree_p (vro->op0)
1707 && poly_int_tree_p (vro->op1)
1708 && TREE_CODE (vro->op2) == INTEGER_CST)
1709 {
1710 poly_offset_int off = ((wi::to_poly_offset (vro->op0)
1711 - wi::to_poly_offset (vro->op1))
1712 * wi::to_offset (vro->op2)
1713 * vn_ref_op_align_unit (vro));
1714 off.to_shwi (&vro->off);
1715 }
1716 }
1717}
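A standalone aside on the in-place valueization: once an SSA operand valueizes to a constant, the opcode is updated to match and a previously unknown ARRAY_REF offset becomes computable. Every name and value below is hypothetical.

#include <stdio.h>
#include <string.h>

struct op { char opcode[16]; long op0; long off; };

/* Toy lattice: pretend SSA name _1 has value number 5.  */
static long vn_value_of (long ssa_name) { (void) ssa_name; return 5; }

int main (void)
{
  struct op vro = { "SSA_NAME", 1, -1 };  /* ARRAY_REF index _1, off unknown */
  long low_bound = 0, elt_size = 4;
  vro.op0 = vn_value_of (vro.op0);        /* valueize the operand */
  strcpy (vro.opcode, "INTEGER_CST");     /* opcode follows the operand */
  vro.off = (vro.op0 - low_bound) * elt_size;  /* now a known offset */
  printf ("%s %ld off=%ld\n", vro.opcode, vro.op0, vro.off);
  return 0;
}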
1718
1719static void
1720valueize_refs (vec<vn_reference_op_s> *orig)
1721{
1722 bool tem;
1723 valueize_refs_1 (orig, &tem);
1724}
1725
1726static vec<vn_reference_op_s> shared_lookup_references;
1727
1728/* Create a vector of vn_reference_op_s structures from REF, a
1729 REFERENCE_CLASS_P tree. The vector is shared among all callers of
1730 this function. *VALUEIZED_ANYTHING will specify whether any
1731 operands were valueized. */
1732
1733static vec<vn_reference_op_s>
1734valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
1735{
1736 if (!ref)
1737 return vNULL;
1738 shared_lookup_references.truncate (0);
1739 copy_reference_ops_from_ref (ref, &shared_lookup_references);
1740 valueize_refs_1 (&shared_lookup_references, valueized_anything);
1741 return shared_lookup_references;
1742}
1743
1744/* Create a vector of vn_reference_op_s structures from CALL, a
1745 call statement. The vector is shared among all callers of
1746 this function. */
1747
1748static vec<vn_reference_op_s>
1749valueize_shared_reference_ops_from_call (gcall *call)
1750{
1751 if (!call)
1752 return vNULL;
1753 shared_lookup_references.truncate (0);
1754 copy_reference_ops_from_call (call, &shared_lookup_references);
1755 valueize_refs (&shared_lookup_references);
1756 return shared_lookup_references;
1757}
1758
1759/* Lookup a SCCVN reference operation VR in the current hash table.
1760 Returns the resulting value number if it exists in the hash table,
1761 NULL_TREE otherwise. VNRESULT will be filled in with the actual
1762 vn_reference_t stored in the hashtable if something is found. */
1763
1764static tree
1765vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
1766{
1767 vn_reference_s **slot;
1768 hashval_t hash;
1769
1770 hash = vr->hashcode;
1771 slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1772 if (slot)
1773 {
1774 if (vnresult)
1775 *vnresult = (vn_reference_t)*slot;
1776 return ((vn_reference_t)*slot)->result;
1777 }
1778
1779 return NULL_TREE;
1780}
1781
1782
1783/* Partial definition tracking support. */
1784
1785struct pd_range
1786{
1787 HOST_WIDE_INT offset;
1788 HOST_WIDE_INT size;
1789};
1790
1791struct pd_data
1792{
1793 tree rhs;
1794 HOST_WIDE_INT offset;
1795 HOST_WIDE_INT size;
1796};
1797
1798/* Context for alias walking. */
1799
1800struct vn_walk_cb_data
1801{
1802 vn_walk_cb_data (vn_reference_t vr_, tree orig_ref_, tree *last_vuse_ptr_,
1803 vn_lookup_kind vn_walk_kind_, bool tbaa_p_, tree mask_)
1804 : vr (vr_), last_vuse_ptr (last_vuse_ptr_), last_vuse (NULL_TREE),
1805 mask (mask_), masked_result (NULL_TREE), vn_walk_kind (vn_walk_kind_),
1806 tbaa_p (tbaa_p_), saved_operands (vNULL), first_set (-2),
1807 first_base_set (-2), known_ranges (NULL)
1808 {
1809 if (!last_vuse_ptr)
1810 last_vuse_ptr = &last_vuse;
1811 ao_ref_init (&orig_ref, orig_ref_);
1812 if (mask)
1813 {
1814 wide_int w = wi::to_wide (mask);
1815 unsigned int pos = 0, prec = w.get_precision ();
1816 pd_data pd;
1817 pd.rhs = build_constructor (NULL_TREE, NULL);
1818 /* When bitwise and with a constant is done on a memory load,
1819 we don't really need all the bits to be defined or defined
1820 to constants, we don't really care what is in the position
1821 corresponding to 0 bits in the mask.
1822 So, push the ranges of those 0 bits in the mask as artificial
1823 zero stores and let the partial def handling code do the
1824 rest. */
1825 while (pos < prec)
1826 {
1827 int tz = wi::ctz (w);
1828 if (pos + tz > prec)
1829 tz = prec - pos;
1830 if (tz)
1831 {
1832 if (BYTES_BIG_ENDIAN)
1833 pd.offset = prec - pos - tz;
1834 else
1835 pd.offset = pos;
1836 pd.size = tz;
1837 void *r = push_partial_def (pd, 0, 0, 0, prec);
1838 gcc_assert (r == NULL_TREE);
1839 }
1840 pos += tz;
1841 if (pos == prec)
1842 break;
1843 w = wi::lrshift (w, tz);
1844 tz = wi::ctz (wi::bit_not (w));
1845 if (pos + tz > prec)
1846 tz = prec - pos;
1847 pos += tz;
1848 w = wi::lrshift (w, tz);
1849 }
1850 }
1851 }
1852 ~vn_walk_cb_data ();
1853 void *finish (alias_set_type, alias_set_type, tree);
1854 void *push_partial_def (pd_data pd,
1855 alias_set_type, alias_set_type, HOST_WIDE_INT,
1856 HOST_WIDE_INT);
1857
1858 vn_reference_t vr;
1859 ao_ref orig_ref;
1860 tree *last_vuse_ptr;
1861 tree last_vuse;
1862 tree mask;
1863 tree masked_result;
1864 vn_lookup_kind vn_walk_kind;
1865 bool tbaa_p;
1866 vec<vn_reference_op_s> saved_operands;
1867
1868 /* The VDEFs of partial defs we come along. */
1869 auto_vec<pd_data, 2> partial_defs;
1870 /* The first defs range to avoid splay tree setup in most cases. */
1871 pd_range first_range;
1872 alias_set_type first_set;
1873 alias_set_type first_base_set;
1874 splay_tree known_ranges;
1875 obstack ranges_obstack;
1876};
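A standalone aside on the mask handling in the constructor above: runs of zero bits in the mask become artificial zero stores, found by alternating count-trailing-zeros over the mask and its complement. The 16-bit walk below re-enacts that loop with a simple ctz in place of wi::ctz.

#include <stdio.h>

static unsigned ctz16 (unsigned w)
{
  unsigned n = 0;
  while (!(w & 1)) { w >>= 1; n++; }
  return n;
}

int main (void)
{
  unsigned w = 0xff00;  /* mask: low 8 bits are don't-care */
  unsigned pos = 0, prec = 16;
  while (pos < prec)
    {
      unsigned tz = w ? ctz16 (w) : prec - pos;  /* zero run */
      if (pos + tz > prec)
	tz = prec - pos;
      if (tz)
	printf ("artificial zero store at bit %u, size %u\n", pos, tz);
      pos += tz;
      if (pos == prec)
	break;
      w >>= tz;
      unsigned oz = ctz16 (~w);                  /* following one run */
      if (pos + oz > prec)
	oz = prec - pos;
      pos += oz;
      w >>= oz;
    }
  return 0;
}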
1877
1878vn_walk_cb_data::~vn_walk_cb_data ()
1879{
1880 if (known_ranges)
1881 {
1882 splay_tree_delete (known_ranges);
1883 obstack_free (&ranges_obstack, NULL);
1884 }
1885 saved_operands.release ();
1886}
1887
1888void *
1889vn_walk_cb_data::finish (alias_set_type set, alias_set_type base_set, tree val)
1890{
1891 if (first_set != -2)
1892 {
1893 set = first_set;
1894 base_set = first_base_set;
1895 }
1896 if (mask)
1897 {
1898 masked_result = val;
1899 return (void *) -1;
1900 }
1901 vec<vn_reference_op_s> &operands
1902 = saved_operands.exists () ? saved_operands : vr->operands;
1903 return vn_reference_lookup_or_insert_for_pieces (last_vuse, set, base_set,
1904 vr->type, operands, val);
1905}
1906
1907/* pd_range splay-tree helpers. */
1908
1909static int
1910pd_range_compare (splay_tree_key offset1p, splay_tree_key offset2p)
1911{
1912 HOST_WIDE_INT offset1 = *(HOST_WIDE_INT *)offset1p;
1913 HOST_WIDE_INT offset2 = *(HOST_WIDE_INT *)offset2p;
1914 if (offset1 < offset2)
1915 return -1;
1916 else if (offset1 > offset2)
1917 return 1;
1918 return 0;
1919}
1920
1921static void *
1922pd_tree_alloc (int size, void *data_)
1923{
1924 vn_walk_cb_data *data = (vn_walk_cb_data *)data_;
1925 return obstack_alloc (&data->ranges_obstack, size);
1926}
1927
1928static void
1929pd_tree_dealloc (void *, void *)
1930{
1931}
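A standalone aside before push_partial_def: new ranges are merged with splay-tree neighbors that overlap or merely touch, and passing size + 1 to the overlap test is what makes exact adjacency count as overlap. The sketch below isolates that merge rule.

#include <stdio.h>

struct range { long offset, size; };

/* Mirrors ranges_known_overlap_p (a.offset, a.size + 1, b.offset, b.size).  */
static int touches_or_overlaps (struct range a, struct range b)
{
  return a.offset < b.offset + b.size && b.offset < a.offset + a.size + 1;
}

int main (void)
{
  struct range r = { 0, 8 }, newr = { 8, 8 };  /* exactly adjacent defs */
  if (touches_or_overlaps (r, newr))
    {
      long r_end = r.offset + r.size, n_end = newr.offset + newr.size;
      r.size = (r_end > n_end ? r_end : n_end) - r.offset;  /* the MAX in push_partial_def */
    }
  printf ("merged range: [%ld, %ld)\n", r.offset, r.offset + r.size);
  return 0;
}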
1932
1933/* Push PD to the vector of partial definitions returning a
1934 value when we are ready to combine things with VUSE, SET and MAXSIZEI,
1935 NULL when we want to continue looking for partial defs or -1
1936 on failure. */
1937
1938void *
1939vn_walk_cb_data::push_partial_def (pd_data pd,
1940 alias_set_type set, alias_set_type base_set,
1941 HOST_WIDE_INT offseti,
1942 HOST_WIDE_INT maxsizei)
1943{
1944 const HOST_WIDE_INT bufsize = 64;
1945 /* We're using a fixed buffer for encoding so fail early if the object
1946 we want to interpret is bigger. */
1947 if (maxsizei > bufsize * BITS_PER_UNIT
1948 || CHAR_BIT != 8
1949 || BITS_PER_UNIT != 8
1950 /* Not prepared to handle PDP endian. */
1951 || BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
1952 return (void *)-1;
1953
1954 /* Turn too large constant stores into non-constant stores. */
1955 if (CONSTANT_CLASS_P (pd.rhs) && pd.size > bufsize * BITS_PER_UNIT)
1956 pd.rhs = error_mark_node;
1957
1958 /* And for non-constant or CONSTRUCTOR stores shrink them to only keep at
1959 most a partial byte before and/or after the region. */
1960 if (!CONSTANT_CLASS_P (pd.rhs))
1961 {
1962 if (pd.offset < offseti)
1963 {
1964 HOST_WIDE_INT o = ROUND_DOWN (offseti - pd.offset, BITS_PER_UNIT);
1965 gcc_assert (pd.size > o);
1966 pd.size -= o;
1967 pd.offset += o;
1968 }
1969 if (pd.size > maxsizei)
1970 pd.size = maxsizei + ((pd.size - maxsizei) % BITS_PER_UNIT);
1971 }
1972
1973 pd.offset -= offseti;
1974
1975 bool pd_constant_p = (TREE_CODE (pd.rhs) == CONSTRUCTOR
1976 || CONSTANT_CLASS_P (pd.rhs));
1977 if (partial_defs.is_empty ())
1978 {
1979 /* If we get a clobber upfront, fail. */
1980 if (TREE_CLOBBER_P (pd.rhs))
1981 return (void *)-1;
1982 if (!pd_constant_p)
1983 return (void *)-1;
1984 partial_defs.safe_push (pd);
1985 first_range.offset = pd.offset;
1986 first_range.size = pd.size;
1987 first_set = set;
1988 first_base_set = base_set;
1989 last_vuse_ptr = NULL;
1990 /* Continue looking for partial defs. */
1991 return NULL;
1992 }
1993
1994 if (!known_ranges)
1995 {
1996 /* ??? Optimize the case where the 2nd partial def completes things. */
1997 gcc_obstack_init (&ranges_obstack);
1998 known_ranges = splay_tree_new_with_allocator (pd_range_compare, 0, 0,
1999 pd_tree_alloc,
2000 pd_tree_dealloc, this);
2001 splay_tree_insert (known_ranges,
2002 (splay_tree_key)&first_range.offset,
2003 (splay_tree_value)&first_range);
2004 }
2005
2006 pd_range newr = { pd.offset, pd.size };
2007 splay_tree_node n;
2008 pd_range *r;
2009 /* Lookup the predecessor of offset + 1 and see if we need to merge. */
2010 HOST_WIDE_INT loffset = newr.offset + 1;
2011 if ((n = splay_tree_predecessor (known_ranges, (splay_tree_key)&loffset))
2012 && ((r = (pd_range *)n->value), true)
2013 && ranges_known_overlap_p (r->offset, r->size + 1,
2014 newr.offset, newr.size))
2015 {
2016 /* Ignore partial defs already covered. Here we also drop shadowed
2017 clobbers arriving here at the floor. */
2018 if (known_subrange_p (newr.offset, newr.size, r->offset, r->size))
2019 return NULL;
2020 r->size = MAX (r->offset + r->size, newr.offset + newr.size) - r->offset;
2021 }
2022 else
2023 {
2024 /* newr.offset wasn't covered yet, insert the range. */
2025 r = XOBNEW (&ranges_obstack, pd_range);
2026 *r = newr;
2027 splay_tree_insert (known_ranges, (splay_tree_key)&r->offset,
2028 (splay_tree_value)r);
2029 }
2030 /* Merge r which now contains newr and is a member of the splay tree with
2031 adjacent overlapping ranges. */
2032 pd_range *rafter;
2033 while ((n = splay_tree_successor (known_ranges, (splay_tree_key)&r->offset))
2034 && ((rafter = (pd_range *)n->value), true)
2035 && ranges_known_overlap_p (r->offset, r->size + 1,
2036 rafter->offset, rafter->size))
2037 {
2038 r->size = MAX (r->offset + r->size,
2039 rafter->offset + rafter->size) - r->offset;
2040 splay_tree_remove (known_ranges, (splay_tree_key)&rafter->offset);
2041 }
2042 /* If we get a clobber, fail. */
2043 if (TREE_CLOBBER_P (pd.rhs))
2044 return (void *)-1;
2045 /* Non-constants are OK as long as they are shadowed by a constant. */
2046 if (!pd_constant_p)
2047 return (void *)-1;
2048 partial_defs.safe_push (pd);
2049
2050 /* Now we have merged newr into the range tree. When we have covered
2051 [offseti, sizei] then the tree will contain exactly one node which has
2052 the desired properties and it will be 'r'. */
2053 if (!known_subrange_p (0, maxsizei, r->offset, r->size))
2054 /* Continue looking for partial defs. */
2055 return NULL;
2056
2057 /* Now simply native encode all partial defs in reverse order. */
2058 unsigned ndefs = partial_defs.length ();
2059 /* We support up to 512-bit values (for V8DFmode). */
2060 unsigned char buffer[bufsize + 1];
2061 unsigned char this_buffer[bufsize + 1];
2062 int len;
2063
2064 memset (buffer, 0, bufsize + 1);
2065 unsigned needed_len = ROUND_UP (maxsizei, BITS_PER_UNIT) / BITS_PER_UNIT;
2066 while (!partial_defs.is_empty ())
2067 {
2068 pd_data pd = partial_defs.pop ();
2069 unsigned int amnt;
2070 if (TREE_CODE (pd.rhs) == CONSTRUCTOR)
2071 {
2072 /* Empty CONSTRUCTOR. */
2073 if (pd.size >= needed_len * BITS_PER_UNIT)
2074 len = needed_len;
2075 else
2076 len = ROUND_UP (pd.size, BITS_PER_UNIT) / BITS_PER_UNIT;
2077 memset (this_buffer, 0, len);
2078 }
2079 else
2080 {
2081 len = native_encode_expr (pd.rhs, this_buffer, bufsize,
2082 MAX (0, -pd.offset) / BITS_PER_UNIT);
2083 if (len <= 0
2084 || len < (ROUND_UP (pd.size, BITS_PER_UNIT) / BITS_PER_UNIT
2085 - MAX (0, -pd.offset) / BITS_PER_UNIT))
2086 {
2087 if (dump_file && (dump_flags & TDF_DETAILS))
2088 fprintf (dump_file, "Failed to encode %u "
2089 "partial definitions\n", ndefs);
2090 return (void *)-1;
2091 }
2092 }
2093
2094 unsigned char *p = buffer;
2095 HOST_WIDE_INT size = pd.size;
2096 if (pd.offset < 0)
2097 size -= ROUND_DOWN (-pd.offset, BITS_PER_UNIT);
2098 this_buffer[len] = 0;
2099 if (BYTES_BIG_ENDIAN)
2100 {
2101 /* LSB of this_buffer[len - 1] byte should be at
2102 pd.offset + pd.size - 1 bits in buffer. */
2103 amnt = ((unsigned HOST_WIDE_INT) pd.offset
2104 + pd.size) % BITS_PER_UNIT;
2105 if (amnt)
2106 shift_bytes_in_array_right (this_buffer, len + 1, amnt);
2107 unsigned char *q = this_buffer;
2108 unsigned int off = 0;
2109 if (pd.offset >= 0)
2110 {
2111 unsigned int msk;
2112 off = pd.offset / BITS_PER_UNIT;
2113 gcc_assert (off < needed_len);
2114 p = buffer + off;
2115 if (size <= amnt)
2116 {
2117 msk = ((1 << size) - 1) << (BITS_PER_UNIT - amnt);
2118 *p = (*p & ~msk) | (this_buffer[len] & msk);
2119 size = 0;
2120 }
2121 else
2122 {
2123 if (TREE_CODE (pd.rhs) != CONSTRUCTOR)
2124 q = (this_buffer + len
2125 - (ROUND_UP (size - amnt, BITS_PER_UNIT)
2126 / BITS_PER_UNIT));
2127 if (pd.offset % BITS_PER_UNIT)
2128 {
2129 msk = -1U << (BITS_PER_UNIT
2130 - (pd.offset % BITS_PER_UNIT));
2131 *p = (*p & msk) | (*q & ~msk);
2132 p++;
2133 q++;
2134 off++;
2135 size -= BITS_PER_UNIT - (pd.offset % BITS_PER_UNIT);
2136 gcc_assert (size >= 0);
2137 }
2138 }
2139 }
2140 else if (TREE_CODE (pd.rhs) != CONSTRUCTOR)
2141 {
2142 q = (this_buffer + len
2143 - (ROUND_UP (size - amnt, BITS_PER_UNIT)
2144 / BITS_PER_UNIT));
2145 if (pd.offset % BITS_PER_UNIT)
2146 {
2147 q++;
2148 size -= BITS_PER_UNIT - ((unsigned HOST_WIDE_INT) pd.offset
2149 % BITS_PER_UNIT);
2150 gcc_assert (size >= 0);
2151 }
2152 }
2153 if ((unsigned HOST_WIDE_INT) size / BITS_PER_UNIT + off
2154 > needed_len)
2155 size = (needed_len - off) * BITS_PER_UNIT;
2156 memcpy (p, q, size / BITS_PER_UNIT);
2157 if (size % BITS_PER_UNIT)
2158 {
2159 unsigned int msk
2160 = -1U << (BITS_PER_UNIT - (size % BITS_PER_UNIT));
2161 p += size / BITS_PER_UNIT;
2162 q += size / BITS_PER_UNIT;
2163 *p = (*q & msk) | (*p & ~msk);
2164 }
2165 }
2166 else
2167 {
2168 if (pd.offset >= 0)
2169 {
2170 /* LSB of this_buffer[0] byte should be at pd.offset bits
2171 in buffer. */
2172 unsigned int msk;
2173 size = MIN (size, (HOST_WIDE_INT) needed_len * BITS_PER_UNIT);
2174 amnt = pd.offset % BITS_PER_UNIT;
2175 if (amnt)
2176 shift_bytes_in_array_left (this_buffer, len + 1, amnt);
2177 unsigned int off = pd.offset / BITS_PER_UNIT;
2178 gcc_assert (off < needed_len);
2179 size = MIN (size,
2180 (HOST_WIDE_INT) (needed_len - off) * BITS_PER_UNIT);
2181 p = buffer + off;
2182 if (amnt + size < BITS_PER_UNIT)
2183 {
2184 /* Low amnt bits come from *p, then size bits
2185 from this_buffer[0] and the remaining again from
2186 *p. */
2187 msk = ((1 << size) - 1) << amnt;
2188 *p = (*p & ~msk) | (this_buffer[0] & msk);
2189 size = 0;
2190 }
2191 else if (amnt)
2192 {
2193 msk = -1U << amnt;
2194 *p = (*p & ~msk) | (this_buffer[0] & msk);
2195 p++;
2196 size -= (BITS_PER_UNIT - amnt);
2197 }
2198 }
2199 else
2200 {
2201 amnt = (unsigned HOST_WIDE_INT) pd.offset % BITS_PER_UNIT;
2202 if (amnt)
2203 size -= BITS_PER_UNIT - amnt;
2204 size = MIN (size, (HOST_WIDE_INT) needed_len * BITS_PER_UNIT);
2205 if (amnt)
2206 shift_bytes_in_array_left (this_buffer, len + 1, amnt);
2207 }
2208 memcpy (p, this_buffer + (amnt != 0), size / BITS_PER_UNIT);
2209 p += size / BITS_PER_UNIT;
2210 if (size % BITS_PER_UNIT)
2211 {
2212 unsigned int msk = -1U << (size % BITS_PER_UNIT);
2213 *p = (this_buffer[(amnt != 0) + size / BITS_PER_UNIT]
2214 & ~msk) | (*p & msk);
2215 }
2216 }
2217 }
2218
2219 tree type = vr->type;
2220 /* Make sure to interpret in a type that has a range covering the whole
2221 access size. */
2222 if (INTEGRAL_TYPE_P (vr->type) && maxsizei != TYPE_PRECISION (vr->type))
2223 type = build_nonstandard_integer_type (maxsizei, TYPE_UNSIGNED (type));
2224 tree val;
2225 if (BYTES_BIG_ENDIAN)
2226 {
2227 unsigned sz = needed_len;
2228 if (maxsizei % BITS_PER_UNIT)
2229 shift_bytes_in_array_right (buffer, needed_len,
2230 BITS_PER_UNIT
2231 - (maxsizei % BITS_PER_UNIT));
2232 if (INTEGRAL_TYPE_P (type))
2233 sz = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
2234 if (sz > needed_len)
2235 {
2236 memcpy (this_buffer + (sz - needed_len), buffer, needed_len);
2237 val = native_interpret_expr (type, this_buffer, sz);
2238 }
2239 else
2240 val = native_interpret_expr (type, buffer, needed_len);
2241 }
2242 else
2243 val = native_interpret_expr (type, buffer, bufsize);
2244 /* If we chop off bits because the type's precision doesn't match the memory
2245 access size this is ok when optimizing reads but not when called from
2246 the DSE code during elimination. */
2247 if (val && type != vr->type)
2248 {
2249 if (! int_fits_type_p (val, vr->type))
2250 val = NULL_TREE;
2251 else
2252 val = fold_convert (vr->type, val);
2253 }
2254
2255 if (val)
2256 {
2257 if (dump_file && (dump_flags & TDF_DETAILS))
2258 fprintf (dump_file,
2259 "Successfully combined %u partial definitions\n", ndefs);
2260 /* We are using the alias-set of the first store we encounter which
2261 should be appropriate here. */
2262 return finish (first_set, first_base_set, val);
2263 }
2264 else
2265 {
2266 if (dump_file && (dump_flags & TDF_DETAILS))
2267 fprintf (dump_file,
2268 "Failed to interpret %u encoded partial definitions\n", ndefs);
2269 return (void *)-1;
2270 }
2271}
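A standalone aside on the combination step just performed: in the common byte-aligned little-endian case it amounts to copying each encoded partial def into the result buffer at its byte offset and reinterpreting the bytes in the access type. The sketch assumes a little-endian host; the real code additionally handles sub-byte offsets and big-endian layouts.

#include <stdio.h>
#include <string.h>

int main (void)
{
  unsigned char buffer[8] = { 0 };
  /* Two partial defs covering bits [0, 32) and [32, 64) of an 8-byte read.  */
  unsigned int lo = 0x11223344, hi = 0x55667788;
  memcpy (buffer + 0, &lo, sizeof lo);  /* pd.offset = 0,  pd.size = 32 */
  memcpy (buffer + 4, &hi, sizeof hi);  /* pd.offset = 32, pd.size = 32 */
  unsigned long long val;
  memcpy (&val, buffer, sizeof val);    /* native_interpret_expr analogue */
  printf ("combined value: 0x%llx\n", val);  /* 0x5566778811223344 on LE hosts */
  return 0;
}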
2272
2273/* Callback for walk_non_aliased_vuses. Adjusts the vn_reference_t VR_
2274 with the current VUSE and performs the expression lookup. */
2275
2276static void *
2277 vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse, void *data_)
2278{
2279 vn_walk_cb_data *data = (vn_walk_cb_data *)data_;
2280 vn_reference_t vr = data->vr;
2281 vn_reference_s **slot;
2282 hashval_t hash;
2283
2284 /* If we have partial definitions recorded we have to go through
2285 vn_reference_lookup_3. */
2286 if (!data->partial_defs.is_empty ())
2287 return NULL;
2288
2289 if (data->last_vuse_ptr)
2290 {
2291 *data->last_vuse_ptr = vuse;
2292 data->last_vuse = vuse;
2293 }
2294
2295 /* Fixup vuse and hash. */
2296 if (vr->vuse)
2297 vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
2298 vr->vuse = vuse_ssa_val (vuse);
2299 if (vr->vuse)
2300 vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);
2301
2302 hash = vr->hashcode;
2303 slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
2304 if (slot)
2305 {
2306 if ((*slot)->result && data->saved_operands.exists ())
2307 return data->finish (vr->set, vr->base_set, (*slot)->result);
2308 return *slot;
2309 }
2310
2311 return NULL;
2312}
2313
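The vuse fixup in vn_reference_lookup_2 works because the VUSE contributes additively to the reference hash, so re-basing the lookup to another VUSE is a constant-time adjustment rather than a re-hash of all operands. A minimal sketch of that invariant (hypothetical helper, not part of this file):

  static hashval_t
  rebase_hash (hashval_t hashcode, tree old_vuse, tree new_vuse)
  {
    /* hashcode was computed as base + SSA_NAME_VERSION (old_vuse).  */
    if (old_vuse)
      hashcode -= SSA_NAME_VERSION (old_vuse);
    if (new_vuse)
      hashcode += SSA_NAME_VERSION (new_vuse);
    return hashcode;
  }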
2314/* Lookup an existing or insert a new vn_reference entry into the
2315 value table for the VUSE, SET, TYPE, OPERANDS reference which
2316 has the value VALUE which is either a constant or an SSA name. */
2317
2318static vn_reference_t
2319vn_reference_lookup_or_insert_for_pieces (tree vuse,
2320 alias_set_type set,
2321 alias_set_type base_set,
2322 tree type,
2323 vec<vn_reference_op_s,
2324 va_heap> operands,
2325 tree value)
2326{
2327 vn_reference_s vr1;
2328 vn_reference_t result;
2329 unsigned value_id;
2330 vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2331 vr1.operands = operands;
2332 vr1.type = type;
2333 vr1.set = set;
2334 vr1.base_set = base_set;
2335 vr1.hashcode = vn_reference_compute_hash (&vr1);
2336 if (vn_reference_lookup_1 (&vr1, &result))
2337 return result;
2338 if (TREE_CODE (value) == SSA_NAME)
2339 value_id = VN_INFO (value)->value_id;
2340 else
2341 value_id = get_or_alloc_constant_value_id (value);
2342 return vn_reference_insert_pieces (vuse, set, base_set, type,
2343 operands.copy (), value, value_id);
2344}
2345
2346/* Return a value-number for RCODE OPS... either by looking up an existing
2347 value-number for the possibly simplified result or by inserting the
2348 operation if INSERT is true. If SIMPLIFY is false, return a value
2349 number for the unsimplified expression. */
2350
2351static tree
2352vn_nary_build_or_lookup_1 (gimple_match_op *res_op, bool insert,
2353 bool simplify)
2354{
2355 tree result = NULL_TREE;
2356 /* We will be creating a value number for
2357 RCODE (OPS...).
2358 So first simplify and lookup this expression to see if it
2359 is already available. */
2360 /* For simplification valueize. */
2361 unsigned i = 0;
2362 if (simplify)
2363 for (i = 0; i < res_op->num_ops; ++i)
2364 if (TREE_CODE (res_op->ops[i]) == SSA_NAME)
2365 {
2366 tree tem = vn_valueize (res_op->ops[i]);
2367 if (!tem)
2368 break;
2369 res_op->ops[i] = tem;
2370 }
2371 /* If valueization of an operand fails (it is not available), skip
2372 simplification. */
2373 bool res = false;
2374 if (i == res_op->num_ops)
2375 {
2376 mprts_hook = vn_lookup_simplify_result;
2377 res = res_op->resimplify (NULL, vn_valueize);
2378 mprts_hook = NULL;
2379 }
2380 gimple *new_stmt = NULL;
2381 if (res
2382 && gimple_simplified_result_is_gimple_val (res_op))
2383 {
2384 /* The expression is already available. */
2385 result = res_op->ops[0];
2386 /* Valueize it, simplification returns something in AVAIL only. */
2387 if (TREE_CODE (result) == SSA_NAME)
2388 result = SSA_VAL (result);
2389 }
2390 else
2391 {
2392 tree val = vn_lookup_simplify_result (res_op);
2393 if (!val && insert)
2394 {
2395 gimple_seq stmts = NULL;
2396 result = maybe_push_res_to_seq (res_op, &stmts);
2397 if (result)
2398 {
2399 gcc_assert (gimple_seq_singleton_p (stmts));
2400 new_stmt = gimple_seq_first_stmt (stmts);
2401 }
2402 }
2403 else
2404 /* The expression is already available. */
2405 result = val;
2406 }
2407 if (new_stmt)
2408 {
2409 /* The expression is not yet available, value-number lhs to
2410 the new SSA_NAME we created. */
2411 /* Initialize value-number information properly. */
2412 vn_ssa_aux_t result_info = VN_INFO (result);
2413 result_info->valnum = result;
2414 result_info->value_id = get_next_value_id ();
2415 result_info->visited = 1;
2416 gimple_seq_add_stmt_without_update (&VN_INFO (result)->expr,
2417 new_stmt);
2418 result_info->needs_insertion = true;
2419 /* ??? PRE phi-translation inserts NARYs without corresponding
2420 SSA name result. Re-use those but set their result according
2421 to the stmt we just built. */
2422 vn_nary_op_t nary = NULL;
2423 vn_nary_op_lookup_stmt (new_stmt, &nary);
2424 if (nary)
2425 {
2426 gcc_assert (! nary->predicated_values && nary->u.result == NULL_TREE);
2427 nary->u.result = gimple_assign_lhs (new_stmt);
2428 }
2429 /* As all "inserted" statements are singleton SCCs, insert
2430 to the valid table. This is strictly needed to
2431 avoid re-generating new value SSA_NAMEs for the same
2432 expression during SCC iteration over and over (the
2433 optimistic table gets cleared after each iteration).
2434 We do not need to insert into the optimistic table, as
2435 lookups there will fall back to the valid table. */
2436 else
2437 {
2438 unsigned int length = vn_nary_length_from_stmt (new_stmt);
2439 vn_nary_op_t vno1
2440 = alloc_vn_nary_op_noinit (length, &vn_tables_insert_obstack);
2441 vno1->value_id = result_info->value_id;
2442 vno1->length = length;
2443 vno1->predicated_values = 0;
2444 vno1->u.result = result;
2445 init_vn_nary_op_from_stmt (vno1, as_a <gassign *> (new_stmt));
2446 vn_nary_op_insert_into (vno1, valid_info->nary);
2447 /* Also do not link it into the undo chain. */
2448 last_inserted_nary = vno1->next;
2449 vno1->next = (vn_nary_op_t)(void *)-1;
2450 }
2451 if (dump_file && (dump_flags & TDF_DETAILS))
2452 {
2453 fprintf (dump_file, "Inserting name ");
2454 print_generic_expr (dump_file, result);
2455 fprintf (dump_file, " for expression ");
2456 print_gimple_expr (dump_file, new_stmt, 0, TDF_SLIM);
2457 fprintf (dump_file, "\n");
2458 }
2459 }
2460 return result;
2461}
2462
2463/* Return a value-number for RCODE OPS... either by looking up an existing
2464 value-number for the simplified result or by inserting the operation. */
2465
2466static tree
2467vn_nary_build_or_lookup (gimple_match_op *res_op)
2468{
2469 return vn_nary_build_or_lookup_1 (res_op, true, true);
2470}
2471
2472/* Try to simplify the expression RCODE OPS... of type TYPE and return
2473 its value if present. */
2474
2475tree
2476vn_nary_simplify (vn_nary_op_t nary)
2477{
2478 if (nary->length > gimple_match_op::MAX_NUM_OPS)
2479 return NULL_TREE;
2480 gimple_match_op op (gimple_match_cond::UNCOND, nary->opcode,
2481 nary->type, nary->length);
2482 memcpy (op.ops, nary->op, sizeof (tree) * nary->length);
2483 return vn_nary_build_or_lookup_1 (&op, false, true);
2484}
2485
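As a usage sketch, a caller that only wants a value number for an already-built binary expression, without inserting on a miss, would construct the gimple_match_op directly (hypothetical operands x and y):

  gimple_match_op op (gimple_match_cond::UNCOND, PLUS_EXPR,
                      TREE_TYPE (x), x, y);
  tree val = vn_nary_build_or_lookup_1 (&op, /*insert=*/false,
                                        /*simplify=*/true);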
2486/* Elimination engine. */
2487
2488class eliminate_dom_walker : public dom_walker
2489{
2490public:
2491 eliminate_dom_walker (cdi_direction, bitmap);
2492 ~eliminate_dom_walker ();
2493
2494 virtual edge before_dom_children (basic_block);
2495 virtual void after_dom_children (basic_block);
2496
2497 virtual tree eliminate_avail (basic_block, tree op);
2498 virtual void eliminate_push_avail (basic_block, tree op);
2499 tree eliminate_insert (basic_block, gimple_stmt_iterator *gsi, tree val);
2500
2501 void eliminate_stmt (basic_block, gimple_stmt_iterator *);
2502
2503 unsigned eliminate_cleanup (bool region_p = false);
2504
2505 bool do_pre;
2506 unsigned int el_todo;
2507 unsigned int eliminations;
2508 unsigned int insertions;
2509
2510 /* SSA names that had their defs inserted by PRE if do_pre. */
2511 bitmap inserted_exprs;
2512
2513 /* Blocks with statements that have had their EH properties changed. */
2514 bitmap need_eh_cleanup;
2515
2516 /* Blocks with statements that have had their AB properties changed. */
2517 bitmap need_ab_cleanup;
2518
2519 /* Local state for the eliminate domwalk. */
2520 auto_vec<gimple *> to_remove;
2521 auto_vec<gimple *> to_fixup;
2522 auto_vec<tree> avail;
2523 auto_vec<tree> avail_stack;
2524};
2525
2526/* Adaptor to the elimination engine using RPO availability. */
2527
2528class rpo_elim : public eliminate_dom_walker
2529{
2530public:
2531 rpo_elim(basic_block entry_)
2532 : eliminate_dom_walker (CDI_DOMINATORS, NULL), entry (entry_),
2533 m_avail_freelist (NULL) {}
2534
2535 virtual tree eliminate_avail (basic_block, tree op);
2536
2537 virtual void eliminate_push_avail (basic_block, tree);
2538
2539 basic_block entry;
2540 /* Freelist of avail entries which are allocated from the vn_ssa_aux
2541 obstack. */
2542 vn_avail *m_avail_freelist;
2543};
2544
2545/* Global RPO state for access from hooks. */
2546static eliminate_dom_walker *rpo_avail;
2547basic_block vn_context_bb;
2548
2549/* Return true if BASE1 and BASE2 can be adjusted so they have the
2550 same address and adjust *OFFSET1 and *OFFSET2 accordingly.
2551 Otherwise return false. */
2552
2553static bool
2554adjust_offsets_for_equal_base_address (tree base1, poly_int64 *offset1,
2555 tree base2, poly_int64 *offset2)
2556{
2557 poly_int64 soff;
2558 if (TREE_CODE (base1) == MEM_REF
2559 && TREE_CODE (base2) == MEM_REF)
2560 {
2561 if (mem_ref_offset (base1).to_shwi (&soff))
2562 {
2563 base1 = TREE_OPERAND (base1, 0);
2564 *offset1 += soff * BITS_PER_UNIT;
2565 }
2566 if (mem_ref_offset (base2).to_shwi (&soff))
2567 {
2568 base2 = TREE_OPERAND (base2, 0);
2569 *offset2 += soff * BITS_PER_UNIT;
2570 }
2571 return operand_equal_p (base1, base2, 0);
2572 }
2573 return operand_equal_p (base1, base2, OEP_ADDRESS_OF);
2574}
2575
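Concretely, the helper above lets two accesses whose bases are MEM_REFs off the same pointer compare equal by folding the constant byte offsets into the bit offsets. A hedged sketch with hypothetical trees base1 = MEM[p + 4] and base2 = MEM[p + 8]:

  poly_int64 off1 = 0, off2 = 0;
  if (adjust_offsets_for_equal_base_address (base1, &off1, base2, &off2))
    /* Both bases reduce to p; off1 == 4 * BITS_PER_UNIT and
       off2 == 8 * BITS_PER_UNIT after the adjustment.  */
    gcc_assert (known_eq (off2 - off1, 4 * BITS_PER_UNIT));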
2576/* Callback for walk_non_aliased_vuses. Tries to perform a lookup
2577 from the statement defining VUSE and if not successful tries to
2578 translate *REFP and VR_ through an aggregate copy at the definition
2579 of VUSE. If *DISAMBIGUATE_ONLY is true then do not perform translation
2580 of *REF and *VR. If only disambiguation was performed then
2581 *DISAMBIGUATE_ONLY is set to true. */
2582
2583static void *
2584vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *data_,
2585 translate_flags *disambiguate_only)
2586{
2587 vn_walk_cb_data *data = (vn_walk_cb_data *)data_;
2588 vn_reference_t vr = data->vr;
2589 gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
2590 tree base = ao_ref_base (ref);
2591 HOST_WIDE_INT offseti = 0, maxsizei, sizei = 0;
2592 static vec<vn_reference_op_s> lhs_ops;
2593 ao_ref lhs_ref;
2594 bool lhs_ref_ok = false;
2595 poly_int64 copy_size;
2596
2597 /* First try to disambiguate after value-replacing in the definition's LHS. */
2598 if (is_gimple_assign (def_stmt))
2599 {
2600 tree lhs = gimple_assign_lhs (def_stmt);
2601 bool valueized_anything = false;
2602 /* Avoid re-allocation overhead. */
2603 lhs_ops.truncate (0);
2604 basic_block saved_rpo_bb = vn_context_bb;
2605 vn_context_bb = gimple_bb (def_stmt);
2606 if (*disambiguate_only <= TR_VALUEIZE_AND_DISAMBIGUATE)
2607 {
2608 copy_reference_ops_from_ref (lhs, &lhs_ops);
2609 valueize_refs_1 (&lhs_ops, &valueized_anything, true);
2610 }
2611 vn_context_bb = saved_rpo_bb;
2612 ao_ref_init (&lhs_ref, lhs);
2613 lhs_ref_ok = true;
2614 if (valueized_anything
2615 && ao_ref_init_from_vn_reference
2616 (&lhs_ref, ao_ref_alias_set (&lhs_ref),
2617 ao_ref_base_alias_set (&lhs_ref), TREE_TYPE (lhs), lhs_ops)
2618 && !refs_may_alias_p_1 (ref, &lhs_ref, data->tbaa_p))
2619 {
2620 *disambiguate_only = TR_VALUEIZE_AND_DISAMBIGUATE;
2621 return NULL;
2622 }
2623
2624 /* Besides valueizing the LHS we can also use access-path based
2625 disambiguation on the original non-valueized ref. */
2626 if (!ref->ref
2627 && lhs_ref_ok
2628 && data->orig_ref.ref)
2629 {
2630 /* We want to use the non-valueized LHS for this, but avoid redundant
2631 work. */
2632 ao_ref *lref = &lhs_ref;
2633 ao_ref lref_alt;
2634 if (valueized_anything)
2635 {
2636 ao_ref_init (&lref_alt, lhs);
2637 lref = &lref_alt;
2638 }
2639 if (!refs_may_alias_p_1 (&data->orig_ref, lref, data->tbaa_p))
2640 {
2641 *disambiguate_only = (valueized_anything
2642 ? TR_VALUEIZE_AND_DISAMBIGUATE
2643 : TR_DISAMBIGUATE);
2644 return NULL;
2645 }
2646 }
2647
2648 /* If we reach a clobbering statement try to skip it and see if
2649 we find a VN result with exactly the same value as the
2650 possible clobber. In this case we can ignore the clobber
2651 and return the found value. */
2652 if (is_gimple_reg_type (TREE_TYPE (lhs))
2653 && types_compatible_p (TREE_TYPE (lhs), vr->type)
2654 && (ref->ref || data->orig_ref.ref))
2655 {
2656 tree *saved_last_vuse_ptr = data->last_vuse_ptr;
2657 /* Do not update last_vuse_ptr in vn_reference_lookup_2. */
2658 data->last_vuse_ptr = NULL;
2659 tree saved_vuse = vr->vuse;
2660 hashval_t saved_hashcode = vr->hashcode;
2661 void *res = vn_reference_lookup_2 (ref, gimple_vuse (def_stmt), data);
2662 /* Need to restore vr->vuse and vr->hashcode. */
2663 vr->vuse = saved_vuse;
2664 vr->hashcode = saved_hashcode;
2665 data->last_vuse_ptr = saved_last_vuse_ptr;
2666 if (res && res != (void *)-1)
2667 {
2668 vn_reference_t vnresult = (vn_reference_t) res;
2669 tree rhs = gimple_assign_rhs1 (def_stmt);
2670 if (TREE_CODE (rhs) == SSA_NAME)
2671 rhs = SSA_VAL (rhs);
2672 if (vnresult->result
2673 && operand_equal_p (vnresult->result, rhs, 0)
2674 /* We have to honor our promise about union type punning
2675 and also support arbitrary overlaps with
2676 -fno-strict-aliasing. So simply resort to alignment to
2677 rule out overlaps. Do this check last because it is
2678 quite expensive compared to the hash-lookup above. */
2679 && multiple_p (get_object_alignment
2680 (ref->ref ? ref->ref : data->orig_ref.ref),
2681 ref->size)
2682 && multiple_p (get_object_alignment (lhs), ref->size))
2683 return res;
2684 }
2685 }
2686 }
2687 else if (*disambiguate_only <= TR_VALUEIZE_AND_DISAMBIGUATE
2688 && gimple_call_builtin_p (def_stmt, BUILT_IN_NORMAL)
2689 && gimple_call_num_args (def_stmt) <= 4)
2690 {
2691 /* For builtin calls valueize its arguments and call the
2692 alias oracle again. Valueization may improve points-to
2693 info of pointers and constify size and position arguments.
2694 Originally this was motivated by PR61034 which has
2695 conditional calls to free falsely clobbering ref because
2696 of imprecise points-to info of the argument. */
2697 tree oldargs[4];
2698 bool valueized_anything = false;
2699 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
2700 {
2701 oldargs[i] = gimple_call_arg (def_stmt, i);
2702 tree val = vn_valueize (oldargs[i]);
2703 if (val != oldargs[i])
2704 {
2705 gimple_call_set_arg (def_stmt, i, val);
2706 valueized_anything = true;
2707 }
2708 }
2709 if (valueized_anything)
2710 {
2711 bool res = call_may_clobber_ref_p_1 (as_a <gcall *> (def_stmt),
2712 ref, data->tbaa_p);
2713 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
2714 gimple_call_set_arg (def_stmt, i, oldargs[i]);
2715 if (!res)
2716 {
2717 *disambiguate_only = TR_VALUEIZE_AND_DISAMBIGUATE;
2718 return NULL;
2719 }
2720 }
2721 }
2722
2723 if (*disambiguate_only > TR_TRANSLATE)
2724 return (void *)-1;
2725
2726 /* If we cannot constrain the size of the reference we cannot
2727 test if anything kills it. */
2728 if (!ref->max_size_known_p ())
2729 return (void *)-1;
2730
2731 poly_int64 offset = ref->offset;
2732 poly_int64 maxsize = ref->max_size;
2733
2734 /* def_stmt may-defs *ref. See if we can derive a value for *ref
2735 from that definition.
2736 1) Memset. */
2737 if (is_gimple_reg_type (vr->type)
2738 && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
2739 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET_CHK))
2740 && (integer_zerop (gimple_call_arg (def_stmt, 1))
2741 || ((TREE_CODE (gimple_call_arg (def_stmt, 1)) == INTEGER_CST
2742 || (INTEGRAL_TYPE_P (vr->type) && known_eq (ref->size, 8)))
2743 && CHAR_BIT == 8
2744 && BITS_PER_UNIT == 8
2745 && BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN
2746 && offset.is_constant (&offseti)
2747 && ref->size.is_constant (&sizei)
2748 && (offseti % BITS_PER_UNIT == 0
2749 || TREE_CODE (gimple_call_arg (def_stmt, 1)) == INTEGER_CST)))
2750 && (poly_int_tree_p (gimple_call_arg (def_stmt, 2))
2751 || (TREE_CODE (gimple_call_arg (def_stmt, 2)) == SSA_NAME
2752 && poly_int_tree_p (SSA_VAL (gimple_call_arg (def_stmt, 2)))))
2753 && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
2754 || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME))
2755 {
2756 tree base2;
2757 poly_int64 offset2, size2, maxsize2;
2758 bool reverse;
2759 tree ref2 = gimple_call_arg (def_stmt, 0);
2760 if (TREE_CODE (ref2) == SSA_NAME)
2761 {
2762 ref2 = SSA_VAL (ref2);
2763 if (TREE_CODE (ref2) == SSA_NAME
2764 && (TREE_CODE (base) != MEM_REF
2765 || TREE_OPERAND (base, 0) != ref2))
2766 {
2767 gimple *def_stmt = SSA_NAME_DEF_STMT (ref2);
2768 if (gimple_assign_single_p (def_stmt)
2769 && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
2770 ref2 = gimple_assign_rhs1 (def_stmt);
2771 }
2772 }
2773 if (TREE_CODE (ref2) == ADDR_EXPR)
2774 {
2775 ref2 = TREE_OPERAND (ref2, 0);
2776 base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2,
2777 &reverse);
2778 if (!known_size_p (maxsize2)
2779 || !known_eq (maxsize2, size2)
2780 || !operand_equal_p (base, base2, OEP_ADDRESS_OF))
2781 return (void *)-1;
2782 }
2783 else if (TREE_CODE (ref2) == SSA_NAME)
2784 {
2785 poly_int64 soff;
2786 if (TREE_CODE (base) != MEM_REF
2787 || !(mem_ref_offset (base)
2788 << LOG2_BITS_PER_UNIT).to_shwi (&soff))
2789 return (void *)-1;
2790 offset += soff;
2791 offset2 = 0;
2792 if (TREE_OPERAND (base, 0) != ref2)
2793 {
2794 gimple *def = SSA_NAME_DEF_STMT (ref2);
2795 if (is_gimple_assign (def)
2796 && gimple_assign_rhs_code (def) == POINTER_PLUS_EXPR
2797 && gimple_assign_rhs1 (def) == TREE_OPERAND (base, 0)
2798 && poly_int_tree_p (gimple_assign_rhs2 (def)))
2799 {
2800 tree rhs2 = gimple_assign_rhs2 (def);
2801 if (!(poly_offset_int::from (wi::to_poly_wide (rhs2),
2802 SIGNED)
2803 << LOG2_BITS_PER_UNIT).to_shwi (&offset2))
2804 return (void *)-1;
2805 ref2 = gimple_assign_rhs1 (def);
2806 if (TREE_CODE (ref2) == SSA_NAME)
2807 ref2 = SSA_VAL (ref2);
Value stored to 'ref2' is never read
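This is the dead store the report flags: after the POINTER_PLUS_EXPR strip-back, only offset2 is consumed on this path (the later overlap and subrange tests use len, offset and offset2), so re-valueizing ref2 has no observable effect. A minimal cleanup, sketched here and not necessarily the fix the maintainers would pick, is to stop after extracting the offset:

  tree rhs2 = gimple_assign_rhs2 (def);
  if (!(poly_offset_int::from (wi::to_poly_wide (rhs2), SIGNED)
        << LOG2_BITS_PER_UNIT).to_shwi (&offset2))
    return (void *)-1;
  /* 'ref2' is not read again on this path; the trailing
     ref2 = SSA_VAL (ref2) assignment can simply be dropped.  */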
2808 }
2809 else
2810 return (void *)-1;
2811 }
2812 }
2813 else
2814 return (void *)-1;
2815 tree len = gimple_call_arg (def_stmt, 2);
2816 HOST_WIDE_INT leni, offset2i;
2817 if (TREE_CODE (len) == SSA_NAME)
2818 len = SSA_VAL (len);
2819 /* Sometimes the above trickery is smarter than alias analysis. Take
2820 advantage of that. */
2821 if (!ranges_maybe_overlap_p (offset, maxsize, offset2,
2822 (wi::to_poly_offset (len)
2823 << LOG2_BITS_PER_UNIT)))
2824 return NULL;
2825 if (data->partial_defs.is_empty ()
2826 && known_subrange_p (offset, maxsize, offset2,
2827 wi::to_poly_offset (len) << LOG2_BITS_PER_UNIT))
2828 {
2829 tree val;
2830 if (integer_zerop (gimple_call_arg (def_stmt, 1)))
2831 val = build_zero_cst (vr->type);
2832 else if (INTEGRAL_TYPE_P (vr->type)
2833 && known_eq (ref->size, 8)
2834 && offseti % BITS_PER_UNIT == 0)
2835 {
2836 gimple_match_op res_op (gimple_match_cond::UNCOND, NOP_EXPR,
2837 vr->type, gimple_call_arg (def_stmt, 1));
2838 val = vn_nary_build_or_lookup (&res_op);
2839 if (!val
2840 || (TREE_CODE (val) == SSA_NAME
2841 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val)))
2842 return (void *)-1;
2843 }
2844 else
2845 {
2846 unsigned buflen = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (vr->type)) + 1;
2847 if (INTEGRAL_TYPE_P (vr->type))
2848 buflen = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (vr->type)) + 1;
2849 unsigned char *buf = XALLOCAVEC (unsigned char, buflen);
2850 memset (buf, TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 1)),
2851 buflen);
2852 if (BYTES_BIG_ENDIAN)
2853 {
2854 unsigned int amnt
2855 = (((unsigned HOST_WIDE_INT) offseti + sizei)
2856 % BITS_PER_UNIT);
2857 if (amnt)
2858 {
2859 shift_bytes_in_array_right (buf, buflen,
2860 BITS_PER_UNIT - amnt);
2861 buf++;
2862 buflen--;
2863 }
2864 }
2865 else if (offseti % BITS_PER_UNIT != 0)
2866 {
2867 unsigned int amnt
2868 = BITS_PER_UNIT - ((unsigned HOST_WIDE_INT) offseti
2869 % BITS_PER_UNIT);
2870 shift_bytes_in_array_left (buf, buflen, amnt);
2871 buf++;
2872 buflen--;
2873 }
2874 val = native_interpret_expr (vr->type, buf, buflen);
2875 if (!val)
2876 return (void *)-1;
2877 }
2878 return data->finish (0, 0, val);
2879 }
2880 /* For now handle clearing memory with partial defs. */
2881 else if (known_eq (ref->size, maxsize)
2882 && integer_zerop (gimple_call_arg (def_stmt, 1))
2883 && tree_fits_poly_int64_p (len)
2884 && tree_to_poly_int64 (len).is_constant (&leni)
2885 && leni <= INTTYPE_MAXIMUM (HOST_WIDE_INT) / BITS_PER_UNIT
2886 && offset.is_constant (&offseti)
2887 && offset2.is_constant (&offset2i)
2888 && maxsize.is_constant (&maxsizei)
2889 && ranges_known_overlap_p (offseti, maxsizei, offset2i,
2890 leni << LOG2_BITS_PER_UNIT))
2891 {
2892 pd_data pd;
2893 pd.rhs = build_constructor (NULL_TREE, NULL);
2894 pd.offset = offset2i;
2895 pd.size = leni << LOG2_BITS_PER_UNIT;
2896 return data->push_partial_def (pd, 0, 0, offseti, maxsizei);
2897 }
2898 }
2899
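At the source level the memset case just handled covers reads that land fully inside the cleared (or byte-filled) region, for example:

  int a[4];
  memset (a, 0, sizeof a);
  int x = a[1];   /* known_subrange_p holds: x value-numbers to 0;
                     with memset (a, 0x11, sizeof a) the buffer /
                     native_interpret_expr branch would yield
                     0x11111111 instead.  */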
2900 /* 2) Assignment from an empty CONSTRUCTOR. */
2901 else if (is_gimple_reg_type (vr->type)
2902 && gimple_assign_single_p (def_stmt)
2903 && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
2904 && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
2905 {
2906 tree base2;
2907 poly_int64 offset2, size2, maxsize2;
2908 HOST_WIDE_INT offset2i, size2i;
2909 gcc_assert (lhs_ref_ok);
2910 base2 = ao_ref_base (&lhs_ref);
2911 offset2 = lhs_ref.offset;
2912 size2 = lhs_ref.size;
2913 maxsize2 = lhs_ref.max_size;
2914 if (known_size_p (maxsize2)
2915 && known_eq (maxsize2, size2)
2916 && adjust_offsets_for_equal_base_address (base, &offset,
2917 base2, &offset2))
2918 {
2919 if (data->partial_defs.is_empty ()
2920 && known_subrange_p (offset, maxsize, offset2, size2))
2921 {
2922 /* While technically undefined behavior do not optimize
2923 a full read from a clobber. */
2924 if (gimple_clobber_p (def_stmt))
2925 return (void *)-1;
2926 tree val = build_zero_cst (vr->type);
2927 return data->finish (ao_ref_alias_set (&lhs_ref),
2928 ao_ref_base_alias_set (&lhs_ref), val);
2929 }
2930 else if (known_eq (ref->size, maxsize)
2931 && maxsize.is_constant (&maxsizei)
2932 && offset.is_constant (&offseti)
2933 && offset2.is_constant (&offset2i)
2934 && size2.is_constant (&size2i)
2935 && ranges_known_overlap_p (offseti, maxsizei,
2936 offset2i, size2i))
2937 {
2938 /* Let clobbers be consumed by the partial-def tracker
2939 which can choose to ignore them if they are shadowed
2940 by a later def. */
2941 pd_data pd;
2942 pd.rhs = gimple_assign_rhs1 (def_stmt);
2943 pd.offset = offset2i;
2944 pd.size = size2i;
2945 return data->push_partial_def (pd, ao_ref_alias_set (&lhs_ref),
2946 ao_ref_base_alias_set (&lhs_ref),
2947 offseti, maxsizei);
2948 }
2949 }
2950 }
2951
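The empty-CONSTRUCTOR case corresponds to source like the following sketch, where the zeroing assignment is represented in GIMPLE as a store of an empty CONSTRUCTOR:

  struct S { int x; int y; } s;
  s = (struct S) {};   /* assignment from {} , an empty CONSTRUCTOR */
  int v = s.y;         /* read is fully covered: v value-numbers to 0 */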
2952 /* 3) Assignment from a constant. We can use fold's native encode/interpret
2953 routines to extract the assigned bits. */
2954 else if (known_eq (ref->size, maxsize)
2955 && is_gimple_reg_type (vr->type)
2956 && !reverse_storage_order_for_component_p (vr->operands)
2957 && !contains_storage_order_barrier_p (vr->operands)
2958 && gimple_assign_single_p (def_stmt)
2959 && CHAR_BIT == 8
2960 && BITS_PER_UNIT == 8
2961 && BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN
2962 /* native_encode and native_decode operate on arrays of bytes
2963 and so fundamentally need a compile-time size and offset. */
2964 && maxsize.is_constant (&maxsizei)
2965 && offset.is_constant (&offseti)
2966 && (is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt))
2967 || (TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME
2968 && is_gimple_min_invariant (SSA_VAL (gimple_assign_rhs1 (def_stmt))))))
2969 {
2970 tree lhs = gimple_assign_lhs (def_stmt);
2971 tree base2;
2972 poly_int64 offset2, size2, maxsize2;
2973 HOST_WIDE_INT offset2i, size2i;
2974 bool reverse;
2975 gcc_assert (lhs_ref_ok);
2976 base2 = ao_ref_base (&lhs_ref);
2977 offset2 = lhs_ref.offset;
2978 size2 = lhs_ref.size;
2979 maxsize2 = lhs_ref.max_size;
2980 reverse = reverse_storage_order_for_component_p (lhs);
2981 if (base2
2982 && !reverse
2983 && !storage_order_barrier_p (lhs)
2984 && known_eq (maxsize2, size2)
2985 && adjust_offsets_for_equal_base_address (base, &offset,
2986 base2, &offset2)
2987 && offset.is_constant (&offseti)
2988 && offset2.is_constant (&offset2i)
2989 && size2.is_constant (&size2i))
2990 {
2991 if (data->partial_defs.is_empty ()
2992 && known_subrange_p (offseti, maxsizei, offset2, size2))
2993 {
2994 /* We support up to 512-bit values (for V8DFmode). */
2995 unsigned char buffer[65];
2996 int len;
2997
2998 tree rhs = gimple_assign_rhs1 (def_stmt);
2999 if (TREE_CODE (rhs) == SSA_NAME)
3000 rhs = SSA_VAL (rhs);
3001 len = native_encode_expr (rhs,
3002 buffer, sizeof (buffer) - 1,
3003 (offseti - offset2i) / BITS_PER_UNIT);
3004 if (len > 0 && len * BITS_PER_UNIT >= maxsizei)
3005 {
3006 tree type = vr->type;
3007 unsigned char *buf = buffer;
3008 unsigned int amnt = 0;
3009 /* Make sure to interpret in a type that has a range
3010 covering the whole access size. */
3011 if (INTEGRAL_TYPE_P (vr->type)
3012 && maxsizei != TYPE_PRECISION (vr->type))
3013 type = build_nonstandard_integer_type (maxsizei,
3014 TYPE_UNSIGNED (type));
3015 if (BYTES_BIG_ENDIAN)
3016 {
3017 /* For big-endian native_encode_expr stored the rhs
3018 such that the LSB of it is the LSB of buffer[len - 1].
3019 That bit is stored into memory at position
3020 offset2 + size2 - 1, i.e. in byte
3021 base + (offset2 + size2 - 1) / BITS_PER_UNIT.
3022 E.g. for offset2 1 and size2 14, rhs -1 and memory
3023 previously cleared that is:
3024 0 1
3025 01111111|11111110
3026 Now, if we want to extract offset 2 and size 12 from
3027 it using native_interpret_expr (which actually works
3028 for integral bitfield types in terms of byte size of
3029 the mode), the native_encode_expr stored the value
3030 into buffer as
3031 XX111111|11111111
3032 and returned len 2 (the X bits are outside of
3033 precision).
3034 Let sz be maxsize / BITS_PER_UNIT if not extracting
3035 a bitfield, and GET_MODE_SIZE otherwise.
3036 We need to align the LSB of the value we want to
3037 extract as the LSB of buf[sz - 1].
3038 The LSB from memory we need to read is at position
3039 offset + maxsize - 1. */
3040 HOST_WIDE_INT sz = maxsizei / BITS_PER_UNIT;
3041 if (INTEGRAL_TYPE_P (type))
3042 sz = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
3043 amnt = ((unsigned HOST_WIDE_INT) offset2i + size2i
3044 - offseti - maxsizei) % BITS_PER_UNIT;
3045 if (amnt)
3046 shift_bytes_in_array_right (buffer, len, amnt);
3047 amnt = ((unsigned HOST_WIDE_INT) offset2i + size2i
3048 - offseti - maxsizei - amnt) / BITS_PER_UNIT;
3049 if ((unsigned HOST_WIDE_INT) sz + amnt > (unsigned) len)
3050 len = 0;
3051 else
3052 {
3053 buf = buffer + len - sz - amnt;
3054 len -= (buf - buffer);
3055 }
3056 }
3057 else
3058 {
3059 amnt = ((unsigned HOST_WIDE_INT) offset2i
3060 - offseti) % BITS_PER_UNIT;
3061 if (amnt)
3062 {
3063 buffer[len] = 0;
3064 shift_bytes_in_array_left (buffer, len + 1, amnt);
3065 buf = buffer + 1;
3066 }
3067 }
3068 tree val = native_interpret_expr (type, buf, len);
3069 /* If we chop off bits because the type's precision doesn't
3070 match the memory access size this is ok when optimizing
3071 reads but not when called from the DSE code during
3072 elimination. */
3073 if (val
3074 && type != vr->type)
3075 {
3076 if (! int_fits_type_p (val, vr->type))
3077 val = NULL_TREE;
3078 else
3079 val = fold_convert (vr->type, val);
3080 }
3081
3082 if (val)
3083 return data->finish (ao_ref_alias_set (&lhs_ref),
3084 ao_ref_base_alias_set (&lhs_ref), val);
3085 }
3086 }
3087 else if (ranges_known_overlap_p (offseti, maxsizei, offset2i,
3088 size2i))
3089 {
3090 pd_data pd;
3091 tree rhs = gimple_assign_rhs1 (def_stmt);
3092 if (TREE_CODE (rhs) == SSA_NAME)
3093 rhs = SSA_VAL (rhs);
3094 pd.rhs = rhs;
3095 pd.offset = offset2i;
3096 pd.size = size2i;
3097 return data->push_partial_def (pd, ao_ref_alias_set (&lhs_ref),
3098 ao_ref_base_alias_set (&lhs_ref),
3099 offseti, maxsizei);
3100 }
3101 }
3102 }
3103
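The heavy lifting in case 3 is a round trip through fold-const.c's byte-level routines; stripped of the endian fixups above it is essentially (names as used in the code):

  unsigned char buffer[65];   /* supports up to 512-bit values (V8DFmode) */
  int len = native_encode_expr (rhs, buffer, sizeof (buffer) - 1,
                                (offseti - offset2i) / BITS_PER_UNIT);
  tree val = (len > 0 && len * BITS_PER_UNIT >= maxsizei)
             ? native_interpret_expr (type, buffer, len) : NULL_TREE;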
3104 /* 4) Assignment from an SSA name whose definition we may be able
3105 to access pieces from, or which we can combine to a larger entity. */
3106 else if (known_eq (ref->size, maxsize)
3107 && is_gimple_reg_type (vr->type)
3108 && !reverse_storage_order_for_component_p (vr->operands)
3109 && !contains_storage_order_barrier_p (vr->operands)
3110 && gimple_assign_single_p (def_stmt)
3111 && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
3112 {
3113 tree lhs = gimple_assign_lhs (def_stmt);
3114 tree base2;
3115 poly_int64 offset2, size2, maxsize2;
3116 HOST_WIDE_INT offset2i, size2i, offseti;
3117 bool reverse;
3118 gcc_assert (lhs_ref_ok);
3119 base2 = ao_ref_base (&lhs_ref);
3120 offset2 = lhs_ref.offset;
3121 size2 = lhs_ref.size;
3122 maxsize2 = lhs_ref.max_size;
3123 reverse = reverse_storage_order_for_component_p (lhs);
3124 tree def_rhs = gimple_assign_rhs1 (def_stmt);
3125 if (!reverse
3126 && !storage_order_barrier_p (lhs)
3127 && known_size_p (maxsize2)
3128 && known_eq (maxsize2, size2)
3129 && adjust_offsets_for_equal_base_address (base, &offset,
3130 base2, &offset2))
3131 {
3132 if (data->partial_defs.is_empty ()
3133 && known_subrange_p (offset, maxsize, offset2, size2)
3134 /* ??? We can't handle bitfield precision extracts without
3135 either using an alternate type for the BIT_FIELD_REF and
3136 then doing a conversion or possibly adjusting the offset
3137 according to endianness. */
3138 && (! INTEGRAL_TYPE_P (vr->type)
3139 || known_eq (ref->size, TYPE_PRECISION (vr->type)))
3140 && multiple_p (ref->size, BITS_PER_UNIT))
3141 {
3142 tree val = NULL_TREE;
3143 if (! INTEGRAL_TYPE_P (TREE_TYPE (def_rhs))
3144 || type_has_mode_precision_p (TREE_TYPE (def_rhs)))
3145 {
3146 gimple_match_op op (gimple_match_cond::UNCOND,
3147 BIT_FIELD_REF, vr->type,
3148 SSA_VAL (def_rhs),
3149 bitsize_int (ref->size),
3150 bitsize_int (offset - offset2));
3151 val = vn_nary_build_or_lookup (&op);
3152 }
3153 else if (known_eq (ref->size, size2))
3154 {
3155 gimple_match_op op (gimple_match_cond::UNCOND,
3156 VIEW_CONVERT_EXPR, vr->type,
3157 SSA_VAL (def_rhs));
3158 val = vn_nary_build_or_lookup (&op);
3159 }
3160 if (val
3161 && (TREE_CODE (val) != SSA_NAME
3162 || ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val)))
3163 return data->finish (ao_ref_alias_set (&lhs_ref),
3164 ao_ref_base_alias_set (&lhs_ref), val);
3165 }
3166 else if (maxsize.is_constant (&maxsizei)
3167 && offset.is_constant (&offseti)
3168 && offset2.is_constant (&offset2i)
3169 && size2.is_constant (&size2i)
3170 && ranges_known_overlap_p (offset, maxsize, offset2, size2))
3171 {
3172 pd_data pd;
3173 pd.rhs = SSA_VAL (def_rhs);
3174 pd.offset = offset2i;
3175 pd.size = size2i;
3176 return data->push_partial_def (pd, ao_ref_alias_set (&lhs_ref),
3177 ao_ref_base_alias_set (&lhs_ref),
3178 offseti, maxsizei);
3179 }
3180 }
3181 }
3182
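In source terms, case 4 turns a contained re-read of another store's SSA value into a sub-object extraction on that value, e.g. (little-endian assumed for the array index; illustration only):

  union U { long long l; int i[2]; } u;
  u.l = x_1;        /* store of SSA name x_1 */
  int lo = u.i[0];  /* re-expressed as BIT_FIELD_REF <x_1, 32, 0>;
                       a same-sized read would use VIEW_CONVERT_EXPR */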
3183 /* 5) For aggregate copies translate the reference through them if
3184 the copy kills ref. */
3185 else if (data->vn_walk_kind == VN_WALKREWRITE
3186 && gimple_assign_single_p (def_stmt)
3187 && (DECL_P (gimple_assign_rhs1 (def_stmt))
3188 || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
3189 || handled_component_p (gimple_assign_rhs1 (def_stmt))))
3190 {
3191 tree base2;
3192 int i, j, k;
3193 auto_vec<vn_reference_op_s> rhs;
3194 vn_reference_op_t vro;
3195 ao_ref r;
3196
3197 gcc_assert (lhs_ref_ok);
3198
3199 /* See if the assignment kills REF. */
3200 base2 = ao_ref_base (&lhs_ref);
3201 if (!lhs_ref.max_size_known_p ()
3202 || (base != base2
3203 && (TREE_CODE (base) != MEM_REF
3204 || TREE_CODE (base2) != MEM_REF
3205 || TREE_OPERAND (base, 0) != TREE_OPERAND (base2, 0)
3206 || !tree_int_cst_equal (TREE_OPERAND (base, 1),
3207 TREE_OPERAND (base2, 1))))
3208 || !stmt_kills_ref_p (def_stmt, ref))
3209 return (void *)-1;
3210
3211 /* Find the common base of ref and the lhs. lhs_ops already
3212 contains valueized operands for the lhs. */
3213 i = vr->operands.length () - 1;
3214 j = lhs_ops.length () - 1;
3215 while (j >= 0 && i >= 0
3216 && vn_reference_op_eq (&vr->operands[i], &lhs_ops[j]))
3217 {
3218 i--;
3219 j--;
3220 }
3221
3222 /* ??? The innermost op should always be a MEM_REF and we already
3223 checked that the assignment to the lhs kills vr. Thus for
3224 aggregate copies using char[] types the vn_reference_op_eq
3225 may fail when comparing types for compatibility. But we really
3226 don't care here - further lookups with the rewritten operands
3227 will simply fail if we messed up types too badly. */
3228 poly_int64 extra_off = 0;
3229 if (j == 0 && i >= 0
3230 && lhs_ops[0].opcode == MEM_REF
3231 && maybe_ne (lhs_ops[0].off, -1))
3232 {
3233 if (known_eq (lhs_ops[0].off, vr->operands[i].off))
3234 i--, j--;
3235 else if (vr->operands[i].opcode == MEM_REF
3236 && maybe_ne (vr->operands[i].off, -1))
3237 {
3238 extra_off = vr->operands[i].off - lhs_ops[0].off;
3239 i--, j--;
3240 }
3241 }
3242
3243 /* i now points to the first additional op.
3244 ??? LHS may not be completely contained in VR, one or more
3245 VIEW_CONVERT_EXPRs could be in its way. We could at least
3246 try handling outermost VIEW_CONVERT_EXPRs. */
3247 if (j != -1)
3248 return (void *)-1;
3249
3250 /* Punt if the additional ops contain a storage order barrier. */
3251 for (k = i; k >= 0; k--)
3252 {
3253 vro = &vr->operands[k];
3254 if (vro->opcode == VIEW_CONVERT_EXPR && vro->reverse)
3255 return (void *)-1;
3256 }
3257
3258 /* Now re-write REF to be based on the rhs of the assignment. */
3259 tree rhs1 = gimple_assign_rhs1 (def_stmt);
3260 copy_reference_ops_from_ref (rhs1, &rhs);
3261
3262 /* Apply an extra offset to the inner MEM_REF of the RHS. */
3263 if (maybe_ne (extra_off, 0))
3264 {
3265 if (rhs.length () < 2)
3266 return (void *)-1;
3267 int ix = rhs.length () - 2;
3268 if (rhs[ix].opcode != MEM_REF
3269 || known_eq (rhs[ix].off, -1))
3270 return (void *)-1;
3271 rhs[ix].off += extra_off;
3272 rhs[ix].op0 = int_const_binop (PLUS_EXPR, rhs[ix].op0,
3273 build_int_cst (TREE_TYPE (rhs[ix].op0),
3274 extra_off));
3275 }
3276
3277 /* Save the operands since we need to use the original ones for
3278 the hash entry we use. */
3279 if (!data->saved_operands.exists ())
3280 data->saved_operands = vr->operands.copy ();
3281
3282 /* We need to pre-pend vr->operands[0..i] to rhs. */
3283 vec<vn_reference_op_s> old = vr->operands;
3284 if (i + 1 + rhs.length () > vr->operands.length ())
3285 vr->operands.safe_grow (i + 1 + rhs.length (), true);
3286 else
3287 vr->operands.truncate (i + 1 + rhs.length ());
3288 FOR_EACH_VEC_ELT (rhs, j, vro)
3289 vr->operands[i + 1 + j] = *vro;
3290 valueize_refs (&vr->operands);
3291 if (old == shared_lookup_references)
3292 shared_lookup_references = vr->operands;
3293 vr->hashcode = vn_reference_compute_hash (vr);
3294
3295 /* Try folding the new reference to a constant. */
3296 tree val = fully_constant_vn_reference_p (vr);
3297 if (val)
3298 {
3299 if (data->partial_defs.is_empty ())
3300 return data->finish (ao_ref_alias_set (&lhs_ref),
3301 ao_ref_base_alias_set (&lhs_ref), val);
3302 /* This is the only interesting case for partial-def handling
3303 coming from targets that like to gimplify init-ctors as
3304 aggregate copies from constant data like aarch64 for
3305 PR83518. */
3306 if (maxsize.is_constant (&maxsizei) && known_eq (ref->size, maxsize))
3307 {
3308 pd_data pd;
3309 pd.rhs = val;
3310 pd.offset = 0;
3311 pd.size = maxsizei;
3312 return data->push_partial_def (pd, ao_ref_alias_set (&lhs_ref),
3313 ao_ref_base_alias_set (&lhs_ref),
3314 0, maxsizei);
3315 }
3316 }
3317
3318 /* Continuing with partial defs isn't easily possible here, we
3319 have to find a full def from further lookups from here. Probably
3320 not worth the special-casing everywhere. */
3321 if (!data->partial_defs.is_empty ())
3322 return (void *)-1;
3323
3324 /* Adjust *ref from the new operands. */
3325 ao_ref rhs1_ref;
3326 ao_ref_init (&rhs1_ref, rhs1);
3327 if (!ao_ref_init_from_vn_reference (&r, ao_ref_alias_set (&rhs1_ref),
3328 ao_ref_base_alias_set (&rhs1_ref),
3329 vr->type, vr->operands))
3330 return (void *)-1;
3331 /* This can happen with bitfields. */
3332 if (maybe_ne (ref->size, r.size))
3333 {
3334 /* If the access lacks some subsetting simply apply that by
3335 shortening it. That in the end can only be successful
3336 if we can pun the lookup result which in turn requires
3337 exact offsets. */
3338 if (known_eq (r.size, r.max_size)
3339 && known_lt (ref->size, r.size))
3340 r.size = r.max_size = ref->size;
3341 else
3342 return (void *)-1;
3343 }
3344 *ref = r;
3345
3346 /* Do not update last seen VUSE after translating. */
3347 data->last_vuse_ptr = NULL;
3348 /* Invalidate the original access path since it now contains
3349 the wrong base. */
3350 data->orig_ref.ref = NULL_TREE;
3351 /* Use the alias-set of this LHS for recording an eventual result. */
3352 if (data->first_set == -2)
3353 {
3354 data->first_set = ao_ref_alias_set (&lhs_ref);
3355 data->first_base_set = ao_ref_base_alias_set (&lhs_ref);
3356 }
3357
3358 /* Keep looking for the adjusted *REF / VR pair. */
3359 return NULL;
3360 }
3361
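Case 5's translation is easiest to see on a plain aggregate copy (sketch):

  struct S { int f; char pad[12]; } a, b;
  a = b;         /* the copy kills the old contents of a */
  int v = a.f;   /* *ref / VR are rewritten to b.f and the VUSE walk
                    continues from the copy's VUSE */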
3362 /* 6) For memcpy copies translate the reference through them if the copy
3363 kills ref. But we cannot (easily) do this translation if the memcpy is
3364 a storage order barrier, i.e. is equivalent to a VIEW_CONVERT_EXPR that
3365 can modify the storage order of objects (see storage_order_barrier_p). */
3366 else if (data->vn_walk_kind == VN_WALKREWRITE
3367 && is_gimple_reg_type (vr->type)
3368 /* ??? Handle BCOPY as well. */
3369 && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
3370 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY_CHK)
3371 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
3372 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY_CHK)
3373 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE)
3374 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE_CHK))
3375 && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
3376 || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
3377 && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
3378 || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
3379 && (poly_int_tree_p (gimple_call_arg (def_stmt, 2), &copy_size)
3380 || (TREE_CODE (gimple_call_arg (def_stmt, 2)) == SSA_NAME
3381 && poly_int_tree_p (SSA_VAL (gimple_call_arg (def_stmt, 2)),
3382 &copy_size)))
3383 /* Handling this is more complicated, give up for now. */
3384 && data->partial_defs.is_empty ())
3385 {
3386 tree lhs, rhs;
3387 ao_ref r;
3388 poly_int64 rhs_offset, lhs_offset;
3389 vn_reference_op_s op;
3390 poly_uint64 mem_offset;
3391 poly_int64 at, byte_maxsize;
3392
3393 /* Only handle non-variable, addressable refs. */
3394 if (maybe_ne (ref->size, maxsize)
3395 || !multiple_p (offset, BITS_PER_UNIT, &at)
3396 || !multiple_p (maxsize, BITS_PER_UNIT, &byte_maxsize))
3397 return (void *)-1;
3398
3399 /* Extract a pointer base and an offset for the destination. */
3400 lhs = gimple_call_arg (def_stmt, 0);
3401 lhs_offset = 0;
3402 if (TREE_CODE (lhs) == SSA_NAME)
3403 {
3404 lhs = vn_valueize (lhs);
3405 if (TREE_CODE (lhs) == SSA_NAME)
3406 {
3407 gimple *def_stmt = SSA_NAME_DEF_STMT (lhs);
3408 if (gimple_assign_single_p (def_stmt)
3409 && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
3410 lhs = gimple_assign_rhs1 (def_stmt);
3411 }
3412 }
3413 if (TREE_CODE (lhs) == ADDR_EXPR)
3414 {
3415 if (AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (lhs)))
3416 && TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (TREE_TYPE (lhs))))
3417 return (void *)-1;
3418 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
3419 &lhs_offset);
3420 if (!tem)
3421 return (void *)-1;
3422 if (TREE_CODE (tem) == MEM_REF
3423 && poly_int_tree_p (TREE_OPERAND (tem, 1), &mem_offset))
3424 {
3425 lhs = TREE_OPERAND (tem, 0);
3426 if (TREE_CODE (lhs) == SSA_NAME)
3427 lhs = vn_valueize (lhs);
3428 lhs_offset += mem_offset;
3429 }
3430 else if (DECL_P (tem))
3431 lhs = build_fold_addr_expr (tem);
3432 else
3433 return (void *)-1;
3434 }
3435 if (TREE_CODE (lhs) != SSA_NAME
3436 && TREE_CODE (lhs) != ADDR_EXPR)
3437 return (void *)-1;
3438
3439 /* Extract a pointer base and an offset for the source. */
3440 rhs = gimple_call_arg (def_stmt, 1);
3441 rhs_offset = 0;
3442 if (TREE_CODE (rhs) == SSA_NAME)
3443 rhs = vn_valueize (rhs);
3444 if (TREE_CODE (rhs) == ADDR_EXPR)
3445 {
3446 if (AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (rhs)))
3447 && TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (TREE_TYPE (rhs))))
3448 return (void *)-1;
3449 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0)(*((const_cast<tree*> (tree_operand_check ((rhs), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-sccvn.c"
, 3449, __FUNCTION__)))))
,
3450 &rhs_offset);
3451 if (!tem)
3452 return (void *)-1;
3453 if (TREE_CODE (tem)((enum tree_code) (tem)->base.code) == MEM_REF
3454 && poly_int_tree_p (TREE_OPERAND (tem, 1)(*((const_cast<tree*> (tree_operand_check ((tem), (1), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-sccvn.c"
, 3454, __FUNCTION__)))))
, &mem_offset))
3455 {
3456 rhs = TREE_OPERAND (tem, 0)(*((const_cast<tree*> (tree_operand_check ((tem), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-sccvn.c"
, 3456, __FUNCTION__)))))
;
3457 rhs_offset += mem_offset;
3458 }
3459 else if (DECL_P (tem)(tree_code_type[(int) (((enum tree_code) (tem)->base.code)
)] == tcc_declaration)
3460 || TREE_CODE (tem)((enum tree_code) (tem)->base.code) == STRING_CST)
3461 rhs = build_fold_addr_expr (tem)build_fold_addr_expr_loc (((location_t) 0), (tem));
3462 else
3463 return (void *)-1;
3464 }
3465 if (TREE_CODE (rhs)((enum tree_code) (rhs)->base.code) == SSA_NAME)
3466 rhs = SSA_VAL (rhs);
3467 else if (TREE_CODE (rhs)((enum tree_code) (rhs)->base.code) != ADDR_EXPR)
3468 return (void *)-1;
3469
3470 /* The bases of the destination and the references have to agree. */
 3471 if (TREE_CODE (base) == MEM_REF)
 3472 {
 3473 if (TREE_OPERAND (base, 0) != lhs
 3474 || !poly_int_tree_p (TREE_OPERAND (base, 1), &mem_offset))
 3475 return (void *) -1;
 3476 at += mem_offset;
 3477 }
 3478 else if (!DECL_P (base)
 3479 || TREE_CODE (lhs) != ADDR_EXPR
 3480 || TREE_OPERAND (lhs, 0) != base)
 3481 return (void *)-1;
3482
3483 /* If the access is completely outside of the memcpy destination
3484 area there is no aliasing. */
3485 if (!ranges_maybe_overlap_p (lhs_offset, copy_size, at, byte_maxsize))
 3486 return NULL;
3487 /* And the access has to be contained within the memcpy destination. */
3488 if (!known_subrange_p (at, byte_maxsize, lhs_offset, copy_size))
3489 return (void *)-1;
3490
3491 /* Save the operands since we need to use the original ones for
3492 the hash entry we use. */
3493 if (!data->saved_operands.exists ())
3494 data->saved_operands = vr->operands.copy ();
3495
3496 /* Make room for 2 operands in the new reference. */
3497 if (vr->operands.length () < 2)
3498 {
3499 vec<vn_reference_op_s> old = vr->operands;
3500 vr->operands.safe_grow_cleared (2, true);
3501 if (old == shared_lookup_references)
3502 shared_lookup_references = vr->operands;
3503 }
3504 else
3505 vr->operands.truncate (2);
3506
3507 /* The looked-through reference is a simple MEM_REF. */
3508 memset (&op, 0, sizeof (op));
3509 op.type = vr->type;
3510 op.opcode = MEM_REF;
 3511 op.op0 = build_int_cst (ptr_type_node, at - lhs_offset + rhs_offset);
 3512 op.off = at - lhs_offset + rhs_offset;
 3513 vr->operands[0] = op;
 3514 op.type = TREE_TYPE (rhs);
 3515 op.opcode = TREE_CODE (rhs);
3516 op.op0 = rhs;
3517 op.off = -1;
3518 vr->operands[1] = op;
3519 vr->hashcode = vn_reference_compute_hash (vr);
3520
3521 /* Try folding the new reference to a constant. */
3522 tree val = fully_constant_vn_reference_p (vr);
3523 if (val)
3524 return data->finish (0, 0, val);
3525
3526 /* Adjust *ref from the new operands. */
3527 if (!ao_ref_init_from_vn_reference (&r, 0, 0, vr->type, vr->operands))
3528 return (void *)-1;
3529 /* This can happen with bitfields. */
3530 if (maybe_ne (ref->size, r.size))
3531 return (void *)-1;
3532 *ref = r;
3533
3534 /* Do not update last seen VUSE after translating. */
 3535 data->last_vuse_ptr = NULL;
3536 /* Invalidate the original access path since it now contains
3537 the wrong base. */
 3538 data->orig_ref.ref = NULL_TREE;
3539 /* Use the alias-set of this stmt for recording an eventual result. */
3540 if (data->first_set == -2)
3541 {
3542 data->first_set = 0;
3543 data->first_base_set = 0;
3544 }
3545
3546 /* Keep looking for the adjusted *REF / VR pair. */
 3547 return NULL;
3548 }
3549
3550 /* Bail out and stop walking. */
3551 return (void *)-1;
3552}
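
The memcpy/memmove case above rewrites the looked-up reference into a MEM_REF based at the copy source, so the VUSE walk can continue past the call. A minimal C sketch of the situation this enables (an assumed illustration, not part of this file):

struct S { int x; int y; };

int
f (struct S *a, struct S *b)
{
  __builtin_memcpy (b, a, sizeof (struct S));
  /* This load lies entirely within the memcpy destination, so the
     lookup above translates it to a load of a->x at the copy source
     and keeps walking.  */
  return b->x;
}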
3553
3554/* Return a reference op vector from OP that can be used for
3555 vn_reference_lookup_pieces. The caller is responsible for releasing
3556 the vector. */
3557
3558vec<vn_reference_op_s>
3559vn_reference_operands_for_lookup (tree op)
3560{
3561 bool valueized;
3562 return valueize_shared_reference_ops_from_ref (op, &valueized).copy ();
3563}
3564
 3565 /* Lookup a reference operation by its parts, in the current hash table.
3566 Returns the resulting value number if it exists in the hash table,
3567 NULL_TREE otherwise. VNRESULT will be filled in with the actual
3568 vn_reference_t stored in the hashtable if something is found. */
3569
3570tree
3571vn_reference_lookup_pieces (tree vuse, alias_set_type set,
3572 alias_set_type base_set, tree type,
3573 vec<vn_reference_op_s> operands,
3574 vn_reference_t *vnresult, vn_lookup_kind kind)
3575{
3576 struct vn_reference_s vr1;
3577 vn_reference_t tmp;
3578 tree cst;
3579
3580 if (!vnresult)
3581 vnresult = &tmp;
 3582 *vnresult = NULL;
3583
3584 vr1.vuse = vuse_ssa_val (vuse);
3585 shared_lookup_references.truncate (0);
3586 shared_lookup_references.safe_grow (operands.length (), true);
3587 memcpy (shared_lookup_references.address (),
3588 operands.address (),
3589 sizeof (vn_reference_op_s)
3590 * operands.length ());
3591 bool valueized_p;
3592 valueize_refs_1 (&shared_lookup_references, &valueized_p);
3593 vr1.operands = shared_lookup_references;
3594 vr1.type = type;
3595 vr1.set = set;
3596 vr1.base_set = base_set;
3597 vr1.hashcode = vn_reference_compute_hash (&vr1);
3598 if ((cst = fully_constant_vn_reference_p (&vr1)))
3599 return cst;
3600
3601 vn_reference_lookup_1 (&vr1, vnresult);
3602 if (!*vnresult
3603 && kind != VN_NOWALK
3604 && vr1.vuse)
3605 {
3606 ao_ref r;
 3607 unsigned limit = param_sccvn_max_alias_queries_per_access;
 3608 vn_walk_cb_data data (&vr1, NULL_TREE, NULL, kind, true, NULL_TREE);
3609 vec<vn_reference_op_s> ops_for_ref;
3610 if (!valueized_p)
3611 ops_for_ref = vr1.operands;
3612 else
3613 {
3614 /* For ao_ref_from_mem we have to ensure only available SSA names
3615 end up in base and the only convenient way to make this work
3616 for PRE is to re-valueize with that in mind. */
3617 ops_for_ref.create (operands.length ());
3618 ops_for_ref.quick_grow (operands.length ());
3619 memcpy (ops_for_ref.address (),
3620 operands.address (),
3621 sizeof (vn_reference_op_s)
3622 * operands.length ());
3623 valueize_refs_1 (&ops_for_ref, &valueized_p, true);
3624 }
3625 if (ao_ref_init_from_vn_reference (&r, set, base_set, type,
3626 ops_for_ref))
3627 *vnresult
3628 = ((vn_reference_t)
3629 walk_non_aliased_vuses (&r, vr1.vuse, true, vn_reference_lookup_2,
3630 vn_reference_lookup_3, vuse_valueize,
3631 limit, &data));
3632 if (ops_for_ref != shared_lookup_references)
3633 ops_for_ref.release ();
 3634 gcc_checking_assert (vr1.operands == shared_lookup_references);
3635 }
3636
3637 if (*vnresult)
3638 return (*vnresult)->result;
3639
 3640 return NULL_TREE;
3641}
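
A hedged sketch of how this entry point is driven (the wrapper and its parameter names are illustrative, not from this file); the caller supplies an already-built operand vector together with the alias sets and the lookup kind:

static tree
lookup_ref_by_pieces (tree vuse, alias_set_type set, alias_set_type base_set,
		      tree type, vec<vn_reference_op_s> operands)
{
  vn_reference_t vnresult = NULL;
  tree val = vn_reference_lookup_pieces (vuse, set, base_set, type,
					 operands, &vnresult, VN_WALK);
  /* VAL is the recorded value number, or NULL_TREE if nothing was
     found; when non-NULL, VNRESULT carries the full hash entry.  */
  return val;
}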
3642
3643/* Lookup OP in the current hash table, and return the resulting value
3644 number if it exists in the hash table. Return NULL_TREE if it does
3645 not exist in the hash table or if the result field of the structure
 3646 was NULL. VNRESULT will be filled in with the vn_reference_t
3647 stored in the hashtable if one exists. When TBAA_P is false assume
3648 we are looking up a store and treat it as having alias-set zero.
 3649 *LAST_VUSE_PTR will be updated with the VUSE with which the value lookup succeeded.
3650 MASK is either NULL_TREE, or can be an INTEGER_CST if the result of the
3651 load is bitwise anded with MASK and so we are only interested in a subset
3652 of the bits and can ignore if the other bits are uninitialized or
3653 not initialized with constants. */
3654
3655tree
3656vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
3657 vn_reference_t *vnresult, bool tbaa_p,
3658 tree *last_vuse_ptr, tree mask)
3659{
3660 vec<vn_reference_op_s> operands;
3661 struct vn_reference_s vr1;
3662 bool valueized_anything;
3663
3664 if (vnresult)
 3665 *vnresult = NULL;
3666
3667 vr1.vuse = vuse_ssa_val (vuse);
3668 vr1.operands = operands
3669 = valueize_shared_reference_ops_from_ref (op, &valueized_anything);
 3670 vr1.type = TREE_TYPE (op);
3671 ao_ref op_ref;
3672 ao_ref_init (&op_ref, op);
3673 vr1.set = ao_ref_alias_set (&op_ref);
3674 vr1.base_set = ao_ref_base_alias_set (&op_ref);
3675 vr1.hashcode = vn_reference_compute_hash (&vr1);
 3676 if (mask == NULL_TREE)
3677 if (tree cst = fully_constant_vn_reference_p (&vr1))
3678 return cst;
3679
3680 if (kind != VN_NOWALK && vr1.vuse)
3681 {
3682 vn_reference_t wvnresult;
3683 ao_ref r;
 3684 unsigned limit = param_sccvn_max_alias_queries_per_access;
3685 auto_vec<vn_reference_op_s> ops_for_ref;
3686 if (valueized_anything)
3687 {
3688 copy_reference_ops_from_ref (op, &ops_for_ref);
3689 bool tem;
3690 valueize_refs_1 (&ops_for_ref, &tem, true);
3691 }
3692 /* Make sure to use a valueized reference if we valueized anything.
3693 Otherwise preserve the full reference for advanced TBAA. */
3694 if (!valueized_anything
3695 || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.base_set,
3696 vr1.type, ops_for_ref))
3697 ao_ref_init (&r, op);
 3698 vn_walk_cb_data data (&vr1, r.ref ? NULL_TREE : op,
3699 last_vuse_ptr, kind, tbaa_p, mask);
3700
3701 wvnresult
3702 = ((vn_reference_t)
3703 walk_non_aliased_vuses (&r, vr1.vuse, tbaa_p, vn_reference_lookup_2,
3704 vn_reference_lookup_3, vuse_valueize, limit,
3705 &data));
 3706 gcc_checking_assert (vr1.operands == shared_lookup_references);
3707 if (wvnresult)
3708 {
 3709 gcc_assert (mask == NULL_TREE);
3710 if (vnresult)
3711 *vnresult = wvnresult;
3712 return wvnresult->result;
3713 }
3714 else if (mask)
3715 return data.masked_result;
3716
 3717 return NULL_TREE;
3718 }
3719
3720 if (last_vuse_ptr)
3721 *last_vuse_ptr = vr1.vuse;
3722 if (mask)
 3723 return NULL_TREE;
3724 return vn_reference_lookup_1 (&vr1, vnresult);
3725}
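
The MASK parameter documented above supports lookups where only some bits of the loaded value matter because the result feeds a BIT_AND. A source-level sketch (an assumed example; the constant shown assumes a little-endian target):

struct T { char a; char b; };

int
g (void)
{
  struct T t;
  t.a = 5;			/* t.b stays uninitialized.  */
  int r = 0;
  __builtin_memcpy (&r, &t, sizeof (t));
  /* With a MASK of 0xff only t.a's bits must be known, so the lookup
     can still compute a constant even though one of the copied bytes
     is undefined.  */
  return r & 0xff;
}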
3726
3727/* Lookup CALL in the current hash table and return the entry in
3728 *VNRESULT if found. Populates *VR for the hashtable lookup. */
3729
3730void
3731vn_reference_lookup_call (gcall *call, vn_reference_t *vnresult,
3732 vn_reference_t vr)
3733{
3734 if (vnresult)
 3735 *vnresult = NULL;
3736
3737 tree vuse = gimple_vuse (call);
3738
 3739 vr->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
 3740 vr->operands = valueize_shared_reference_ops_from_call (call);
 3741 tree lhs = gimple_call_lhs (call);
 3742 /* For non-SSA return values the reference ops contain the LHS. */
 3743 vr->type = ((lhs && TREE_CODE (lhs) == SSA_NAME)
 3744 ? TREE_TYPE (lhs) : NULL_TREE);
3745 vr->punned = false;
3746 vr->set = 0;
3747 vr->base_set = 0;
3748 vr->hashcode = vn_reference_compute_hash (vr);
3749 vn_reference_lookup_1 (vr, vnresult);
3750}
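
Such call lookups let FRE remove a call that is indistinguishable from an earlier one, i.e. same operands and same VUSE. A sketch at the source level (assumed example):

extern int pure_fn (int) __attribute__ ((pure));

int
h (int x)
{
  int a = pure_fn (x);
  int b = pure_fn (x);	/* Same argument and same VUSE: this call
			   looks up to the first, so b gets a's value
			   number.  */
  return a + b;
}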
3751
3752/* Insert OP into the current hash table with a value number of RESULT. */
3753
3754static void
3755vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
3756{
3757 vn_reference_s **slot;
3758 vn_reference_t vr1;
3759 bool tem;
3760
 3761 vr1 = XOBNEW (&vn_tables_obstack, vn_reference_s);
 3762 if (TREE_CODE (result) == SSA_NAME)
3763 vr1->value_id = VN_INFO (result)->value_id;
3764 else
3765 vr1->value_id = get_or_alloc_constant_value_id (result);
3766 vr1->vuse = vuse_ssa_val (vuse);
3767 vr1->operands = valueize_shared_reference_ops_from_ref (op, &tem).copy ();
 3768 vr1->type = TREE_TYPE (op);
3769 vr1->punned = false;
3770 ao_ref op_ref;
3771 ao_ref_init (&op_ref, op);
3772 vr1->set = ao_ref_alias_set (&op_ref);
3773 vr1->base_set = ao_ref_base_alias_set (&op_ref);
3774 vr1->hashcode = vn_reference_compute_hash (vr1);
 3775 vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
3776 vr1->result_vdef = vdef;
3777
3778 slot = valid_info->references->find_slot_with_hash (vr1, vr1->hashcode,
3779 INSERT);
3780
3781 /* Because IL walking on reference lookup can end up visiting
3782 a def that is only to be visited later in iteration order
3783 when we are about to make an irreducible region reducible
3784 the def can be effectively processed and its ref being inserted
3785 by vn_reference_lookup_3 already. So we cannot assert (!*slot)
3786 but save a lookup if we deal with already inserted refs here. */
3787 if (*slot)
3788 {
3789 /* We cannot assert that we have the same value either because
3790 when disentangling an irreducible region we may end up visiting
3791 a use before the corresponding def. That's a missed optimization
3792 only though. See gcc.dg/tree-ssa/pr87126.c for example. */
3793 if (dump_file && (dump_flags & TDF_DETAILS)
3794 && !operand_equal_p ((*slot)->result, vr1->result, 0))
3795 {
3796 fprintf (dump_file, "Keeping old value ");
3797 print_generic_expr (dump_file, (*slot)->result);
3798 fprintf (dump_file, " because of collision\n");
3799 }
3800 free_reference (vr1);
 3801 obstack_free (&vn_tables_obstack, vr1);
3802 return;
3803 }
3804
3805 *slot = vr1;
3806 vr1->next = last_inserted_ref;
3807 last_inserted_ref = vr1;
3808}
3809
 3810 /* Insert a reference by its pieces into the current hash table with
3811 a value number of RESULT. Return the resulting reference
3812 structure we created. */
3813
3814vn_reference_t
3815vn_reference_insert_pieces (tree vuse, alias_set_type set,
3816 alias_set_type base_set, tree type,
3817 vec<vn_reference_op_s> operands,
3818 tree result, unsigned int value_id)
3819
3820{
3821 vn_reference_s **slot;
3822 vn_reference_t vr1;
3823
 3824 vr1 = XOBNEW (&vn_tables_obstack, vn_reference_s);
3825 vr1->value_id = value_id;
3826 vr1->vuse = vuse_ssa_val (vuse);
3827 vr1->operands = operands;
3828 valueize_refs (&vr1->operands);
3829 vr1->type = type;
3830 vr1->punned = false;
3831 vr1->set = set;
3832 vr1->base_set = base_set;
3833 vr1->hashcode = vn_reference_compute_hash (vr1);
 3834 if (result && TREE_CODE (result) == SSA_NAME)
3835 result = SSA_VAL (result);
3836 vr1->result = result;
 3837 vr1->result_vdef = NULL_TREE;
3838
3839 slot = valid_info->references->find_slot_with_hash (vr1, vr1->hashcode,
3840 INSERT);
3841
3842 /* At this point we should have all the things inserted that we have
3843 seen before, and we should never try inserting something that
3844 already exists. */
 3845 gcc_assert (!*slot);
3846
3847 *slot = vr1;
3848 vr1->next = last_inserted_ref;
3849 last_inserted_ref = vr1;
3850 return vr1;
3851}
3852
3853/* Compute and return the hash value for nary operation VBO1. */
3854
3855static hashval_t
3856vn_nary_op_compute_hash (const vn_nary_op_t vno1)
3857{
3858 inchash::hash hstate;
3859 unsigned i;
3860
3861 if (((vno1->length == 2
3862 && commutative_tree_code (vno1->opcode))
3863 || (vno1->length == 3
3864 && commutative_ternary_tree_code (vno1->opcode)))
3865 && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
3866 std::swap (vno1->op[0], vno1->op[1]);
 3867 else if (TREE_CODE_CLASS (vno1->opcode) == tcc_comparison
3868 && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
3869 {
3870 std::swap (vno1->op[0], vno1->op[1]);
3871 vno1->opcode = swap_tree_comparison (vno1->opcode);
3872 }
3873
3874 hstate.add_int (vno1->opcode);
3875 for (i = 0; i < vno1->length; ++i)
3876 inchash::add_expr (vno1->op[i], hstate);
3877
3878 return hstate.end ();
3879}
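
The swaps above canonicalize operand order before hashing so that trivially commuted expressions collide in the table. An illustration in GIMPLE-like pseudo code (assumed examples):

/* Operand canonicalization makes
     _1 = b_2 + a_3;	and	_4 = a_3 + b_2;
   hash (and later compare) equal, and brings
     _5 = a_3 < b_2;	and	_6 = b_2 > a_3;
   to one canonical form via swap_tree_comparison.  */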
3880
3881/* Compare nary operations VNO1 and VNO2 and return true if they are
3882 equivalent. */
3883
3884bool
3885vn_nary_op_eq (const_vn_nary_op_t const vno1, const_vn_nary_op_t const vno2)
3886{
3887 unsigned i;
3888
3889 if (vno1->hashcode != vno2->hashcode)
3890 return false;
3891
3892 if (vno1->length != vno2->length)
3893 return false;
3894
3895 if (vno1->opcode != vno2->opcode
3896 || !types_compatible_p (vno1->type, vno2->type))
3897 return false;
3898
3899 for (i = 0; i < vno1->length; ++i)
3900 if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
3901 return false;
3902
 3903 /* BIT_INSERT_EXPR has an implicit operand as the type precision
3904 of op1. Need to check to make sure they are the same. */
3905 if (vno1->opcode == BIT_INSERT_EXPR
 3906 && TREE_CODE (vno1->op[1]) == INTEGER_CST
 3907 && TYPE_PRECISION (TREE_TYPE (vno1->op[1]))
 3908 != TYPE_PRECISION (TREE_TYPE (vno2->op[1])))
3910
3911 return true;
3912}
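
The final check matters because the number of bits written by BIT_INSERT_EXPR is implied by the precision of op1's type rather than by an explicit operand:

/* Illustration (assumed): inserting the INTEGER_CST 1 as an 8-bit
   value and as a 16-bit value at the same position writes different
   numbers of bits, while opcode, type and the operand trees can
   otherwise compare equal; the TYPE_PRECISION test keeps the two
   operations distinct.  */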
3913
3914/* Initialize VNO from the pieces provided. */
3915
3916static void
3917init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
3918 enum tree_code code, tree type, tree *ops)
3919{
3920 vno->opcode = code;
3921 vno->length = length;
3922 vno->type = type;
3923 memcpy (&vno->op[0], ops, sizeof (tree) * length);
3924}
3925
3926/* Return the number of operands for a vn_nary ops structure from STMT. */
3927
3928static unsigned int
3929vn_nary_length_from_stmt (gimple *stmt)
3930{
3931 switch (gimple_assign_rhs_code (stmt))
3932 {
3933 case REALPART_EXPR:
3934 case IMAGPART_EXPR:
3935 case VIEW_CONVERT_EXPR:
3936 return 1;
3937
3938 case BIT_FIELD_REF:
3939 return 3;
3940
3941 case CONSTRUCTOR:
 3942 return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
3943
3944 default:
3945 return gimple_num_ops (stmt) - 1;
3946 }
3947}
3948
3949/* Initialize VNO from STMT. */
3950
3951static void
3952init_vn_nary_op_from_stmt (vn_nary_op_t vno, gassign *stmt)
3953{
3954 unsigned i;
3955
3956 vno->opcode = gimple_assign_rhs_code (stmt);
 3957 vno->type = TREE_TYPE (gimple_assign_lhs (stmt));
3958 switch (vno->opcode)
3959 {
3960 case REALPART_EXPR:
3961 case IMAGPART_EXPR:
3962 case VIEW_CONVERT_EXPR:
3963 vno->length = 1;
 3964 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
3965 break;
3966
3967 case BIT_FIELD_REF:
3968 vno->length = 3;
 3969 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
 3970 vno->op[1] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 1);
 3971 vno->op[2] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 2);
3972 break;
3973
3974 case CONSTRUCTOR:
 3975 vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
 3976 for (i = 0; i < vno->length; ++i)
 3977 vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
3978 break;
3979
3980 default:
 3981 gcc_checking_assert (!gimple_assign_single_p (stmt));
3982 vno->length = gimple_num_ops (stmt) - 1;
3983 for (i = 0; i < vno->length; ++i)
3984 vno->op[i] = gimple_op (stmt, i + 1);
3985 }
3986}
3987
3988/* Compute the hashcode for VNO and look for it in the hash table;
3989 return the resulting value number if it exists in the hash table.
3990 Return NULL_TREE if it does not exist in the hash table or if the
3991 result field of the operation is NULL. VNRESULT will contain the
3992 vn_nary_op_t from the hashtable if it exists. */
3993
3994static tree
3995vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
3996{
3997 vn_nary_op_s **slot;
3998
3999 if (vnresult)
 4000 *vnresult = NULL;
4001
4002 for (unsigned i = 0; i < vno->length; ++i)
 4003 if (TREE_CODE (vno->op[i]) == SSA_NAME)
4004 vno->op[i] = SSA_VAL (vno->op[i]);
4005
4006 vno->hashcode = vn_nary_op_compute_hash (vno);
4007 slot = valid_info->nary->find_slot_with_hash (vno, vno->hashcode, NO_INSERT);
4008 if (!slot)
 4009 return NULL_TREE;
 4010 if (vnresult)
 4011 *vnresult = *slot;
 4012 return (*slot)->predicated_values ? NULL_TREE : (*slot)->u.result;
4013}
4014
 4015 /* Lookup an n-ary operation by its pieces and return the resulting value
4016 number if it exists in the hash table. Return NULL_TREE if it does
4017 not exist in the hash table or if the result field of the operation
4018 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
4019 if it exists. */
4020
4021tree
4022vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
4023 tree type, tree *ops, vn_nary_op_t *vnresult)
4024{
 4025 vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
 4026 sizeof_vn_nary_op (length));
4027 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
4028 return vn_nary_op_lookup_1 (vno1, vnresult);
4029}
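
A hedged usage sketch (the wrapper is illustrative, not from this file): asking whether x + y already has a value number.

static tree
lookup_plus (tree x, tree y)
{
  tree ops[2] = { x, y };
  vn_nary_op_t vnresult;
  /* Returns the value number of x + y, or NULL_TREE if unknown.  */
  return vn_nary_op_lookup_pieces (2, PLUS_EXPR, TREE_TYPE (x),
				   ops, &vnresult);
}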
4030
4031/* Lookup the rhs of STMT in the current hash table, and return the resulting
4032 value number if it exists in the hash table. Return NULL_TREE if
4033 it does not exist in the hash table. VNRESULT will contain the
4034 vn_nary_op_t from the hashtable if it exists. */
4035
4036tree
4037vn_nary_op_lookup_stmt (gimple *stmt, vn_nary_op_t *vnresult)
4038{
4039 vn_nary_op_t vno1
 4040 = XALLOCAVAR (struct vn_nary_op_s,
 4041 sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
4042 init_vn_nary_op_from_stmt (vno1, as_a <gassign *> (stmt));
4043 return vn_nary_op_lookup_1 (vno1, vnresult);
4044}
4045
4046/* Allocate a vn_nary_op_t with LENGTH operands on STACK. */
4047
4048static vn_nary_op_t
4049alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
4050{
 4051 return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
4052}
4053
4054/* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
4055 obstack. */
4056
4057static vn_nary_op_t
4058alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
4059{
4060 vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length, &vn_tables_obstack);
4061
4062 vno1->value_id = value_id;
4063 vno1->length = length;
4064 vno1->predicated_values = 0;
4065 vno1->u.result = result;
4066
4067 return vno1;
4068}
4069
4070/* Insert VNO into TABLE. */
4071
4072static vn_nary_op_t
4073vn_nary_op_insert_into (vn_nary_op_t vno, vn_nary_op_table_type *table)
4074{
4075 vn_nary_op_s **slot;
4076
 4077 gcc_assert (! vno->predicated_values
 4078 || (! vno->u.values->next
 4079 && vno->u.values->n == 1));
4080
4081 for (unsigned i = 0; i < vno->length; ++i)
 4082 if (TREE_CODE (vno->op[i]) == SSA_NAME)
4083 vno->op[i] = SSA_VAL (vno->op[i]);
4084
4085 vno->hashcode = vn_nary_op_compute_hash (vno);
4086 slot = table->find_slot_with_hash (vno, vno->hashcode, INSERT);
4087 vno->unwind_to = *slot;
4088 if (*slot)
4089 {
4090 /* Prefer non-predicated values.
4091 ??? Only if those are constant, otherwise, with constant predicated
4092 value, turn them into predicated values with entry-block validity
4093 (??? but we always find the first valid result currently). */
4094 if ((*slot)->predicated_values
4095 && ! vno->predicated_values)
4096 {
4097 /* ??? We cannot remove *slot from the unwind stack list.
4098 For the moment we deal with this by skipping not found
4099 entries but this isn't ideal ... */
4100 *slot = vno;
4101 /* ??? Maintain a stack of states we can unwind in
4102 vn_nary_op_s? But how far do we unwind? In reality
4103 we need to push change records somewhere... Or not
4104 unwind vn_nary_op_s and linking them but instead
4105 unwind the results "list", linking that, which also
4106 doesn't move on hashtable resize. */
4107 /* We can also have a ->unwind_to recording *slot there.
4108 That way we can make u.values a fixed size array with
4109 recording the number of entries but of course we then
4110 have always N copies for each unwind_to-state. Or we
4111 make sure to only ever append and each unwinding will
4112 pop off one entry (but how to deal with predicated
4113 replaced with non-predicated here?) */
4114 vno->next = last_inserted_nary;
4115 last_inserted_nary = vno;
4116 return vno;
4117 }
4118 else if (vno->predicated_values
4119 && ! (*slot)->predicated_values)
4120 return *slot;
4121 else if (vno->predicated_values
4122 && (*slot)->predicated_values)
4123 {
4124 /* ??? Factor this all into a insert_single_predicated_value
4125 routine. */
 4126 gcc_assert (!vno->u.values->next && vno->u.values->n == 1);
 4127 basic_block vno_bb
 4128 = BASIC_BLOCK_FOR_FN (cfun, vno->u.values->valid_dominated_by_p[0]);
4129 vn_pval *nval = vno->u.values;
4130 vn_pval **next = &vno->u.values;
4131 bool found = false;
4132 for (vn_pval *val = (*slot)->u.values; val; val = val->next)
4133 {
4134 if (expressions_equal_p (val->result, nval->result))
4135 {
4136 found = true;
4137 for (unsigned i = 0; i < val->n; ++i)
4138 {
4139 basic_block val_bb
 4140 = BASIC_BLOCK_FOR_FN (cfun,
 4141 val->valid_dominated_by_p[i]);
4142 if (dominated_by_p (CDI_DOMINATORS, vno_bb, val_bb))
4143 /* Value registered with more generic predicate. */
4144 return *slot;
4145 else if (dominated_by_p (CDI_DOMINATORS, val_bb, vno_bb))
4146 /* Shouldn't happen, we insert in RPO order. */
 4147 gcc_unreachable ();
4148 }
4149 /* Append value. */
 4150 *next = (vn_pval *) obstack_alloc (&vn_tables_obstack,
 4151 sizeof (vn_pval)
 4152 + val->n * sizeof (int));
 4153 (*next)->next = NULL;
4154 (*next)->result = val->result;
4155 (*next)->n = val->n + 1;
4156 memcpy ((*next)->valid_dominated_by_p,
4157 val->valid_dominated_by_p,
4158 val->n * sizeof (int));
4159 (*next)->valid_dominated_by_p[val->n] = vno_bb->index;
4160 next = &(*next)->next;
4161 if (dump_file && (dump_flags & TDF_DETAILS))
4162 fprintf (dump_file, "Appending predicate to value.\n");
4163 continue;
4164 }
4165 /* Copy other predicated values. */
 4166 *next = (vn_pval *) obstack_alloc (&vn_tables_obstack,
 4167 sizeof (vn_pval)
 4168 + (val->n-1) * sizeof (int));
 4169 memcpy (*next, val, sizeof (vn_pval) + (val->n-1) * sizeof (int));
 4170 (*next)->next = NULL;
4171 next = &(*next)->next;
4172 }
4173 if (!found)
4174 *next = nval;
4175
4176 *slot = vno;
4177 vno->next = last_inserted_nary;
4178 last_inserted_nary = vno;
4179 return vno;
4180 }
4181
4182 /* While we do not want to insert things twice it's awkward to
4183 avoid it in the case where visit_nary_op pattern-matches stuff
4184 and ends up simplifying the replacement to itself. We then
4185 get two inserts, one from visit_nary_op and one from
4186 vn_nary_build_or_lookup.
4187 So allow inserts with the same value number. */
4188 if ((*slot)->u.result == vno->u.result)
4189 return *slot;
4190 }
4191
 4192 /* ??? There's also optimistic vs. previous committed state merging
4193 that is problematic for the case of unwinding. */
4194
4195 /* ??? We should return NULL if we do not use 'vno' and have the
4196 caller release it. */
 4197 gcc_assert (!*slot);
4198
4199 *slot = vno;
4200 vno->next = last_inserted_nary;
4201 last_inserted_nary = vno;
4202 return vno;
4203}
4204
 4205 /* Insert an n-ary operation into the current hash table using its
4206 pieces. Return the vn_nary_op_t structure we created and put in
4207 the hashtable. */
4208
4209vn_nary_op_t
4210vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
4211 tree type, tree *ops,
4212 tree result, unsigned int value_id)
4213{
4214 vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
4215 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
4216 return vn_nary_op_insert_into (vno1, valid_info->nary);
4217}
4218
4219static vn_nary_op_t
4220vn_nary_op_insert_pieces_predicated (unsigned int length, enum tree_code code,
4221 tree type, tree *ops,
4222 tree result, unsigned int value_id,
4223 edge pred_e)
4224{
4225 /* ??? Currently tracking BBs. */
4226 if (! single_pred_p (pred_e->dest))
4227 {
4228 /* Never record for backedges. */
4229 if (pred_e->flags & EDGE_DFS_BACK)
 4230 return NULL;
4231 edge_iterator ei;
4232 edge e;
4233 int cnt = 0;
4234 /* Ignore backedges. */
 4235 FOR_EACH_EDGE (e, ei, pred_e->dest->preds)
 4236 if (! dominated_by_p (CDI_DOMINATORS, e->src, e->dest))
 4237 cnt++;
 4238 if (cnt != 1)
 4239 return NULL;
4240 }
4241 if (dump_file && (dump_flags & TDF_DETAILS)
4242 /* ??? Fix dumping, but currently we only get comparisons. */
 4243 && TREE_CODE_CLASS (code) == tcc_comparison)
4244 {
4245 fprintf (dump_file, "Recording on edge %d->%d ", pred_e->src->index,
4246 pred_e->dest->index);
4247 print_generic_expr (dump_file, ops[0], TDF_SLIM);
4248 fprintf (dump_file, " %s ", get_tree_code_name (code));
4249 print_generic_expr (dump_file, ops[1], TDF_SLIM);
4250 fprintf (dump_file, " == %s\n",
4251 integer_zerop (result) ? "false" : "true");
4252 }
 4253 vn_nary_op_t vno1 = alloc_vn_nary_op (length, NULL_TREE, value_id);
4254 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
4255 vno1->predicated_values = 1;
 4256 vno1->u.values = (vn_pval *) obstack_alloc (&vn_tables_obstack,
 4257 sizeof (vn_pval));
 4258 vno1->u.values->next = NULL;
4259 vno1->u.values->result = result;
4260 vno1->u.values->n = 1;
4261 vno1->u.values->valid_dominated_by_p[0] = pred_e->dest->index;
4262 return vn_nary_op_insert_into (vno1, valid_info->nary);
4263}
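
Predicated entries record a value that is only usable in blocks dominated by the destination of the recording edge. A source-level sketch of the effect (assumed example):

int
k (int x)
{
  if (x == 0)
    /* On the true edge the predicated value "x_1 == 0 is true" is
       recorded, valid in blocks dominated by that edge's destination,
       so x valueizes to 0 here and the return folds to 1.  */
    return x + 1;
  return x;
}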
4264
4265static bool
4266dominated_by_p_w_unex (basic_block bb1, basic_block bb2, bool);
4267
4268static tree
4269vn_nary_op_get_predicated_value (vn_nary_op_t vno, basic_block bb)
4270{
4271 if (! vno->predicated_values)
4272 return vno->u.result;
4273 for (vn_pval *val = vno->u.values; val; val = val->next)
4274 for (unsigned i = 0; i < val->n; ++i)
4275 /* Do not handle backedge executability optimistically since
4276 when figuring out whether to iterate we do not consider
4277 changed predication. */
4278 if (dominated_by_p_w_unex
 4279 (bb, BASIC_BLOCK_FOR_FN (cfun, val->valid_dominated_by_p[i]),
 4280 false))
4281 return val->result;
 4282 return NULL_TREE;
4283}
4284
4285/* Insert the rhs of STMT into the current hash table with a value number of
4286 RESULT. */
4287
4288static vn_nary_op_t
4289vn_nary_op_insert_stmt (gimple *stmt, tree result)
4290{
4291 vn_nary_op_t vno1
4292 = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
4293 result, VN_INFO (result)->value_id);
4294 init_vn_nary_op_from_stmt (vno1, as_a <gassign *> (stmt));
4295 return vn_nary_op_insert_into (vno1, valid_info->nary);
4296}
4297
4298/* Compute a hashcode for PHI operation VP1 and return it. */
4299
4300static inline hashval_t
4301vn_phi_compute_hash (vn_phi_t vp1)
4302{
4303 inchash::hash hstate;
4304 tree phi1op;
4305 tree type;
4306 edge e;
4307 edge_iterator ei;
4308
 4309 hstate.add_int (EDGE_COUNT (vp1->block->preds));
 4310 switch (EDGE_COUNT (vp1->block->preds))
4311 {
4312 case 1:
4313 break;
4314 case 2:
4315 if (vp1->block->loop_father->header == vp1->block)
4316 ;
4317 else
4318 break;
4319 /* Fallthru. */
4320 default:
4321 hstate.add_int (vp1->block->index);
4322 }
4323
4324 /* If all PHI arguments are constants we need to distinguish
4325 the PHI node via its type. */
4326 type = vp1->type;
4327 hstate.merge_hash (vn_hash_type (type));
4328
 4329 FOR_EACH_EDGE (e, ei, vp1->block->preds)
4330 {
 4331 /* Don't hash backedge values; they need to be handled as VN_TOP
4332 for optimistic value-numbering. */
4333 if (e->flags & EDGE_DFS_BACK)
4334 continue;
4335
4336 phi1op = vp1->phiargs[e->dest_idx];
4337 if (phi1op == VN_TOP)
4338 continue;
4339 inchash::add_expr (phi1op, hstate);
4340 }
4341
4342 return hstate.end ();
4343}
4344
4345
4346/* Return true if COND1 and COND2 represent the same condition, set
4347 *INVERTED_P if one needs to be inverted to make it the same as
4348 the other. */
4349
4350static bool
4351cond_stmts_equal_p (gcond *cond1, tree lhs1, tree rhs1,
4352 gcond *cond2, tree lhs2, tree rhs2, bool *inverted_p)
4353{
4354 enum tree_code code1 = gimple_cond_code (cond1);
4355 enum tree_code code2 = gimple_cond_code (cond2);
4356
4357 *inverted_p = false;
4358 if (code1 == code2)
4359 ;
4360 else if (code1 == swap_tree_comparison (code2))
4361 std::swap (lhs2, rhs2);
4362 else if (code1 == invert_tree_comparison (code2, HONOR_NANS (lhs2)))
4363 *inverted_p = true;
4364 else if (code1 == invert_tree_comparison
4365 (swap_tree_comparison (code2), HONOR_NANS (lhs2)))
4366 {
4367 std::swap (lhs2, rhs2);
4368 *inverted_p = true;
4369 }
4370 else
4371 return false;
4372
4373 return ((expressions_equal_p (lhs1, lhs2)
4374 && expressions_equal_p (rhs1, rhs2))
4375 || (commutative_tree_code (code1)
4376 && expressions_equal_p (lhs1, rhs2)
4377 && expressions_equal_p (rhs1, lhs2)));
4378}
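
Worked cases for the matching above (assumed integer operands):

/*   a < b   vs.  b > a	  -> equal after swapping lhs2/rhs2
     a < b   vs.  a >= b  -> equal with *inverted_p set
     a < b   vs.  b <= a  -> equal after swapping and inverting
   For floating point, invert_tree_comparison can fail when NaNs are
   honored, so such pairs may be treated as different conditions.  */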
4379
4380/* Compare two phi entries for equality, ignoring VN_TOP arguments. */
4381
4382static int
4383vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2)
4384{
4385 if (vp1->hashcode != vp2->hashcode)
4386 return false;
4387
4388 if (vp1->block != vp2->block)
4389 {
 4390 if (EDGE_COUNT (vp1->block->preds) != EDGE_COUNT (vp2->block->preds))
 4391 return false;
 4392
 4393 switch (EDGE_COUNT (vp1->block->preds))
4394 {
4395 case 1:
4396 /* Single-arg PHIs are just copies. */
4397 break;
4398
4399 case 2:
4400 {
4401 /* Rule out backedges into the PHI. */
4402 if (vp1->block->loop_father->header == vp1->block
4403 || vp2->block->loop_father->header == vp2->block)
4404 return false;
4405
4406 /* If the PHI nodes do not have compatible types
4407 they are not the same. */
4408 if (!types_compatible_p (vp1->type, vp2->type))
4409 return false;
4410
4411 basic_block idom1
4412 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
4413 basic_block idom2
4414 = get_immediate_dominator (CDI_DOMINATORS, vp2->block);
 4415 /* If the immediate dominators end in switch stmts, multiple
4416 values may end up in the same PHI arg via intermediate
4417 CFG merges. */
 4418 if (EDGE_COUNT (idom1->succs) != 2
 4419 || EDGE_COUNT (idom2->succs) != 2)
4420 return false;
4421
4422 /* Verify the controlling stmt is the same. */
4423 gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1));
4424 gcond *last2 = safe_dyn_cast <gcond *> (last_stmt (idom2));
4425 if (! last1 || ! last2)
4426 return false;
4427 bool inverted_p;
4428 if (! cond_stmts_equal_p (last1, vp1->cclhs, vp1->ccrhs,
4429 last2, vp2->cclhs, vp2->ccrhs,
4430 &inverted_p))
4431 return false;
4432
4433 /* Get at true/false controlled edges into the PHI. */
4434 edge te1, te2, fe1, fe2;
4435 if (! extract_true_false_controlled_edges (idom1, vp1->block,
4436 &te1, &fe1)
4437 || ! extract_true_false_controlled_edges (idom2, vp2->block,
4438 &te2, &fe2))
4439 return false;
4440
4441 /* Swap edges if the second condition is the inverted of the
4442 first. */
4443 if (inverted_p)
4444 std::swap (te2, fe2);
4445
4446 /* Since we do not know which edge will be executed we have
4447 to be careful when matching VN_TOP. Be conservative and
4448 only match VN_TOP == VN_TOP for now, we could allow
4449 VN_TOP on the not prevailing PHI though. See for example
4450 PR102920. */
4451 if (! expressions_equal_p (vp1->phiargs[te1->dest_idx],
4452 vp2->phiargs[te2->dest_idx], false)
4453 || ! expressions_equal_p (vp1->phiargs[fe1->dest_idx],
4454 vp2->phiargs[fe2->dest_idx], false))
4455 return false;
4456
4457 return true;
4458 }
4459
4460 default:
4461 return false;
4462 }
4463 }
4464
4465 /* If the PHI nodes do not have compatible types
4466 they are not the same. */
4467 if (!types_compatible_p (vp1->type, vp2->type))
4468 return false;
4469
 4470 /* Any phi in the same block will have its arguments in the
4471 same edge order, because of how we store phi nodes. */
 4472 unsigned nargs = EDGE_COUNT (vp1->block->preds);
4473 for (unsigned i = 0; i < nargs; ++i)
4474 {
4475 tree phi1op = vp1->phiargs[i];
4476 tree phi2op = vp2->phiargs[i];
4477 if (phi1op == phi2op)
4478 continue;
4479 if (!expressions_equal_p (phi1op, phi2op, false))
4480 return false;
4481 }
4482
4483 return true;
4484}
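
The two-predecessor case above unifies PHIs from different blocks when they are controlled by equivalent conditions. A hedged sketch of the CFG shape (assumed GIMPLE):

/*   if (a_1 < b_2)		     if (a_1 < b_2)
       ...			       ...
     x_3 = PHI <c_4(T), d_5(F)>	     y_6 = PHI <c_4(T), d_5(F)>

   The controlling conditions match and the true/false edges line up,
   so x_3 and y_6 receive the same value number.  */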
4485
4486/* Lookup PHI in the current hash table, and return the resulting
4487 value number if it exists in the hash table. Return NULL_TREE if
4488 it does not exist in the hash table. */
4489
4490static tree
4491vn_phi_lookup (gimple *phi, bool backedges_varying_p)
4492{
4493 vn_phi_s **slot;
4494 struct vn_phi_s *vp1;
4495 edge e;
4496 edge_iterator ei;
4497
 4498 vp1 = XALLOCAVAR (struct vn_phi_s,
 4499 sizeof (struct vn_phi_s)
 4500 + (gimple_phi_num_args (phi) - 1) * sizeof (tree));
4501
4502 /* Canonicalize the SSA_NAME's to their value number. */
 4503 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
4504 {
 4505 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
 4506 if (TREE_CODE (def) == SSA_NAME
4507 && (!backedges_varying_p || !(e->flags & EDGE_DFS_BACK)))
4508 {
4509 if (ssa_undefined_value_p (def, false))
4510 def = VN_TOP;
4511 else
4512 def = SSA_VAL (def);
4513 }
4514 vp1->phiargs[e->dest_idx] = def;
4515 }
 4516 vp1->type = TREE_TYPE (gimple_phi_result (phi));
4517 vp1->block = gimple_bb (phi);
4518 /* Extract values of the controlling condition. */
 4519 vp1->cclhs = NULL_TREE;
 4520 vp1->ccrhs = NULL_TREE;
4521 basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
 4522 if (EDGE_COUNT (idom1->succs) == 2)
4523 if (gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1)))
4524 {
4525 /* ??? We want to use SSA_VAL here. But possibly not
4526 allow VN_TOP. */
4527 vp1->cclhs = vn_valueize (gimple_cond_lhs (last1));
4528 vp1->ccrhs = vn_valueize (gimple_cond_rhs (last1));
4529 }
4530 vp1->hashcode = vn_phi_compute_hash (vp1);
4531 slot = valid_info->phis->find_slot_with_hash (vp1, vp1->hashcode, NO_INSERT);
4532 if (!slot)
4533    return NULL_TREE;
4534 return (*slot)->result;
4535}
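
/* Illustrative sketch, not part of tree-ssa-sccvn.c: a standalone model of
   the canonicalize-then-hash scheme used by vn_phi_lookup above.  Each PHI
   argument is first mapped through the value-number lattice, so two PHIs
   whose arguments differ as SSA names but agree as values collide in the
   table.  All names here (value_t, phi_key, model_phi_lookup) are invented
   for the example and are not GCC API.  */

#include <cstddef>
#include <functional>
#include <unordered_map>
#include <vector>

typedef int value_t;			/* stand-in for a tree value number */

struct phi_key
{
  int block;				/* stand-in for vp1->block */
  std::vector<value_t> args;		/* stand-in for vp1->phiargs[] */
  bool operator== (const phi_key &o) const
  { return block == o.block && args == o.args; }
};

struct phi_hash
{
  std::size_t operator() (const phi_key &k) const
  {
    std::size_t h = std::hash<int> () (k.block);
    for (value_t v : k.args)
      h = h * 131 + std::hash<value_t> () (v);	/* mix valueized args */
    return h;
  }
};

/* Mirrors vn_phi_lookup: valueize every argument, then probe the table;
   -1 models the NULL_TREE "not found" result.  */
static value_t
model_phi_lookup (const std::unordered_map<phi_key, value_t, phi_hash> &tab,
		  int block, std::vector<value_t> args,
		  value_t (*valueize) (value_t))
{
  for (value_t &a : args)
    a = valueize (a);			/* the SSA_VAL canonicalization */
  auto it = tab.find (phi_key { block, args });
  return it == tab.end () ? -1 : it->second;
}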
4536
4537/* Insert PHI into the current hash table with a value number of
4538 RESULT. */
4539
4540static vn_phi_t
4541vn_phi_insert (gimple *phi, tree result, bool backedges_varying_p)
4542{
4543 vn_phi_s **slot;
4544  vn_phi_t vp1 = (vn_phi_t) obstack_alloc (&vn_tables_obstack,
4545					   sizeof (vn_phi_s)
4546					   + ((gimple_phi_num_args (phi) - 1)
4547					      * sizeof (tree)));
4548 edge e;
4549 edge_iterator ei;
4550
4551  /* Canonicalize the SSA_NAMEs to their value number.  */
4552  FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
4553 {
4554      tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
4555      if (TREE_CODE (def) == SSA_NAME
4556 && (!backedges_varying_p || !(e->flags & EDGE_DFS_BACK)))
4557 {
4558 if (ssa_undefined_value_p (def, false))
4559 def = VN_TOP;
4560 else
4561 def = SSA_VAL (def);
4562 }
4563 vp1->phiargs[e->dest_idx] = def;
4564 }
4565 vp1->value_id = VN_INFO (result)->value_id;
4566  vp1->type = TREE_TYPE (gimple_phi_result (phi));
4567 vp1->block = gimple_bb (phi);
4568 /* Extract values of the controlling condition. */
4569  vp1->cclhs = NULL_TREE;
4570  vp1->ccrhs = NULL_TREE;
4571 basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
4572  if (EDGE_COUNT (idom1->succs) == 2)
4573 if (gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1)))
4574 {
4575 /* ??? We want to use SSA_VAL here. But possibly not
4576 allow VN_TOP. */
4577 vp1->cclhs = vn_valueize (gimple_cond_lhs (last1));
4578 vp1->ccrhs = vn_valueize (gimple_cond_rhs (last1));
4579 }
4580 vp1->result = result;
4581 vp1->hashcode = vn_phi_compute_hash (vp1);
4582
4583 slot = valid_info->phis->find_slot_with_hash (vp1, vp1->hashcode, INSERT);
4584  gcc_assert (!*slot);
4585
4586 *slot = vp1;
4587 vp1->next = last_inserted_phi;
4588 last_inserted_phi = vp1;
4589 return vp1;
4590}
4591
4592
4593/* Return true if BB1 is dominated by BB2, taking into account edges
4594   that are not executable.  When ALLOW_BACK is false, treat
4595   non-executable backedges as executable.  */
4596
4597static bool
4598dominated_by_p_w_unex (basic_block bb1, basic_block bb2, bool allow_back)
4599{
4600 edge_iterator ei;
4601 edge e;
4602
4603 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
4604 return true;
4605
4606  /* Before iterating we'd like to know if there exists an
4607     (executable) path from bb2 to bb1 at all; if not we can
4608     directly return false.  For now simply iterate once.  */
4609
4610 /* Iterate to the single executable bb1 predecessor. */
4611  if (EDGE_COUNT (bb1->preds) > 1)
4612 {
4613      edge prede = NULL;
4614      FOR_EACH_EDGE (e, ei, bb1->preds)
4615 if ((e->flags & EDGE_EXECUTABLE)
4616 || (!allow_back && (e->flags & EDGE_DFS_BACK)))
4617 {
4618 if (prede)
4619 {
4620		  prede = NULL;
4621 break;
4622 }
4623 prede = e;
4624 }
4625 if (prede)
4626 {
4627 bb1 = prede->src;
4628
4629 /* Re-do the dominance check with changed bb1. */
4630 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
4631 return true;
4632 }
4633 }
4634
4635 /* Iterate to the single executable bb2 successor. */
4636  edge succe = NULL;
4637  FOR_EACH_EDGE (e, ei, bb2->succs)
4638 if ((e->flags & EDGE_EXECUTABLE)
4639 || (!allow_back && (e->flags & EDGE_DFS_BACK)))
4640 {
4641 if (succe)
4642 {
4643	  succe = NULL;
4644 break;
4645 }
4646 succe = e;
4647 }
4648 if (succe)
4649 {
4650 /* Verify the reached block is only reached through succe.
4651 If there is only one edge we can spare us the dominator
4652 check and iterate directly. */
4653      if (EDGE_COUNT (succe->dest->preds) > 1)
4654 {
4655	  FOR_EACH_EDGE (e, ei, succe->dest->preds)
4656 if (e != succe
4657 && ((e->flags & EDGE_EXECUTABLE)
4658 || (!allow_back && (e->flags & EDGE_DFS_BACK))))
4659 {
4660		succe = NULL;
4661 break;
4662 }
4663 }
4664 if (succe)
4665 {
4666 bb2 = succe->dest;
4667
4668 /* Re-do the dominance check with changed bb2. */
4669 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
4670 return true;
4671 }
4672 }
4673
4674 /* We could now iterate updating bb1 / bb2. */
4675 return false;
4676}
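
/* Illustrative sketch, not part of tree-ssa-sccvn.c: the core trick of
   dominated_by_p_w_unex on a toy CFG.  If a block has exactly one
   executable incoming edge, every executable path into it runs through
   that edge's source, so the dominance query may be retried from there;
   the bb2 successor case is symmetric.  toy_edge/toy_bb are invented
   types, not GCC's edge/basic_block.  */

#include <vector>

struct toy_edge { int src; int dest; bool executable; };
struct toy_bb { std::vector<toy_edge> preds; };

/* Return the source of the single executable predecessor edge of BB,
   or -1 if there is none or more than one (mirrors the prede loop).  */
static int
single_executable_pred (const toy_bb &bb)
{
  int found = -1;
  for (const toy_edge &e : bb.preds)
    if (e.executable)
      {
	if (found != -1)
	  return -1;			/* second executable edge: give up */
	found = e.src;
      }
  return found;
}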
4677
4678/* Set the value number of FROM to TO, return true if it has changed
4679 as a result. */
4680
4681static inline bool
4682set_ssa_val_to (tree from, tree to)
4683{
4684 vn_ssa_aux_t from_info = VN_INFO (from);
4685 tree currval = from_info->valnum; // SSA_VAL (from)
4686 poly_int64 toff, coff;
4687 bool curr_undefined = false;
4688 bool curr_invariant = false;
4689
4690 /* The only thing we allow as value numbers are ssa_names
4691 and invariants. So assert that here. We don't allow VN_TOP
4692 as visiting a stmt should produce a value-number other than
4693 that.
4694 ??? Still VN_TOP can happen for unreachable code, so force
4695 it to varying in that case. Not all code is prepared to
4696 get VN_TOP on valueization. */
4697 if (to == VN_TOP)
4698 {
4699 /* ??? When iterating and visiting PHI <undef, backedge-value>
4700 for the first time we rightfully get VN_TOP and we need to
4701 preserve that to optimize for example gcc.dg/tree-ssa/ssa-sccvn-2.c.
4702 With SCCVN we were simply lucky we iterated the other PHI
4703 cycles first and thus visited the backedge-value DEF. */
4704 if (currval == VN_TOP)
4705 goto set_and_exit;
4706 if (dump_file && (dump_flags & TDF_DETAILS))
4707 fprintf (dump_file, "Forcing value number to varying on "
4708 "receiving VN_TOP\n");
4709 to = from;
4710 }
4711
4712  gcc_checking_assert (to != NULL_TREE
4713		       && ((TREE_CODE (to) == SSA_NAME
4714			    && (to == from || SSA_VAL (to) == to))
4715			   || is_gimple_min_invariant (to)));
4716
4717 if (from != to)
4718 {
4719 if (currval == from)
4720 {
4721 if (dump_file && (dump_flags & TDF_DETAILS))
4722 {
4723 fprintf (dump_file, "Not changing value number of ");
4724 print_generic_expr (dump_file, from);
4725 fprintf (dump_file, " from VARYING to ");
4726 print_generic_expr (dump_file, to);
4727 fprintf (dump_file, "\n");
4728 }
4729 return false;
4730 }
4731 curr_invariant = is_gimple_min_invariant (currval);
4732      curr_undefined = (TREE_CODE (currval) == SSA_NAME
4733 && ssa_undefined_value_p (currval, false));
4734 if (currval != VN_TOP
4735 && !curr_invariant
4736 && !curr_undefined
4737 && is_gimple_min_invariant (to))
4738 {
4739 if (dump_file && (dump_flags & TDF_DETAILS))
4740 {
4741 fprintf (dump_file, "Forcing VARYING instead of changing "
4742 "value number of ");
4743 print_generic_expr (dump_file, from);
4744 fprintf (dump_file, " from ");
4745 print_generic_expr (dump_file, currval);
4746 fprintf (dump_file, " (non-constant) to ");
4747 print_generic_expr (dump_file, to);
4748 fprintf (dump_file, " (constant)\n");
4749 }
4750 to = from;
4751 }
4752 else if (currval != VN_TOP
4753 && !curr_undefined
4754	       && TREE_CODE (to) == SSA_NAME
4755 && ssa_undefined_value_p (to, false))
4756 {
4757 if (dump_file && (dump_flags & TDF_DETAILS))
4758 {
4759 fprintf (dump_file, "Forcing VARYING instead of changing "
4760 "value number of ");
4761 print_generic_expr (dump_file, from);
4762 fprintf (dump_file, " from ");
4763 print_generic_expr (dump_file, currval);
4764 fprintf (dump_file, " (non-undefined) to ");
4765 print_generic_expr (dump_file, to);
4766 fprintf (dump_file, " (undefined)\n");
4767 }
4768 to = from;
4769 }
4770      else if (TREE_CODE (to) == SSA_NAME
4771	       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
4772	to = from;
4773 }
4774
4775set_and_exit:
4776 if (dump_file && (dump_flags & TDF_DETAILS))
4777 {
4778 fprintf (dump_file, "Setting value number of ");
4779 print_generic_expr (dump_file, from);
4780 fprintf (dump_file, " to ");
4781 print_generic_expr (dump_file, to);
4782 }
4783
4784 if (currval != to
4785 && !operand_equal_p (currval, to, 0)
4786 /* Different undefined SSA names are not actually different. See
4787	 PR82320 for a testcase where we'd otherwise not terminate iteration.  */
4788 && !(curr_undefined
4789	   && TREE_CODE (to) == SSA_NAME
4790 && ssa_undefined_value_p (to, false))
4791 /* ??? For addresses involving volatile objects or types operand_equal_p
4792 does not reliably detect ADDR_EXPRs as equal. We know we are only
4793 getting invariant gimple addresses here, so can use
4794 get_addr_base_and_unit_offset to do this comparison. */
4795      && !(TREE_CODE (currval) == ADDR_EXPR
4796	   && TREE_CODE (to) == ADDR_EXPR
4797	   && (get_addr_base_and_unit_offset (TREE_OPERAND (currval, 0), &coff)
4798	       == get_addr_base_and_unit_offset (TREE_OPERAND (to, 0), &toff))
4799	   && known_eq (coff, toff)))
4800 {
4801 if (to != from
4802 && currval != VN_TOP
4803 && !curr_undefined
4804 /* We do not want to allow lattice transitions from one value
4805 to another since that may lead to not terminating iteration
4806 (see PR95049). Since there's no convenient way to check
4807 for the allowed transition of VAL -> PHI (loop entry value,
4808 same on two PHIs, to same PHI result) we restrict the check
4809 to invariants. */
4810 && curr_invariant
4811 && is_gimple_min_invariant (to))
4812 {
4813 if (dump_file && (dump_flags & TDF_DETAILS))
4814 fprintf (dump_file, " forced VARYING");
4815 to = from;
4816 }
4817 if (dump_file && (dump_flags & TDF_DETAILS))
4818 fprintf (dump_file, " (changed)\n");
4819 from_info->valnum = to;
4820 return true;
4821 }
4822 if (dump_file && (dump_flags & TDF_DETAILS))
4823 fprintf (dump_file, "\n");
4824 return false;
4825}
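
/* Illustrative sketch, not part of tree-ssa-sccvn.c: the lattice
   discipline set_ssa_val_to enforces, reduced to three abstract states.
   A cell only ever moves down the lattice (TOP -> VALUE -> VARYING); a
   requested sideways move between two different values is forced to
   VARYING instead, which is what bounds the number of iterations
   (compare the PR95049 comment above).  All names here are invented.  */

enum lat_state { LAT_TOP, LAT_VALUE, LAT_VARYING };

struct lat_cell { lat_state state; int value; };

/* Returns true if CELL changed, mimicking set_ssa_val_to's result.  */
static bool
model_set_val (lat_cell &cell, int new_value)
{
  if (cell.state == LAT_TOP)
    {
      cell.state = LAT_VALUE;		/* first real value */
      cell.value = new_value;
      return true;
    }
  if (cell.state == LAT_VALUE && cell.value != new_value)
    {
      cell.state = LAT_VARYING;		/* no VALUE -> VALUE transitions */
      return true;
    }
  return false;				/* same value, or already VARYING */
}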
4826
4827/* Set all definitions in STMT to value number to themselves.
4828 Return true if a value number changed. */
4829
4830static bool
4831defs_to_varying (gimple *stmt)
4832{
4833 bool changed = false;
4834 ssa_op_iter iter;
4835 def_operand_p defp;
4836
4837  FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
4838 {
4839      tree def = DEF_FROM_PTR (defp);
4840 changed |= set_ssa_val_to (def, def);
4841 }
4842 return changed;
4843}
4844
4845/* Visit a copy between LHS and RHS, return true if the value number
4846 changed. */
4847
4848static bool
4849visit_copy (tree lhs, tree rhs)
4850{
4851 /* Valueize. */
4852 rhs = SSA_VAL (rhs);
4853
4854 return set_ssa_val_to (lhs, rhs);
4855}
4856
4857/* Lookup a value for OP in type WIDE_TYPE where the value in type of OP
4858 is the same. */
4859
4860static tree
4861valueized_wider_op (tree wide_type, tree op, bool allow_truncate)
4862{
4863  if (TREE_CODE (op) == SSA_NAME)
4864 op = vn_valueize (op);
4865
4866 /* Either we have the op widened available. */
4867 tree ops[3] = {};
4868 ops[0] = op;
4869 tree tem = vn_nary_op_lookup_pieces (1, NOP_EXPR,
4870				       wide_type, ops, NULL);
4871 if (tem)
4872 return tem;
4873
4874 /* Or the op is truncated from some existing value. */
4875  if (allow_truncate && TREE_CODE (op) == SSA_NAME)
4876 {
4877      gimple *def = SSA_NAME_DEF_STMT (op);
4878 if (is_gimple_assign (def)
4879	  && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
4880 {
4881 tem = gimple_assign_rhs1 (def);
4882	  if (useless_type_conversion_p (wide_type, TREE_TYPE (tem)))
4884	      if (TREE_CODE (tem) == SSA_NAME)
4885 tem = vn_valueize (tem);
4886 return tem;
4887 }
4888 }
4889 }
4890
4891 /* For constants simply extend it. */
4892  if (TREE_CODE (op) == INTEGER_CST)
4893 return wide_int_to_tree (wide_type, wi::to_wide (op));
4894
4895  return NULL_TREE;
4896}
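
/* Illustrative sketch, not part of tree-ssa-sccvn.c: the INTEGER_CST
   fallback of valueized_wider_op modeled with fixed-width integers
   instead of wide_int_to_tree.  A constant operand can always be
   synthesized in the wider type by extending it exactly as the narrow
   type dictates.  model_widen_constant is an invented name.  */

#include <cstdint>

static int64_t
model_widen_constant (int32_t op, bool op_unsigned)
{
  /* Zero-extend for unsigned narrow types, sign-extend otherwise,
     mirroring wi::to_wide + wide_int_to_tree above.  */
  return op_unsigned ? (int64_t) (uint32_t) op : (int64_t) op;
}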
4897
4898/* Visit a nary operator RHS, value number it, and return true if the
4899 value number of LHS has changed as a result. */
4900
4901static bool
4902visit_nary_op (tree lhs, gassign *stmt)
4903{
4904 vn_nary_op_t vnresult;
4905 tree result = vn_nary_op_lookup_stmt (stmt, &vnresult);
4906 if (! result && vnresult)
4907 result = vn_nary_op_get_predicated_value (vnresult, gimple_bb (stmt));
4908 if (result)
4909 return set_ssa_val_to (lhs, result);
4910
4911 /* Do some special pattern matching for redundancies of operations
4912 in different types. */
4913 enum tree_code code = gimple_assign_rhs_code (stmt);
4914  tree type = TREE_TYPE (lhs);
4915 tree rhs1 = gimple_assign_rhs1 (stmt);
4916 switch (code)
4917 {
4918    CASE_CONVERT:
4919 /* Match arithmetic done in a different type where we can easily
4920 substitute the result from some earlier sign-changed or widened
4921 operation. */
4922      if (INTEGRAL_TYPE_P (type)
4923	  && TREE_CODE (rhs1) == SSA_NAME
4924	  /* We only handle sign-changes, zero-extension -> & mask or
4925	     sign-extension if we know the inner operation doesn't
4926	     overflow.  */
4927	  && (((TYPE_UNSIGNED (TREE_TYPE (rhs1))
4928		|| (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
4929		    && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (rhs1))))
4930	       && TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (rhs1)))
4931	      || TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (rhs1))))
4932 {
4933	  gassign *def = dyn_cast <gassign *> (SSA_NAME_DEF_STMT (rhs1));
4934 if (def
4935 && (gimple_assign_rhs_code (def) == PLUS_EXPR
4936 || gimple_assign_rhs_code (def) == MINUS_EXPR
4937 || gimple_assign_rhs_code (def) == MULT_EXPR))
4938 {
4939 tree ops[3] = {};
4940 /* When requiring a sign-extension we cannot model a
4941 previous truncation with a single op so don't bother. */
4942	      bool allow_truncate = TYPE_UNSIGNED (TREE_TYPE (rhs1));
4943 /* Either we have the op widened available. */
4944 ops[0] = valueized_wider_op (type, gimple_assign_rhs1 (def),
4945 allow_truncate);
4946 if (ops[0])
4947 ops[1] = valueized_wider_op (type, gimple_assign_rhs2 (def),
4948 allow_truncate);
4949 if (ops[0] && ops[1])
4950 {
4951 ops[0] = vn_nary_op_lookup_pieces
4952		    (2, gimple_assign_rhs_code (def), type, ops, NULL);
4953 /* We have wider operation available. */
4954 if (ops[0]
4955 /* If the leader is a wrapping operation we can
4956 insert it for code hoisting w/o introducing
4957 undefined overflow. If it is not it has to
4958 be available. See PR86554. */
4959		      && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (ops[0]))
4960 || (rpo_avail && vn_context_bb
4961 && rpo_avail->eliminate_avail (vn_context_bb,
4962 ops[0]))))
4963 {
4964		      unsigned lhs_prec = TYPE_PRECISION (type);
4965		      unsigned rhs_prec = TYPE_PRECISION (TREE_TYPE (rhs1));
4966 if (lhs_prec == rhs_prec
4967			  || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
4968			      && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (rhs1))))
4969 {
4970 gimple_match_op match_op (gimple_match_cond::UNCOND,
4971 NOP_EXPR, type, ops[0]);
4972 result = vn_nary_build_or_lookup (&match_op);
4973 if (result)
4974 {
4975 bool changed = set_ssa_val_to (lhs, result);
4976 vn_nary_op_insert_stmt (stmt, result);
4977 return changed;
4978 }
4979 }
4980 else
4981 {
4982 tree mask = wide_int_to_tree
4983 (type, wi::mask (rhs_prec, false, lhs_prec));
4984 gimple_match_op match_op (gimple_match_cond::UNCOND,
4985 BIT_AND_EXPR,
4986					    TREE_TYPE (lhs),
4987 ops[0], mask);
4988 result = vn_nary_build_or_lookup (&match_op);
4989 if (result)
4990 {
4991 bool changed = set_ssa_val_to (lhs, result);
4992 vn_nary_op_insert_stmt (stmt, result);
4993 return changed;
4994 }
4995 }
4996 }
4997 }
4998 }
4999 }
5000 break;
5001 case BIT_AND_EXPR:
5002      if (INTEGRAL_TYPE_P (type)
5003	  && TREE_CODE (rhs1) == SSA_NAME
5004	  && TREE_CODE (gimple_assign_rhs2 (stmt)) == INTEGER_CST
5005	  && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)
5006 && default_vn_walk_kind != VN_NOWALK
5007	  && CHAR_BIT == 8
5008	  && BITS_PER_UNIT == 8
5009	  && BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN
5010 && !integer_all_onesp (gimple_assign_rhs2 (stmt))
5011 && !integer_zerop (gimple_assign_rhs2 (stmt)))
5012 {
5013	  gassign *ass = dyn_cast <gassign *> (SSA_NAME_DEF_STMT (rhs1));
5014 if (ass
5015 && !gimple_has_volatile_ops (ass)
5016 && vn_get_stmt_kind (ass) == VN_REFERENCE)
5017 {
5018 tree last_vuse = gimple_vuse (ass);
5019 tree op = gimple_assign_rhs1 (ass);
5020 tree result = vn_reference_lookup (op, gimple_vuse (ass),
5021 default_vn_walk_kind,
5022					     NULL, true, &last_vuse,
5023 gimple_assign_rhs2 (stmt));
5024 if (result
5025	      && useless_type_conversion_p (TREE_TYPE (result),
5026					    TREE_TYPE (op)))
5027 return set_ssa_val_to (lhs, result);
5028 }
5029 }
5030 break;
5031 case TRUNC_DIV_EXPR:
5032      if (TYPE_UNSIGNED (type))
5033 break;
5034 /* Fallthru. */
5035 case RDIV_EXPR:
5036 case MULT_EXPR:
5037 /* Match up ([-]a){/,*}([-])b with v=a{/,*}b, replacing it with -v. */
5038 if (! HONOR_SIGN_DEPENDENT_ROUNDING (type))
5039 {
5040 tree rhs[2];
5041 rhs[0] = rhs1;
5042 rhs[1] = gimple_assign_rhs2 (stmt);
5043 for (unsigned i = 0; i <= 1; ++i)
5044 {
5045 unsigned j = i == 0 ? 1 : 0;
5046 tree ops[2];
5047 gimple_match_op match_op (gimple_match_cond::UNCOND,
5048 NEGATE_EXPR, type, rhs[i]);
5049 ops[i] = vn_nary_build_or_lookup_1 (&match_op, false, true);
5050 ops[j] = rhs[j];
5051 if (ops[i]
5052 && (ops[0] = vn_nary_op_lookup_pieces (2, code,
5053							 type, ops, NULL)))
5054 {
5055 gimple_match_op match_op (gimple_match_cond::UNCOND,
5056 NEGATE_EXPR, type, ops[0]);
5057 result = vn_nary_build_or_lookup_1 (&match_op, true, false);
5058 if (result)
5059 {
5060 bool changed = set_ssa_val_to (lhs, result);
5061 vn_nary_op_insert_stmt (stmt, result);
5062 return changed;
5063 }
5064 }
5065 }
5066 }
5067 break;
5068 default:
5069 break;
5070 }
5071
5072 bool changed = set_ssa_val_to (lhs, lhs);
5073 vn_nary_op_insert_stmt (stmt, lhs);
5074 return changed;
5075}
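
/* Illustrative sketch, not part of tree-ssa-sccvn.c: the mask fallback in
   the CASE_CONVERT arm above, as a worked example.  When a uint8_t
   addition is zero-extended to 32 bits and a 32-bit leader for the same
   addition is available, the extension equals that leader ANDed with a
   mask of the 8 low bits (wi::mask (8, false, 32) == 0xff), so no new
   arithmetic needs to be inserted.  model_zext_as_mask is invented.  */

#include <cassert>
#include <cstdint>

static void
model_zext_as_mask (uint8_t a, uint8_t b)
{
  uint32_t wide_leader = (uint32_t) a + (uint32_t) b;	/* the wider op */
  uint32_t zext_of_narrow = (uint32_t) (uint8_t) (a + b);
  assert (zext_of_narrow == (wide_leader & 0xff));	/* the BIT_AND_EXPR */
}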
5076
5077/* Visit a call STMT storing into LHS. Return true if the value number
5078 of the LHS has changed as a result. */
5079
5080static bool
5081visit_reference_op_call (tree lhs, gcall *stmt)
5082{
5083 bool changed = false;
5084 struct vn_reference_s vr1;
5085  vn_reference_t vnresult = NULL;
5086 tree vdef = gimple_vdef (stmt);
5087
5088 /* Non-ssa lhs is handled in copy_reference_ops_from_call. */
5089  if (lhs && TREE_CODE (lhs) != SSA_NAME)
5090    lhs = NULL_TREE;
5091
5092 vn_reference_lookup_call (stmt, &vnresult, &vr1);
5093 if (vnresult)
5094 {
5095 if (vnresult->result_vdef && vdef)
5096 changed |= set_ssa_val_to (vdef, vnresult->result_vdef);
5097 else if (vdef)
5098 /* If the call was discovered to be pure or const reflect
5099 that as far as possible. */
5100 changed |= set_ssa_val_to (vdef, vuse_ssa_val (gimple_vuse (stmt)));
5101
5102 if (!vnresult->result && lhs)
5103 vnresult->result = lhs;
5104
5105 if (vnresult->result && lhs)
5106 changed |= set_ssa_val_to (lhs, vnresult->result);
5107 }
5108 else
5109 {
5110 vn_reference_t vr2;
5111 vn_reference_s **slot;
5112 tree vdef_val = vdef;
5113 if (vdef)
5114 {
5115	  /* If we value numbered an indirect call's function to one
5116	     not clobbering memory, value number its VDEF to its
5117	     VUSE.  */
5118 tree fn = gimple_call_fn (stmt);
5119	  if (fn && TREE_CODE (fn) == SSA_NAME)
5120 {
5121 fn = SSA_VAL (fn);
5122	      if (TREE_CODE (fn) == ADDR_EXPR
5123		  && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
5124		  && (flags_from_decl_or_type (TREE_OPERAND (fn, 0))
5125		      & (ECF_CONST | ECF_PURE)))
5126 vdef_val = vuse_ssa_val (gimple_vuse (stmt));
5127 }
5128 changed |= set_ssa_val_to (vdef, vdef_val);
5129 }
5130 if (lhs)
5131 changed |= set_ssa_val_to (lhs, lhs);
5132      vr2 = XOBNEW (&vn_tables_obstack, vn_reference_s);
5133 vr2->vuse = vr1.vuse;
5134 /* As we are not walking the virtual operand chain we know the
5135 shared_lookup_references are still original so we can re-use
5136 them here. */
5137 vr2->operands = vr1.operands.copy ();
5138 vr2->type = vr1.type;
5139 vr2->punned = vr1.punned;
5140 vr2->set = vr1.set;
5141 vr2->base_set = vr1.base_set;
5142 vr2->hashcode = vr1.hashcode;
5143 vr2->result = lhs;
5144 vr2->result_vdef = vdef_val;
5145 vr2->value_id = 0;
5146 slot = valid_info->references->find_slot_with_hash (vr2, vr2->hashcode,
5147 INSERT);
5148 gcc_assert (!*slot)((void)(!(!*slot) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-sccvn.c"
, 5148, __FUNCTION__), 0 : 0))
;
5149 *slot = vr2;
5150 vr2->next = last_inserted_ref;
5151 last_inserted_ref = vr2;
5152 }
5153
5154 return changed;
5155}
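
/* Illustrative sketch, not part of tree-ssa-sccvn.c: why a const or pure
   callee lets the VDEF above be value numbered to the VUSE.  Memory
   states are modeled as plain integers; a call that cannot clobber
   memory maps the incoming state to itself, so later loads can look
   straight through it.  model_call_vdef is an invented name.  */

static int
model_call_vdef (int vuse_state, bool callee_const_or_pure, int &next_state)
{
  if (callee_const_or_pure)
    return vuse_state;			/* VDEF == VUSE: nothing clobbered */
  return ++next_state;			/* otherwise a fresh memory state */
}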
5156
5157/* Visit a load from a reference operator RHS, part of STMT, value number it,
5158 and return true if the value number of the LHS has changed as a result. */
5159
5160static bool
5161visit_reference_op_load (tree lhs, tree op, gimple *stmt)
5162{
5163 bool changed = false;
5164 tree result;
5165 vn_reference_t res;
5166
5167 tree vuse = gimple_vuse (stmt);
5168 tree last_vuse = vuse;
5169 result = vn_reference_lookup (op, vuse, default_vn_walk_kind, &res, true, &last_vuse);
5170
5171 /* We handle type-punning through unions by value-numbering based
5172 on offset and size of the access. Be prepared to handle a
5173 type-mismatch here via creating a VIEW_CONVERT_EXPR. */
5174 if (result
5175      && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
5177 /* Avoid the type punning in case the result mode has padding where
5178 the op we lookup has not. */
5179      if (maybe_lt (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (result))),
5180		    GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (op)))))
5181	result = NULL_TREE;
5182 else
5183 {
5184 /* We will be setting the value number of lhs to the value number
5185 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
5186 So first simplify and lookup this expression to see if it
5187 is already available. */
5188 gimple_match_op res_op (gimple_match_cond::UNCOND,
5189			      VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
5190 result = vn_nary_build_or_lookup (&res_op);
5191 if (result
5192	  && TREE_CODE (result) == SSA_NAME
5193 && VN_INFO (result)->needs_insertion)
5194 /* Track whether this is the canonical expression for different
5195 typed loads. We use that as a stopgap measure for code
5196 hoisting when dealing with floating point loads. */
5197 res->punned = true;
5198 }
5199
5200      /* When building the conversion fails, avoid inserting the reference
5201 again. */
5202 if (!result)
5203 return set_ssa_val_to (lhs, lhs);
5204 }
5205
5206 if (result)
5207 changed = set_ssa_val_to (lhs, result);
5208 else
5209 {
5210 changed = set_ssa_val_to (lhs, lhs);
5211      vn_reference_insert (op, lhs, last_vuse, NULL_TREE);
5212 if (vuse && SSA_VAL (last_vuse) != SSA_VAL (vuse))
5213 {
5214 if (dump_file && (dump_flags & TDF_DETAILS))
5215 {
5216 fprintf (dump_file, "Using extra use virtual operand ");
5217 print_generic_expr (dump_file, last_vuse);
5218 fprintf (dump_file, "\n");
5219 }
5220	  vn_reference_insert (op, lhs, vuse, NULL_TREE);
5221 }
5222 }
5223
5224 return changed;
5225}
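
/* Illustrative sketch, not part of tree-ssa-sccvn.c: the type-punning
   case above in plain C++.  A load at the same offset and size but with
   a different type may only be CSEd as a bit-reinterpretation, which
   VIEW_CONVERT_EXPR expresses in GIMPLE; memcpy models that here.
   model_view_convert is an invented name.  */

#include <cstdint>
#include <cstring>

static float
model_view_convert (uint32_t stored_bits)
{
  float f;
  static_assert (sizeof f == sizeof stored_bits, "same size required");
  std::memcpy (&f, &stored_bits, sizeof f);	/* VIEW_CONVERT_EXPR analogue */
  return f;
}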
5226
5227
5228/* Visit a store to a reference operator LHS, part of STMT, value number it,
5229 and return true if the value number of the LHS has changed as a result. */
5230
5231static bool
5232visit_reference_op_store (tree lhs, tree op, gimple *stmt)
5233{
5234 bool changed = false;
5235  vn_reference_t vnresult = NULL;
5236 tree assign;
5237 bool resultsame = false;
5238 tree vuse = gimple_vuse (stmt);
5239 tree vdef = gimple_vdef (stmt);
5240
5241  if (TREE_CODE (op) == SSA_NAME)
5242 op = SSA_VAL (op);
5243
5244  /* First we want to look up using the *vuses* from the store and see
5245     if the last store to this location with the same address
5246 had the same value.
5247
5248 The vuses represent the memory state before the store. If the
5249 memory state, address, and value of the store is the same as the
5250 last store to this location, then this store will produce the
5251 same memory state as that store.
5252
5253 In this case the vdef versions for this store are value numbered to those
5254 vuse versions, since they represent the same memory state after
5255 this store.
5256
5257 Otherwise, the vdefs for the store are used when inserting into
5258 the table, since the store generates a new memory state. */
5259
5260 vn_reference_lookup (lhs, vuse, VN_NOWALK, &vnresult, false);
5261 if (vnresult
5262 && vnresult->result)
5263 {
5264 tree result = vnresult->result;
5265      gcc_checking_assert (TREE_CODE (result) != SSA_NAME
5266			   || result == SSA_VAL (result));
5267 resultsame = expressions_equal_p (result, op);
5268 if (resultsame)
5269 {
5270 /* If the TBAA state isn't compatible for downstream reads
5271 we cannot value-number the VDEFs the same. */
5272 ao_ref lhs_ref;
5273 ao_ref_init (&lhs_ref, lhs);
5274 alias_set_type set = ao_ref_alias_set (&lhs_ref);
5275 alias_set_type base_set = ao_ref_base_alias_set (&lhs_ref);
5276 if ((vnresult->set != set
5277 && ! alias_set_subset_of (set, vnresult->set))
5278 || (vnresult->base_set != base_set
5279 && ! alias_set_subset_of (base_set, vnresult->base_set)))
5280 resultsame = false;
5281 }
5282 }
5283
5284 if (!resultsame)
5285 {
5286 /* Only perform the following when being called from PRE
5287 which embeds tail merging. */
5288 if (default_vn_walk_kind == VN_WALK)
5289 {
5290	  assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
5291 vn_reference_lookup (assign, vuse, VN_NOWALK, &vnresult, false);
5292 if (vnresult)
5293 {
5294 VN_INFO (vdef)->visited = true;
5295 return set_ssa_val_to (vdef, vnresult->result_vdef);
5296 }
5297 }
5298
5299 if (dump_file && (dump_flags & TDF_DETAILS))
5300 {
5301 fprintf (dump_file, "No store match\n");
5302 fprintf (dump_file, "Value numbering store ");
5303 print_generic_expr (dump_file, lhs);
5304 fprintf (dump_file, " to ");
5305 print_generic_expr (dump_file, op);
5306 fprintf (dump_file, "\n");
5307 }
5308 /* Have to set value numbers before insert, since insert is
5309 going to valueize the references in-place. */
5310 if (vdef)
5311 changed |= set_ssa_val_to (vdef, vdef);
5312
5313 /* Do not insert structure copies into the tables. */
5314 if (is_gimple_min_invariant (op)
5315 || is_gimple_reg (op))
5316	vn_reference_insert (lhs, op, vdef, NULL);
5317
5318 /* Only perform the following when being called from PRE
5319 which embeds tail merging. */
5320 if (default_vn_walk_kind == VN_WALK)
5321 {
5322	  assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
5323 vn_reference_insert (assign, lhs, vuse, vdef);
5324 }
5325 }
5326 else
5327 {
5328 /* We had a match, so value number the vdef to have the value
5329 number of the vuse it came from. */
5330
5331 if (dump_file && (dump_flags & TDF_DETAILS))
5332 fprintf (dump_file, "Store matched earlier value, "
5333 "value numbering store vdefs to matching vuses.\n");
5334
5335 changed |= set_ssa_val_to (vdef, SSA_VAL (vuse));
5336 }
5337
5338 return changed;
5339}
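
/* Illustrative sketch, not part of tree-ssa-sccvn.c: the redundant-store
   rule above on a toy memory-state chain.  If the incoming state already
   records the same value at the same location, the store creates no new
   state and its VDEF is numbered to the VUSE; otherwise a fresh state id
   is produced.  mem_state/model_store are invented names.  */

#include <map>

struct mem_state
{
  int id;				/* stand-in for the VUSE/VDEF number */
  std::map<int, int> contents;		/* address -> stored value */
};

static int
model_store (mem_state &st, int addr, int value, int &next_id)
{
  auto it = st.contents.find (addr);
  if (it != st.contents.end () && it->second == value)
    return st.id;			/* resultsame: VDEF gets VUSE's number */
  st.contents[addr] = value;
  st.id = ++next_id;			/* a genuinely new memory state */
  return st.id;
}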
5340
5341/* Visit and value number PHI, return true if the value number
5342 changed. When BACKEDGES_VARYING_P is true then assume all
5343 backedge values are varying. When INSERTED is not NULL then
5344   this is just an ahead query for a possible iteration; set INSERTED
5345 to true if we'd insert into the hashtable. */
5346
5347static bool
5348visit_phi (gimple *phi, bool *inserted, bool backedges_varying_p)
5349{
5350  tree result, sameval = VN_TOP, seen_undef = NULL_TREE;
5351  tree backedge_val = NULL_TREE;
5352  bool seen_non_backedge = false;
5353  tree sameval_base = NULL_TREE;
5354 poly_int64 soff, doff;
5355 unsigned n_executable = 0;
5356 edge_iterator ei;
5357 edge e;
5358
5359 /* TODO: We could check for this in initialization, and replace this
5360 with a gcc_assert. */
5361  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
5362    return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
5363
5364  /* We track whether a PHI was CSEd to, to avoid excessive iterations
5365 that would be necessary only because the PHI changed arguments
5366 but not value. */
5367 if (!inserted)
5368 gimple_set_plf (phi, GF_PLF_1, false);
5369
5370 /* See if all non-TOP arguments have the same value. TOP is
5371 equivalent to everything, so we can ignore it. */
5372  FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
5373 if (e->flags & EDGE_EXECUTABLE)
5374 {
5375	tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
5376
5377	if (def == PHI_RESULT (phi))
5378 continue;
5379 ++n_executable;
5380	if (TREE_CODE (def) == SSA_NAME)
5381 {
5382 if (!backedges_varying_p || !(e->flags & EDGE_DFS_BACK))
5383 def = SSA_VAL (def);
5384 if (e->flags & EDGE_DFS_BACK)
5385 backedge_val = def;
5386 }
5387 if (!(e->flags & EDGE_DFS_BACK))
5388 seen_non_backedge = true;
5389 if (def == VN_TOP)
5390 ;
5391 /* Ignore undefined defs for sameval but record one. */
5392	else if (TREE_CODE (def) == SSA_NAME
5393 && ! virtual_operand_p (def)
5394 && ssa_undefined_value_p (def, false))
5395 seen_undef = def;
5396 else if (sameval == VN_TOP)
5397 sameval = def;
5398 else if (!expressions_equal_p (def, sameval))
5399 {
5400 /* We know we're arriving only with invariant addresses here,
5401 try harder comparing them. We can do some caching here
5402 which we cannot do in expressions_equal_p. */
5403	    if (TREE_CODE (def) == ADDR_EXPR
5404		&& TREE_CODE (sameval) == ADDR_EXPR
5405 && sameval_base != (void *)-1)
5406 {
5407 if (!sameval_base)
5408 sameval_base = get_addr_base_and_unit_offset
5409		  (TREE_OPERAND (sameval, 0), &soff);
5410 if (!sameval_base)
5411 sameval_base = (tree)(void *)-1;
5412 else if ((get_addr_base_and_unit_offset
5413			(TREE_OPERAND (def, 0), &doff) == sameval_base)
5414		     && known_eq (soff, doff))
5415 continue;
5416 }
5417	    sameval = NULL_TREE;
5418 break;
5419 }
5420 }
5421
5422 /* If the value we want to use is flowing over the backedge and we
5423 should take it as VARYING but it has a non-VARYING value drop to
5424 VARYING.
5425 If we value-number a virtual operand never value-number to the
5426 value from the backedge as that confuses the alias-walking code.
5427 See gcc.dg/torture/pr87176.c. If the value is the same on a
5428 non-backedge everything is OK though. */
5429 bool visited_p;
5430 if ((backedge_val
5431 && !seen_non_backedge
5432       && TREE_CODE (backedge_val) == SSA_NAME
5433 && sameval == backedge_val
5434       && (SSA_NAME_IS_VIRTUAL_OPERAND (backedge_val)
5435 || SSA_VAL (backedge_val) != backedge_val))
5436 /* Do not value-number a virtual operand to sth not visited though
5437 given that allows us to escape a region in alias walking. */
5438 || (sameval
5439	  && TREE_CODE (sameval) == SSA_NAME
5440	  && !SSA_NAME_IS_DEFAULT_DEF (sameval)
5441	  && SSA_NAME_IS_VIRTUAL_OPERAND (sameval)
5442 && (SSA_VAL (sameval, &visited_p), !visited_p)))
5443 /* Note this just drops to VARYING without inserting the PHI into
5444 the hashes. */
5445    result = PHI_RESULT (phi);
5446  /* If none of the edges was executable, keep the value-number at VN_TOP;
5447     if only a single edge is executable, use its value.  */
5448 else if (n_executable <= 1)
5449 result = seen_undef ? seen_undef : sameval;
5450 /* If we saw only undefined values and VN_TOP use one of the
5451 undefined values. */
5452 else if (sameval == VN_TOP)
5453 result = seen_undef ? seen_undef : sameval;
5454 /* First see if it is equivalent to a phi node in this block. We prefer
5455 this as it allows IV elimination - see PRs 66502 and 67167. */
5456 else if ((result = vn_phi_lookup (phi, backedges_varying_p)))
5457 {
5458 if (!inserted
5459	  && TREE_CODE (result) == SSA_NAME
5460	  && gimple_code (SSA_NAME_DEF_STMT (result)) == GIMPLE_PHI)
5461 {
5462	  gimple_set_plf (SSA_NAME_DEF_STMT (result), GF_PLF_1, true);
5463 if (dump_file && (dump_flags & TDF_DETAILS))
5464 {
5465 fprintf (dump_file, "Marking CSEd to PHI node ");
5466	      print_gimple_expr (dump_file, SSA_NAME_DEF_STMT (result),
5467				 0, TDF_SLIM);
5468 fprintf (dump_file, "\n");
5469 }
5470 }
5471 }
5472 /* If all values are the same use that, unless we've seen undefined
5473 values as well and the value isn't constant.
5474 CCP/copyprop have the same restriction to not remove uninit warnings. */
5475 else if (sameval
5476 && (! seen_undef || is_gimple_min_invariant (sameval)))
5477 result = sameval;
5478 else
5479 {
5480      result = PHI_RESULT (phi);
5481 /* Only insert PHIs that are varying, for constant value numbers
5482 we mess up equivalences otherwise as we are only comparing
5483 the immediate controlling predicates. */
5484 vn_phi_insert (phi, result, backedges_varying_p);
5485 if (inserted)
5486 *inserted = true;
5487 }
5488
5489  return set_ssa_val_to (PHI_RESULT (phi), result);
5490}
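
/* Illustrative sketch, not part of tree-ssa-sccvn.c: the sameval scan at
   the heart of visit_phi, simplified.  Arguments on non-executable edges
   are ignored and VN_TOP (modeled as -1) is equivalent to everything; if
   all remaining arguments agree the PHI gets that common value, otherwise
   it numbers to its own result.  Undefined values, backedge restrictions
   and the PHI-table lookup of the real code are omitted here.  */

#include <utility>
#include <vector>

static int
model_phi_sameval (const std::vector<std::pair<int, bool> > &args,
		   int phi_result, int vn_top = -1)
{
  int sameval = vn_top;
  for (const auto &a : args)
    {
      if (!a.second || a.first == vn_top)	/* skip non-executable/TOP */
	continue;
      if (sameval == vn_top)
	sameval = a.first;
      else if (a.first != sameval)
	return phi_result;		/* disagreement: PHI is its own value */
    }
  return sameval == vn_top ? phi_result : sameval;
}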
5491
5492/* Try to simplify RHS using equivalences and constant folding. */
5493
5494static tree
5495try_to_simplify (gassign *stmt)
5496{
5497 enum tree_code code = gimple_assign_rhs_code (stmt);
5498 tree tem;
5499
5500 /* For stores we can end up simplifying a SSA_NAME rhs. Just return
5501 in this case, there is no point in doing extra work. */
5502 if (code == SSA_NAME)
5503    return NULL_TREE;
5504
5505 /* First try constant folding based on our current lattice. */
5506 mprts_hook = vn_lookup_simplify_result;
5507 tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize, vn_valueize);
5508  mprts_hook = NULL;
5509 if (tem
5510      && (TREE_CODE (tem) == SSA_NAME
5511 || is_gimple_min_invariant (tem)))
5512 return tem;
5513
5514  return NULL_TREE;
5515}
5516
5517/* Visit and value number STMT, return true if the value number
5518 changed. */
5519
5520static bool
5521visit_stmt (gimple *stmt, bool backedges_varying_p = false)
5522{
5523 bool changed = false;
5524
5525 if (dump_file && (dump_flags & TDF_DETAILS))
5526 {
5527 fprintf (dump_file, "Value numbering stmt = ");
5528 print_gimple_stmt (dump_file, stmt, 0);
5529 }
5530
5531 if (gimple_code (stmt) == GIMPLE_PHI)
5532 changed = visit_phi (stmt, NULL, backedges_varying_p);
5533 else if (gimple_has_volatile_ops (stmt))
5534 changed = defs_to_varying (stmt);
5535 else if (gassign *ass = dyn_cast <gassign *> (stmt))
5536 {
5537 enum tree_code code = gimple_assign_rhs_code (ass);
5538 tree lhs = gimple_assign_lhs (ass);
5539 tree rhs1 = gimple_assign_rhs1 (ass);
5540 tree simplified;
5541
5542 /* Shortcut for copies. Simplifying copies is pointless,
5543 since we copy the expression and value they represent. */
5544 if (code == SSA_NAME
5545 && TREE_CODE (lhs) == SSA_NAME)
5546 {
5547 changed = visit_copy (lhs, rhs1);
5548 goto done;
5549 }
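/* Editor's note: e.g. for a plain copy  x_2 = y_1;  visit_copy simply
   gives x_2 the value of y_1; running the simplifier on such a copy
   could not produce anything cheaper.  (Illustrative GIMPLE names,
   not taken from the analyzed file.)  */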
5550 simplified = try_to_simplify (ass);
5551 if (simplified)
5552 {
5553 if (dump_file && (dump_flags & TDF_DETAILS))
5554 {
5555 fprintf (dump_file, "RHS ");
5556 print_gimple_expr (dump_file, ass, 0);
5557 fprintf (dump_file, " simplified to ");
5558 print_generic_expr (dump_file, simplified);
5559 fprintf (dump_file, "\n");
5560 }
5561 }
5562 /* Setting value numbers to constants will occasionally
5563 screw up phi congruence because constants are not
5564 uniquely associated with a single ssa name that can be
5565 looked up. */
5566 if (simplified
5567 && is_gimple_min_invariant (simplified)
5568 && TREE_CODE (lhs) == SSA_NAME)
5569 {
5570 changed = set_ssa_val_to (lhs, simplified);
5571 goto done;
5572 }
5573 else if (simplified
5574 && TREE_CODE (simplified) == SSA_NAME
5575 && TREE_CODE (lhs) == SSA_NAME)
5576 {
5577 changed = visit_copy (lhs, simplified);
5578 goto done;
5579 }
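/* Editor's note on "phi congruence" above: if  a_1 = 5;  and
   b_2 = 5;  both value-number to the constant 5, the value 5 no
   longer maps back to a unique defining SSA name, so equivalences
   keyed on defining statements can be derailed.  (Hypothetical
   example, not from the analyzed file.)  */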
5580
5581 if ((TREE_CODE (lhs) == SSA_NAME
5582 /* We can substitute SSA_NAMEs that are live over
5583 abnormal edges with their constant value. */
5584 && !(gimple_assign_copy_p (ass)
5585 && is_gimple_min_invariant (rhs1))
5586 && !(simplified
5587 && is_gimple_min_invariant (simplified))
5588 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
5589 /* Stores or copies from SSA_NAMEs that are live over
5590 abnormal edges are a problem. */
5591 || (code == SSA_NAME
5592 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
5593 changed = defs_to_varying (ass);
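/* Editor's note: SSA names occurring in abnormal PHIs must stay
   coalescable with their PHI arguments because copies cannot be
   inserted on abnormal edges (e.g. setjmp receivers), which is why
   such defs are pinned to varying here.  */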
5594 else if (REFERENCE_CLASS_P (lhs)
5595 || DECL_P (lhs))
5596 changed = visit_reference_op_store (lhs, rhs1, ass);
5597 else if (TREE_CODE (lhs) == SSA_NAME)
5598 {
5599 if ((gimple_assign_copy_p (ass)
5600 && is_gimple_min_invariant (rhs1))
5601 || (simplified
5602 && is_gimple_min_invariant (simplified)))
5603 {
5604 if (simplified)
5605 changed = set_ssa_val_to (lhs, simplified);
5606 else
5607 changed = set_ssa_val_to (lhs, rhs1);
5608 }
5609 else
5610 {
5611 /* Visit the original statement. */
5612 switch (vn_get_stmt_kind (ass))
5613 {
5614 case VN_NARY:
5615 changed = visit_nary_op (lhs, ass);
5616 break;
5617 case VN_REFERENCE:
5618 changed = visit_reference_op_load (lhs, rhs1, ass);
5619 break;
5620 default:
5621 changed = defs_to_varying (ass);
5622 break;
5623 }
5624 }
5625 }
5626 else
5627 changed = defs_to_varying (ass);
5628 }
5629 else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
5630 {
5631 tree lhs = gimple_call_lhs (call_stmt);
5632 if (lhs && TREE_CODE (lhs) == SSA_NAME)
5633 {
5634 /* Try constant folding based on our current lattice. */
5635 tree simplified = gimple_fold_stmt_to_constant_1 (call_stmt,
5636 vn_valueize);
5637 if (simplified)
5638 {
5639 if (dump_file && (dump_flags & TDF_DETAILS))
5640 {
5641 fprintf (dump_file, "call ");
5642 print_gimple_expr (dump_file, call_stmt, 0);
5643 fprintf (dump_file, " simplified to ");
5644 print_generic_expr (dump_file, simplified);
5645 fprintf (dump_file, "\n");
5646 }
5647 }
5648 /* Setting value numbers to constants will occasionally
5649 screw up phi congruence because constants are not
5650 uniquely associated with a single ssa name that can be
5651 looked up. */
5652 if (simplified
5653 && is_gimple_min_invariant (simplified))
5654 {
5655 changed = set_ssa_val_to (lhs, simplified);
5656 if (gimple_vdef (call_stmt))
5657 changed |= set_ssa_val_to (gimple_vdef (call_stmt),
5658 SSA_VAL (gimple_vuse (call_stmt)));
5659 goto done;
5660 }
5661 else if (simplified
5662 && TREE_CODE (simplified) == SSA_NAME)
5663