Bug Summary

File:build/gcc/ipa-prop.cc
Warning:line 1739, column 7
Called C++ object pointer is null

Annotated Source Code

Press '?' to see keyboard shortcuts

clang -cc1 -cc1 -triple x86_64-suse-linux -analyze -disable-free -clear-ast-before-backend -disable-llvm-verifier -discard-value-names -main-file-name ipa-prop.cc -analyzer-checker=core -analyzer-checker=apiModeling -analyzer-checker=unix -analyzer-checker=deadcode -analyzer-checker=cplusplus -analyzer-checker=security.insecureAPI.UncheckedReturn -analyzer-checker=security.insecureAPI.getpw -analyzer-checker=security.insecureAPI.gets -analyzer-checker=security.insecureAPI.mktemp -analyzer-checker=security.insecureAPI.mkstemp -analyzer-checker=security.insecureAPI.vfork -analyzer-checker=nullability.NullPassedToNonnull -analyzer-checker=nullability.NullReturnedFromNonnull -analyzer-output plist -w -setup-static-analyzer -analyzer-config-compatibility-mode=true -mrelocation-model static -mframe-pointer=none -fmath-errno -ffp-contract=on -fno-rounding-math -mconstructor-aliases -funwind-tables=2 -target-cpu x86-64 -tune-cpu generic -debugger-tuning=gdb -fcoverage-compilation-dir=/buildworker/marxinbox-gcc-clang-static-analyzer/objdir/gcc -resource-dir /usr/lib64/clang/15.0.7 -D IN_GCC -D HAVE_CONFIG_H -I . -I . -I /buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc -I /buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/. 
-I /buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../include -I /buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libcpp/include -I /buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libcody -I /buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libdecnumber -I /buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libdecnumber/bid -I ../libdecnumber -I /buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libbacktrace -internal-isystem /usr/bin/../lib64/gcc/x86_64-suse-linux/13/../../../../include/c++/13 -internal-isystem /usr/bin/../lib64/gcc/x86_64-suse-linux/13/../../../../include/c++/13/x86_64-suse-linux -internal-isystem /usr/bin/../lib64/gcc/x86_64-suse-linux/13/../../../../include/c++/13/backward -internal-isystem /usr/lib64/clang/15.0.7/include -internal-isystem /usr/local/include -internal-isystem /usr/bin/../lib64/gcc/x86_64-suse-linux/13/../../../../x86_64-suse-linux/include -internal-externc-isystem /include -internal-externc-isystem /usr/include -O2 -Wno-narrowing -Wwrite-strings -Wno-long-long -Wno-variadic-macros -Wno-overlength-strings -fdeprecated-macro -fdebug-compilation-dir=/buildworker/marxinbox-gcc-clang-static-analyzer/objdir/gcc -ferror-limit 19 -fno-rtti -fgnuc-version=4.2.1 -vectorize-loops -vectorize-slp -analyzer-output=plist-html -analyzer-config silence-checkers=core.NullDereference -faddrsig -D__GCC_HAVE_DWARF2_CFI_ASM=1 -o /buildworker/marxinbox-gcc-clang-static-analyzer/objdir/clang-static-analyzer/2023-03-27-141847-20772-1/report-_es4xO.plist -x c++ /buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc

/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc

1/* Interprocedural analyses.
2 Copyright (C) 2005-2023 Free Software Foundation, Inc.
3
4This file is part of GCC.
5
6GCC is free software; you can redistribute it and/or modify it under
7the terms of the GNU General Public License as published by the Free
8Software Foundation; either version 3, or (at your option) any later
9version.
10
11GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12WARRANTY; without even the implied warranty of MERCHANTABILITY or
13FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14for more details.
15
16You should have received a copy of the GNU General Public License
17along with GCC; see the file COPYING3. If not see
18<http://www.gnu.org/licenses/>. */
19
20#include "config.h"
21#include "system.h"
22#include "coretypes.h"
23#include "backend.h"
24#include "rtl.h"
25#include "tree.h"
26#include "gimple.h"
27#include "alloc-pool.h"
28#include "tree-pass.h"
29#include "ssa.h"
30#include "tree-streamer.h"
31#include "cgraph.h"
32#include "diagnostic.h"
33#include "fold-const.h"
34#include "gimple-iterator.h"
35#include "gimple-fold.h"
36#include "tree-eh.h"
37#include "calls.h"
38#include "stor-layout.h"
39#include "print-tree.h"
40#include "gimplify.h"
41#include "gimplify-me.h"
42#include "gimple-walk.h"
43#include "symbol-summary.h"
44#include "ipa-prop.h"
45#include "tree-cfg.h"
46#include "tree-dfa.h"
47#include "tree-inline.h"
48#include "ipa-fnsummary.h"
49#include "gimple-pretty-print.h"
50#include "ipa-utils.h"
51#include "dbgcnt.h"
52#include "domwalk.h"
53#include "builtins.h"
54#include "tree-cfgcleanup.h"
55#include "options.h"
56#include "symtab-clones.h"
57#include "attr-fnspec.h"
58#include "gimple-range.h"
59
60/* Function summary where the parameter infos are actually stored. */
61ipa_node_params_t *ipa_node_params_sum = NULLnullptr;
62
63function_summary <ipcp_transformation *> *ipcp_transformation_sum = NULLnullptr;
64
65/* Edge summary for IPA-CP edge information. */
66ipa_edge_args_sum_t *ipa_edge_args_sum;
67
68/* Traits for a hash table for reusing already existing ipa_bits. */
69
70struct ipa_bit_ggc_hash_traits : public ggc_cache_remove <ipa_bits *>
71{
72 typedef ipa_bits *value_type;
73 typedef ipa_bits *compare_type;
74 static hashval_t
75 hash (const ipa_bits *p)
76 {
77 hashval_t t = (hashval_t) p->value.to_shwi ();
78 return iterative_hash_host_wide_int (p->mask.to_shwi (), t);
79 }
80 static bool
81 equal (const ipa_bits *a, const ipa_bits *b)
82 {
83 return a->value == b->value && a->mask == b->mask;
84 }
85 static const bool empty_zero_p = true;
86 static void
87 mark_empty (ipa_bits *&p)
88 {
89 p = NULLnullptr;
90 }
91 static bool
92 is_empty (const ipa_bits *p)
93 {
94 return p == NULLnullptr;
95 }
96 static bool
97 is_deleted (const ipa_bits *p)
98 {
99 return p == reinterpret_cast<const ipa_bits *> (1);
100 }
101 static void
102 mark_deleted (ipa_bits *&p)
103 {
104 p = reinterpret_cast<ipa_bits *> (1);
105 }
106};
107
108/* Hash table for avoid repeated allocations of equal ipa_bits. */
109static GTY ((cache)) hash_table<ipa_bit_ggc_hash_traits> *ipa_bits_hash_table;
110
111/* Traits for a hash table for reusing value_ranges used for IPA. Note that
112 the equiv bitmap is not hashed and is expected to be NULL. */
113
114struct ipa_vr_ggc_hash_traits : public ggc_cache_remove <value_range *>
115{
116 typedef value_range *value_type;
117 typedef value_range *compare_type;
118 static hashval_t
119 hash (const value_range *p)
120 {
121 inchash::hash hstate (p->kind ());
122 inchash::add_expr (p->min (), hstate);
123 inchash::add_expr (p->max (), hstate);
124 return hstate.end ();
125 }
126 static bool
127 equal (const value_range *a, const value_range *b)
128 {
129 return (types_compatible_p (a->type (), b->type ())
130 && *a == *b);
131 }
132 static const bool empty_zero_p = true;
133 static void
134 mark_empty (value_range *&p)
135 {
136 p = NULLnullptr;
137 }
138 static bool
139 is_empty (const value_range *p)
140 {
141 return p == NULLnullptr;
142 }
143 static bool
144 is_deleted (const value_range *p)
145 {
146 return p == reinterpret_cast<const value_range *> (1);
147 }
148 static void
149 mark_deleted (value_range *&p)
150 {
151 p = reinterpret_cast<value_range *> (1);
152 }
153};
154
155/* Hash table for avoid repeated allocations of equal value_ranges. */
156static GTY ((cache)) hash_table<ipa_vr_ggc_hash_traits> *ipa_vr_hash_table;
157
158/* Holders of ipa cgraph hooks: */
159static struct cgraph_node_hook_list *function_insertion_hook_holder;
160
161/* Description of a reference to an IPA constant. */
162struct ipa_cst_ref_desc
163{
164 /* Edge that corresponds to the statement which took the reference. */
165 struct cgraph_edge *cs;
166 /* Linked list of duplicates created when call graph edges are cloned. */
167 struct ipa_cst_ref_desc *next_duplicate;
168 /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
169 if out of control. */
170 int refcount;
171};
172
173/* Allocation pool for reference descriptions. */
174
175static object_allocator<ipa_cst_ref_desc> ipa_refdesc_pool
176 ("IPA-PROP ref descriptions");
177
178/* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
179 with NODE should prevent us from analyzing it for the purposes of IPA-CP. */
180
181static bool
182ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
183{
184 tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl)((tree_check ((node->decl), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 184, __FUNCTION__, (FUNCTION_DECL)))->function_decl.function_specific_optimization
)
;
185
186 if (!fs_opts)
187 return false;
188 return !opt_for_fn (node->decl, optimize)(opts_for_fn (node->decl)->x_optimize) || !opt_for_fn (node->decl, flag_ipa_cp)(opts_for_fn (node->decl)->x_flag_ipa_cp);
189}
190
191/* Return index of the formal whose tree is PTREE in function which corresponds
192 to INFO. */
193
194static int
195ipa_get_param_decl_index_1 (vec<ipa_param_descriptor, va_gc> *descriptors,
196 tree ptree)
197{
198 int i, count;
199
200 count = vec_safe_length (descriptors);
201 for (i = 0; i < count; i++)
202 if ((*descriptors)[i].decl_or_type == ptree)
203 return i;
204
205 return -1;
206}
207
208/* Return index of the formal whose tree is PTREE in function which corresponds
209 to INFO. */
210
211int
212ipa_get_param_decl_index (class ipa_node_params *info, tree ptree)
213{
214 return ipa_get_param_decl_index_1 (info->descriptors, ptree);
215}
216
217/* Populate the param_decl field in parameter DESCRIPTORS that correspond to
218 NODE. */
219
220static void
221ipa_populate_param_decls (struct cgraph_node *node,
222 vec<ipa_param_descriptor, va_gc> &descriptors)
223{
224 tree fndecl;
225 tree fnargs;
226 tree parm;
227 int param_num;
228
229 fndecl = node->decl;
230 gcc_assert (gimple_has_body_p (fndecl))((void)(!(gimple_has_body_p (fndecl)) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 230, __FUNCTION__), 0 : 0))
;
231 fnargs = DECL_ARGUMENTS (fndecl)((tree_check ((fndecl), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 231, __FUNCTION__, (FUNCTION_DECL)))->function_decl.arguments
)
;
232 param_num = 0;
233 for (parm = fnargs; parm; parm = DECL_CHAIN (parm)(((contains_struct_check (((contains_struct_check ((parm), (TS_DECL_MINIMAL
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 233, __FUNCTION__))), (TS_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 233, __FUNCTION__))->common.chain))
)
234 {
235 descriptors[param_num].decl_or_type = parm;
236 unsigned int cost = estimate_move_cost (TREE_TYPE (parm)((contains_struct_check ((parm), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 236, __FUNCTION__))->typed.type)
, true);
237 descriptors[param_num].move_cost = cost;
238 /* Watch overflow, move_cost is a bitfield. */
239 gcc_checking_assert (cost == descriptors[param_num].move_cost)((void)(!(cost == descriptors[param_num].move_cost) ? fancy_abort
("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 239, __FUNCTION__), 0 : 0))
;
240 param_num++;
241 }
242}
243
244/* Return how many formal parameters FNDECL has. */
245
246int
247count_formal_params (tree fndecl)
248{
249 tree parm;
250 int count = 0;
251 gcc_assert (gimple_has_body_p (fndecl))((void)(!(gimple_has_body_p (fndecl)) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 251, __FUNCTION__), 0 : 0))
;
252
253 for (parm = DECL_ARGUMENTS (fndecl)((tree_check ((fndecl), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 253, __FUNCTION__, (FUNCTION_DECL)))->function_decl.arguments
)
; parm; parm = DECL_CHAIN (parm)(((contains_struct_check (((contains_struct_check ((parm), (TS_DECL_MINIMAL
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 253, __FUNCTION__))), (TS_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 253, __FUNCTION__))->common.chain))
)
254 count++;
255
256 return count;
257}
258
259/* Return the declaration of Ith formal parameter of the function corresponding
260 to INFO. Note there is no setter function as this array is built just once
261 using ipa_initialize_node_params. */
262
263void
264ipa_dump_param (FILE *file, class ipa_node_params *info, int i)
265{
266 fprintf (file, "param #%i", i);
267 if ((*info->descriptors)[i].decl_or_type)
268 {
269 fprintf (file, " ");
270 print_generic_expr (file, (*info->descriptors)[i].decl_or_type);
271 }
272}
273
274/* If necessary, allocate vector of parameter descriptors in info of NODE.
275 Return true if they were allocated, false if not. */
276
277static bool
278ipa_alloc_node_params (struct cgraph_node *node, int param_count)
279{
280 ipa_node_params *info = ipa_node_params_sum->get_create (node);
281
282 if (!info->descriptors && param_count)
283 {
284 vec_safe_grow_cleared (info->descriptors, param_count, true);
285 return true;
286 }
287 else
288 return false;
289}
290
291/* Initialize the ipa_node_params structure associated with NODE by counting
292 the function parameters, creating the descriptors and populating their
293 param_decls. */
294
295void
296ipa_initialize_node_params (struct cgraph_node *node)
297{
298 ipa_node_params *info = ipa_node_params_sum->get_create (node);
299
300 if (!info->descriptors
301 && ipa_alloc_node_params (node, count_formal_params (node->decl)))
302 ipa_populate_param_decls (node, *info->descriptors);
303}
304
305/* Print the jump functions associated with call graph edge CS to file F. */
306
307static void
308ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
309{
310 ipa_edge_args *args = ipa_edge_args_sum->get (cs);
311 int count = ipa_get_cs_argument_count (args);
312
313 for (int i = 0; i < count; i++)
314 {
315 struct ipa_jump_func *jump_func;
316 enum jump_func_type type;
317
318 jump_func = ipa_get_ith_jump_func (args, i);
319 type = jump_func->type;
320
321 fprintf (f, " param %d: ", i);
322 if (type == IPA_JF_UNKNOWN)
323 fprintf (f, "UNKNOWN\n");
324 else if (type == IPA_JF_CONST)
325 {
326 tree val = jump_func->value.constant.value;
327 fprintf (f, "CONST: ");
328 print_generic_expr (f, val);
329 if (TREE_CODE (val)((enum tree_code) (val)->base.code) == ADDR_EXPR
330 && TREE_CODE (TREE_OPERAND (val, 0))((enum tree_code) ((*((const_cast<tree*> (tree_operand_check
((val), (0), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 330, __FUNCTION__))))))->base.code)
== CONST_DECL)
331 {
332 fprintf (f, " -> ");
333 print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0))((contains_struct_check (((*((const_cast<tree*> (tree_operand_check
((val), (0), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 333, __FUNCTION__)))))), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 333, __FUNCTION__))->decl_common.initial)
);
334 }
335 fprintf (f, "\n");
336 }
337 else if (type == IPA_JF_PASS_THROUGH)
338 {
339 fprintf (f, "PASS THROUGH: ");
340 fprintf (f, "%d, op %s",
341 jump_func->value.pass_through.formal_id,
342 get_tree_code_name(jump_func->value.pass_through.operation));
343 if (jump_func->value.pass_through.operation != NOP_EXPR)
344 {
345 fprintf (f, " ");
346 print_generic_expr (f, jump_func->value.pass_through.operand);
347 }
348 if (jump_func->value.pass_through.agg_preserved)
349 fprintf (f, ", agg_preserved");
350 fprintf (f, "\n");
351 }
352 else if (type == IPA_JF_ANCESTOR)
353 {
354 fprintf (f, "ANCESTOR: ");
355 fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC"%" "l" "d",
356 jump_func->value.ancestor.formal_id,
357 jump_func->value.ancestor.offset);
358 if (jump_func->value.ancestor.agg_preserved)
359 fprintf (f, ", agg_preserved");
360 if (jump_func->value.ancestor.keep_null)
361 fprintf (f, ", keep_null");
362 fprintf (f, "\n");
363 }
364
365 if (jump_func->agg.items)
366 {
367 struct ipa_agg_jf_item *item;
368 int j;
369
370 fprintf (f, " Aggregate passed by %s:\n",
371 jump_func->agg.by_ref ? "reference" : "value");
372 FOR_EACH_VEC_ELT (*jump_func->agg.items, j, item)for (j = 0; (*jump_func->agg.items).iterate ((j), &(item
)); ++(j))
373 {
374 fprintf (f, " offset: " HOST_WIDE_INT_PRINT_DEC"%" "l" "d" ", ",
375 item->offset);
376 fprintf (f, "type: ");
377 print_generic_expr (f, item->type);
378 fprintf (f, ", ");
379 if (item->jftype == IPA_JF_PASS_THROUGH)
380 fprintf (f, "PASS THROUGH: %d,",
381 item->value.pass_through.formal_id);
382 else if (item->jftype == IPA_JF_LOAD_AGG)
383 {
384 fprintf (f, "LOAD AGG: %d",
385 item->value.pass_through.formal_id);
386 fprintf (f, " [offset: " HOST_WIDE_INT_PRINT_DEC"%" "l" "d" ", by %s],",
387 item->value.load_agg.offset,
388 item->value.load_agg.by_ref ? "reference"
389 : "value");
390 }
391
392 if (item->jftype == IPA_JF_PASS_THROUGH
393 || item->jftype == IPA_JF_LOAD_AGG)
394 {
395 fprintf (f, " op %s",
396 get_tree_code_name (item->value.pass_through.operation));
397 if (item->value.pass_through.operation != NOP_EXPR)
398 {
399 fprintf (f, " ");
400 print_generic_expr (f, item->value.pass_through.operand);
401 }
402 }
403 else if (item->jftype == IPA_JF_CONST)
404 {
405 fprintf (f, "CONST: ");
406 print_generic_expr (f, item->value.constant);
407 }
408 else if (item->jftype == IPA_JF_UNKNOWN)
409 fprintf (f, "UNKNOWN: " HOST_WIDE_INT_PRINT_DEC"%" "l" "d" " bits",
410 tree_to_uhwi (TYPE_SIZE (item->type)((tree_class_check ((item->type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 410, __FUNCTION__))->type_common.size)
));
411 fprintf (f, "\n");
412 }
413 }
414
415 class ipa_polymorphic_call_context *ctx
416 = ipa_get_ith_polymorhic_call_context (args, i);
417 if (ctx && !ctx->useless_p ())
418 {
419 fprintf (f, " Context: ");
420 ctx->dump (dump_file);
421 }
422
423 if (jump_func->bits)
424 {
425 fprintf (f, " value: ");
426 print_hex (jump_func->bits->value, f);
427 fprintf (f, ", mask: ");
428 print_hex (jump_func->bits->mask, f);
429 fprintf (f, "\n");
430 }
431 else
432 fprintf (f, " Unknown bits\n");
433
434 if (jump_func->m_vr)
435 {
436 fprintf (f, " VR ");
437 fprintf (f, "%s[",
438 (jump_func->m_vr->kind () == VR_ANTI_RANGE) ? "~" : "");
439 print_decs (wi::to_wide (jump_func->m_vr->min ()), f);
440 fprintf (f, ", ");
441 print_decs (wi::to_wide (jump_func->m_vr->max ()), f);
442 fprintf (f, "]\n");
443 }
444 else
445 fprintf (f, " Unknown VR\n");
446 }
447}
448
449
450/* Print the jump functions of all arguments on all call graph edges going from
451 NODE to file F. */
452
453void
454ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
455{
456 struct cgraph_edge *cs;
457
458 fprintf (f, " Jump functions of caller %s:\n", node->dump_name ());
459 for (cs = node->callees; cs; cs = cs->next_callee)
460 {
461
462 fprintf (f, " callsite %s -> %s : \n",
463 node->dump_name (),
464 cs->callee->dump_name ());
465 if (!ipa_edge_args_info_available_for_edge_p (cs))
466 fprintf (f, " no arg info\n");
467 else
468 ipa_print_node_jump_functions_for_edge (f, cs);
469 }
470
471 for (cs = node->indirect_calls; cs; cs = cs->next_callee)
472 {
473 class cgraph_indirect_call_info *ii;
474
475 ii = cs->indirect_info;
476 if (ii->agg_contents)
477 fprintf (f, " indirect %s callsite, calling param %i, "
478 "offset " HOST_WIDE_INT_PRINT_DEC"%" "l" "d" ", %s",
479 ii->member_ptr ? "member ptr" : "aggregate",
480 ii->param_index, ii->offset,
481 ii->by_ref ? "by reference" : "by_value");
482 else
483 fprintf (f, " indirect %s callsite, calling param %i, "
484 "offset " HOST_WIDE_INT_PRINT_DEC"%" "l" "d",
485 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
486 ii->offset);
487
488 if (cs->call_stmt)
489 {
490 fprintf (f, ", for stmt ");
491 print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
492 }
493 else
494 fprintf (f, "\n");
495 if (ii->polymorphic)
496 ii->context.dump (f);
497 if (!ipa_edge_args_info_available_for_edge_p (cs))
498 fprintf (f, " no arg info\n");
499 else
500 ipa_print_node_jump_functions_for_edge (f, cs);
501 }
502}
503
504/* Print ipa_jump_func data structures of all nodes in the call graph to F. */
505
506void
507ipa_print_all_jump_functions (FILE *f)
508{
509 struct cgraph_node *node;
510
511 fprintf (f, "\nJump functions:\n");
512 FOR_EACH_FUNCTION (node)for ((node) = symtab->first_function (); (node); (node) = symtab
->next_function ((node)))
513 {
514 ipa_print_node_jump_functions (f, node);
515 }
516}
517
518/* Set jfunc to be a know-really nothing jump function. */
519
520static void
521ipa_set_jf_unknown (struct ipa_jump_func *jfunc)
522{
523 jfunc->type = IPA_JF_UNKNOWN;
524}
525
526/* Set JFUNC to be a copy of another jmp (to be used by jump function
527 combination code). The two functions will share their rdesc. */
528
529static void
530ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
531 struct ipa_jump_func *src)
532
533{
534 gcc_checking_assert (src->type == IPA_JF_CONST)((void)(!(src->type == IPA_JF_CONST) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 534, __FUNCTION__), 0 : 0))
;
535 dst->type = IPA_JF_CONST;
536 dst->value.constant = src->value.constant;
537}
538
539/* Set JFUNC to be a constant jmp function. */
540
541static void
542ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
543 struct cgraph_edge *cs)
544{
545 jfunc->type = IPA_JF_CONST;
546 jfunc->value.constant.value = unshare_expr_without_location (constant);
547
548 if (TREE_CODE (constant)((enum tree_code) (constant)->base.code) == ADDR_EXPR
28
Assuming field 'code' is not equal to ADDR_EXPR
549 && (TREE_CODE (TREE_OPERAND (constant, 0))((enum tree_code) ((*((const_cast<tree*> (tree_operand_check
((constant), (0), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 549, __FUNCTION__))))))->base.code)
== FUNCTION_DECL
550 || (TREE_CODE (TREE_OPERAND (constant, 0))((enum tree_code) ((*((const_cast<tree*> (tree_operand_check
((constant), (0), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 550, __FUNCTION__))))))->base.code)
== VAR_DECL
551 && TREE_STATIC (TREE_OPERAND (constant, 0))(((*((const_cast<tree*> (tree_operand_check ((constant)
, (0), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 551, __FUNCTION__))))))->base.static_flag)
)))
552 {
553 struct ipa_cst_ref_desc *rdesc;
554
555 rdesc = ipa_refdesc_pool.allocate ();
556 rdesc->cs = cs;
557 rdesc->next_duplicate = NULLnullptr;
558 rdesc->refcount = 1;
559 jfunc->value.constant.rdesc = rdesc;
560 }
561 else
562 jfunc->value.constant.rdesc = NULLnullptr;
563}
29
Returning without writing to 'jfunc->agg.items'
564
565/* Set JFUNC to be a simple pass-through jump function. */
566static void
567ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
568 bool agg_preserved)
569{
570 jfunc->type = IPA_JF_PASS_THROUGH;
571 jfunc->value.pass_through.operand = NULL_TREE(tree) nullptr;
572 jfunc->value.pass_through.formal_id = formal_id;
573 jfunc->value.pass_through.operation = NOP_EXPR;
574 jfunc->value.pass_through.agg_preserved = agg_preserved;
575}
576
577/* Set JFUNC to be an unary pass through jump function. */
578
579static void
580ipa_set_jf_unary_pass_through (struct ipa_jump_func *jfunc, int formal_id,
581 enum tree_code operation)
582{
583 jfunc->type = IPA_JF_PASS_THROUGH;
584 jfunc->value.pass_through.operand = NULL_TREE(tree) nullptr;
585 jfunc->value.pass_through.formal_id = formal_id;
586 jfunc->value.pass_through.operation = operation;
587 jfunc->value.pass_through.agg_preserved = false;
588}
589/* Set JFUNC to be an arithmetic pass through jump function. */
590
591static void
592ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
593 tree operand, enum tree_code operation)
594{
595 jfunc->type = IPA_JF_PASS_THROUGH;
596 jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
597 jfunc->value.pass_through.formal_id = formal_id;
598 jfunc->value.pass_through.operation = operation;
599 jfunc->value.pass_through.agg_preserved = false;
600}
601
602/* Set JFUNC to be an ancestor jump function. */
603
604static void
605ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INTlong offset,
606 int formal_id, bool agg_preserved, bool keep_null)
607{
608 jfunc->type = IPA_JF_ANCESTOR;
609 jfunc->value.ancestor.formal_id = formal_id;
610 jfunc->value.ancestor.offset = offset;
611 jfunc->value.ancestor.agg_preserved = agg_preserved;
612 jfunc->value.ancestor.keep_null = keep_null;
613}
614
615/* Get IPA BB information about the given BB. FBI is the context of analyzis
616 of this function body. */
617
618static struct ipa_bb_info *
619ipa_get_bb_info (struct ipa_func_body_info *fbi, basic_block bb)
620{
621 gcc_checking_assert (fbi)((void)(!(fbi) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 621, __FUNCTION__), 0 : 0))
;
622 return &fbi->bb_infos[bb->index];
623}
624
625/* Structure to be passed in between detect_type_change and
626 check_stmt_for_type_change. */
627
628struct prop_type_change_info
629{
630 /* Offset into the object where there is the virtual method pointer we are
631 looking for. */
632 HOST_WIDE_INTlong offset;
633 /* The declaration or SSA_NAME pointer of the base that we are checking for
634 type change. */
635 tree object;
636 /* Set to true if dynamic type change has been detected. */
637 bool type_maybe_changed;
638};
639
640/* Return true if STMT can modify a virtual method table pointer.
641
642 This function makes special assumptions about both constructors and
643 destructors which are all the functions that are allowed to alter the VMT
644 pointers. It assumes that destructors begin with assignment into all VMT
645 pointers and that constructors essentially look in the following way:
646
647 1) The very first thing they do is that they call constructors of ancestor
648 sub-objects that have them.
649
650 2) Then VMT pointers of this and all its ancestors is set to new values
651 corresponding to the type corresponding to the constructor.
652
653 3) Only afterwards, other stuff such as constructor of member sub-objects
654 and the code written by the user is run. Only this may include calling
655 virtual functions, directly or indirectly.
656
657 There is no way to call a constructor of an ancestor sub-object in any
658 other way.
659
660 This means that we do not have to care whether constructors get the correct
661 type information because they will always change it (in fact, if we define
662 the type to be given by the VMT pointer, it is undefined).
663
664 The most important fact to derive from the above is that if, for some
665 statement in the section 3, we try to detect whether the dynamic type has
666 changed, we can safely ignore all calls as we examine the function body
667 backwards until we reach statements in section 2 because these calls cannot
668 be ancestor constructors or destructors (if the input is not bogus) and so
669 do not change the dynamic type (this holds true only for automatically
670 allocated objects but at the moment we devirtualize only these). We then
671 must detect that statements in section 2 change the dynamic type and can try
672 to derive the new type. That is enough and we can stop, we will never see
673 the calls into constructors of sub-objects in this code. Therefore we can
674 safely ignore all call statements that we traverse.
675 */
676
677static bool
678stmt_may_be_vtbl_ptr_store (gimple *stmt)
679{
680 if (is_gimple_call (stmt))
681 return false;
682 if (gimple_clobber_p (stmt))
683 return false;
684 else if (is_gimple_assign (stmt))
685 {
686 tree lhs = gimple_assign_lhs (stmt);
687
688 if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs))(((enum tree_code) (((contains_struct_check ((lhs), (TS_TYPED
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 688, __FUNCTION__))->typed.type))->base.code) == ARRAY_TYPE
|| (((enum tree_code) (((contains_struct_check ((lhs), (TS_TYPED
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 688, __FUNCTION__))->typed.type))->base.code) == RECORD_TYPE
|| ((enum tree_code) (((contains_struct_check ((lhs), (TS_TYPED
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 688, __FUNCTION__))->typed.type))->base.code) == UNION_TYPE
|| ((enum tree_code) (((contains_struct_check ((lhs), (TS_TYPED
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 688, __FUNCTION__))->typed.type))->base.code) == QUAL_UNION_TYPE
))
)
689 {
690 if (flag_strict_aliasingglobal_options.x_flag_strict_aliasing
691 && !POINTER_TYPE_P (TREE_TYPE (lhs))(((enum tree_code) (((contains_struct_check ((lhs), (TS_TYPED
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 691, __FUNCTION__))->typed.type))->base.code) == POINTER_TYPE
|| ((enum tree_code) (((contains_struct_check ((lhs), (TS_TYPED
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 691, __FUNCTION__))->typed.type))->base.code) == REFERENCE_TYPE
)
)
692 return false;
693
694 if (TREE_CODE (lhs)((enum tree_code) (lhs)->base.code) == COMPONENT_REF
695 && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1))((contains_struct_check (((*((const_cast<tree*> (tree_operand_check
((lhs), (1), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 695, __FUNCTION__)))))), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 695, __FUNCTION__))->decl_common.virtual_flag)
)
696 return false;
697 /* In the future we might want to use get_ref_base_and_extent to find
698 if there is a field corresponding to the offset and if so, proceed
699 almost like if it was a component ref. */
700 }
701 }
702 return true;
703}
704
705/* Callback of walk_aliased_vdefs and a helper function for detect_type_change
706 to check whether a particular statement may modify the virtual table
707 pointerIt stores its result into DATA, which points to a
708 prop_type_change_info structure. */
709
710static bool
711check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED__attribute__ ((__unused__)), tree vdef, void *data)
712{
713 gimple *stmt = SSA_NAME_DEF_STMT (vdef)(tree_check ((vdef), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 713, __FUNCTION__, (SSA_NAME)))->ssa_name.def_stmt
;
714 struct prop_type_change_info *tci = (struct prop_type_change_info *) data;
715
716 if (stmt_may_be_vtbl_ptr_store (stmt))
717 {
718 tci->type_maybe_changed = true;
719 return true;
720 }
721 else
722 return false;
723}
724
725/* See if ARG is PARAM_DECl describing instance passed by pointer
726 or reference in FUNCTION. Return false if the dynamic type may change
727 in between beggining of the function until CALL is invoked.
728
729 Generally functions are not allowed to change type of such instances,
730 but they call destructors. We assume that methods cannot destroy the THIS
731 pointer. Also as a special cases, constructor and destructors may change
732 type of the THIS pointer. */
733
734static bool
735param_type_may_change_p (tree function, tree arg, gimple *call)
736{
737 /* Pure functions cannot do any changes on the dynamic type;
738 that require writting to memory. */
739 if (flags_from_decl_or_type (function) & (ECF_PURE(1 << 1) | ECF_CONST(1 << 0)))
740 return false;
741 /* We need to check if we are within inlined consturctor
742 or destructor (ideally we would have way to check that the
743 inline cdtor is actually working on ARG, but we don't have
744 easy tie on this, so punt on all non-pure cdtors.
745 We may also record the types of cdtors and once we know type
746 of the instance match them.
747
748 Also code unification optimizations may merge calls from
749 different blocks making return values unreliable. So
750 do nothing during late optimization. */
751 if (DECL_STRUCT_FUNCTION (function)((tree_check ((function), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 751, __FUNCTION__, (FUNCTION_DECL)))->function_decl.f)
->after_inlining)
752 return true;
753 if (TREE_CODE (arg)((enum tree_code) (arg)->base.code) == SSA_NAME
754 && SSA_NAME_IS_DEFAULT_DEF (arg)(tree_check ((arg), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 754, __FUNCTION__, (SSA_NAME)))->base.default_def_flag
755 && TREE_CODE (SSA_NAME_VAR (arg))((enum tree_code) (((tree_check ((arg), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 755, __FUNCTION__, (SSA_NAME)))->ssa_name.var == (tree) nullptr
|| ((enum tree_code) ((arg)->ssa_name.var)->base.code)
== IDENTIFIER_NODE ? (tree) nullptr : (arg)->ssa_name.var
))->base.code)
== PARM_DECL)
756 {
757 /* Normal (non-THIS) argument. */
758 if ((SSA_NAME_VAR (arg)((tree_check ((arg), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 758, __FUNCTION__, (SSA_NAME)))->ssa_name.var == (tree) nullptr
|| ((enum tree_code) ((arg)->ssa_name.var)->base.code)
== IDENTIFIER_NODE ? (tree) nullptr : (arg)->ssa_name.var
)
!= DECL_ARGUMENTS (function)((tree_check ((function), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 758, __FUNCTION__, (FUNCTION_DECL)))->function_decl.arguments
)
759 || TREE_CODE (TREE_TYPE (function))((enum tree_code) (((contains_struct_check ((function), (TS_TYPED
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 759, __FUNCTION__))->typed.type))->base.code)
!= METHOD_TYPE)
760 /* THIS pointer of an method - here we want to watch constructors
761 and destructors as those definitely may change the dynamic
762 type. */
763 || (TREE_CODE (TREE_TYPE (function))((enum tree_code) (((contains_struct_check ((function), (TS_TYPED
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 763, __FUNCTION__))->typed.type))->base.code)
== METHOD_TYPE
764 && !DECL_CXX_CONSTRUCTOR_P (function)((tree_check ((function), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 764, __FUNCTION__, (FUNCTION_DECL)))->decl_with_vis.cxx_constructor
)
765 && !DECL_CXX_DESTRUCTOR_P (function)((tree_check ((function), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 765, __FUNCTION__, (FUNCTION_DECL)))->decl_with_vis.cxx_destructor
)
766 && (SSA_NAME_VAR (arg)((tree_check ((arg), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 766, __FUNCTION__, (SSA_NAME)))->ssa_name.var == (tree) nullptr
|| ((enum tree_code) ((arg)->ssa_name.var)->base.code)
== IDENTIFIER_NODE ? (tree) nullptr : (arg)->ssa_name.var
)
== DECL_ARGUMENTS (function)((tree_check ((function), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 766, __FUNCTION__, (FUNCTION_DECL)))->function_decl.arguments
)
)))
767 {
768 /* Walk the inline stack and watch out for ctors/dtors. */
769 for (tree block = gimple_block (call); block && TREE_CODE (block)((enum tree_code) (block)->base.code) == BLOCK;
770 block = BLOCK_SUPERCONTEXT (block)((tree_check ((block), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 770, __FUNCTION__, (BLOCK)))->block.supercontext)
)
771 if (inlined_polymorphic_ctor_dtor_block_p (block, false))
772 return true;
773 return false;
774 }
775 }
776 return true;
777}
778
779/* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
780 callsite CALL) by looking for assignments to its virtual table pointer. If
781 it is, return true. ARG is the object itself (not a pointer
782 to it, unless dereferenced). BASE is the base of the memory access as
783 returned by get_ref_base_and_extent, as is the offset.
784
785 This is helper function for detect_type_change and detect_type_change_ssa
786 that does the heavy work which is usually unnecesary. */
787
788static bool
789detect_type_change_from_memory_writes (ipa_func_body_info *fbi, tree arg,
790 tree base, tree comp_type, gcall *call,
791 HOST_WIDE_INTlong offset)
792{
793 struct prop_type_change_info tci;
794 ao_ref ao;
795
796 gcc_checking_assert (DECL_P (arg)((void)(!((tree_code_type_tmpl <0>::tree_code_type[(int
) (((enum tree_code) (arg)->base.code))] == tcc_declaration
) || ((enum tree_code) (arg)->base.code) == MEM_REF || handled_component_p
(arg)) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 798, __FUNCTION__), 0 : 0))
797 || TREE_CODE (arg) == MEM_REF((void)(!((tree_code_type_tmpl <0>::tree_code_type[(int
) (((enum tree_code) (arg)->base.code))] == tcc_declaration
) || ((enum tree_code) (arg)->base.code) == MEM_REF || handled_component_p
(arg)) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 798, __FUNCTION__), 0 : 0))
798 || handled_component_p (arg))((void)(!((tree_code_type_tmpl <0>::tree_code_type[(int
) (((enum tree_code) (arg)->base.code))] == tcc_declaration
) || ((enum tree_code) (arg)->base.code) == MEM_REF || handled_component_p
(arg)) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 798, __FUNCTION__), 0 : 0))
;
799
800 comp_type = TYPE_MAIN_VARIANT (comp_type)((tree_class_check ((comp_type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 800, __FUNCTION__))->type_common.main_variant)
;
801
802 /* Const calls cannot call virtual methods through VMT and so type changes do
803 not matter. */
804 if (!flag_devirtualizeglobal_options.x_flag_devirtualize || !gimple_vuse (call)
805 /* Be sure expected_type is polymorphic. */
806 || !comp_type
807 || TREE_CODE (comp_type)((enum tree_code) (comp_type)->base.code) != RECORD_TYPE
808 || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))((tree_check3 ((((tree_class_check ((comp_type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 808, __FUNCTION__))->type_common.main_variant)), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 808, __FUNCTION__, (RECORD_TYPE), (UNION_TYPE), (QUAL_UNION_TYPE
)))->type_non_common.maxval)
809 || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type)))((tree_check ((((tree_check3 ((((tree_class_check ((comp_type
), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 809, __FUNCTION__))->type_common.main_variant)), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 809, __FUNCTION__, (RECORD_TYPE), (UNION_TYPE), (QUAL_UNION_TYPE
)))->type_non_common.maxval)), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 809, __FUNCTION__, (TREE_BINFO)))->binfo.vtable)
)
810 return true;
811
812 if (fbi->aa_walk_budget == 0)
813 return false;
814
815 ao_ref_init (&ao, arg);
816 ao.base = base;
817 ao.offset = offset;
818 ao.size = POINTER_SIZE(((global_options.x_ix86_isa_flags & (1UL << 58)) !=
0) ? 32 : ((8) * (((global_options.x_ix86_isa_flags & (1UL
<< 1)) != 0) ? 8 : 4)))
;
819 ao.max_size = ao.size;
820
821 tci.offset = offset;
822 tci.object = get_base_address (arg);
823 tci.type_maybe_changed = false;
824
825 int walked
826 = walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
827 &tci, NULLnullptr, NULLnullptr, fbi->aa_walk_budget);
828 if (walked >= 0)
829 fbi->aa_walk_budget -= walked;
830 else
831 fbi->aa_walk_budget = 0;
832
833 if (walked >= 0 && !tci.type_maybe_changed)
834 return false;
835
836 return true;
837}
838
839/* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
840 If it is, return true. ARG is the object itself (not a pointer
841 to it, unless dereferenced). BASE is the base of the memory access as
842 returned by get_ref_base_and_extent, as is the offset. */
843
844static bool
845detect_type_change (ipa_func_body_info *fbi, tree arg, tree base,
846 tree comp_type, gcall *call,
847 HOST_WIDE_INTlong offset)
848{
849 if (!flag_devirtualizeglobal_options.x_flag_devirtualize)
850 return false;
851
852 if (TREE_CODE (base)((enum tree_code) (base)->base.code) == MEM_REF
853 && !param_type_may_change_p (current_function_decl,
854 TREE_OPERAND (base, 0)(*((const_cast<tree*> (tree_operand_check ((base), (0),
"/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 854, __FUNCTION__)))))
,
855 call))
856 return false;
857 return detect_type_change_from_memory_writes (fbi, arg, base, comp_type,
858 call, offset);
859}
860
861/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
862 SSA name (its dereference will become the base and the offset is assumed to
863 be zero). */
864
865static bool
866detect_type_change_ssa (ipa_func_body_info *fbi, tree arg, tree comp_type,
867 gcall *call)
868{
869 gcc_checking_assert (TREE_CODE (arg) == SSA_NAME)((void)(!(((enum tree_code) (arg)->base.code) == SSA_NAME)
? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 869, __FUNCTION__), 0 : 0))
;
870 if (!flag_devirtualizeglobal_options.x_flag_devirtualize
871 || !POINTER_TYPE_P (TREE_TYPE (arg))(((enum tree_code) (((contains_struct_check ((arg), (TS_TYPED
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 871, __FUNCTION__))->typed.type))->base.code) == POINTER_TYPE
|| ((enum tree_code) (((contains_struct_check ((arg), (TS_TYPED
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 871, __FUNCTION__))->typed.type))->base.code) == REFERENCE_TYPE
)
)
872 return false;
873
874 if (!param_type_may_change_p (current_function_decl, arg, call))
875 return false;
876
877 arg = build2 (MEM_REF, ptr_type_nodeglobal_trees[TI_PTR_TYPE], arg,
878 build_int_cst (ptr_type_nodeglobal_trees[TI_PTR_TYPE], 0));
879
880 return detect_type_change_from_memory_writes (fbi, arg, arg, comp_type,
881 call, 0);
882}
883
884/* Callback of walk_aliased_vdefs. Flags that it has been invoked to the
885 boolean variable pointed to by DATA. */
886
887static bool
888mark_modified (ao_ref *ao ATTRIBUTE_UNUSED__attribute__ ((__unused__)), tree vdef ATTRIBUTE_UNUSED__attribute__ ((__unused__)),
889 void *data)
890{
891 bool *b = (bool *) data;
892 *b = true;
893 return true;
894}
895
896/* Find the nearest valid aa status for parameter specified by INDEX that
897 dominates BB. */
898
899static struct ipa_param_aa_status *
900find_dominating_aa_status (struct ipa_func_body_info *fbi, basic_block bb,
901 int index)
902{
903 while (true)
904 {
905 bb = get_immediate_dominator (CDI_DOMINATORS, bb);
906 if (!bb)
907 return NULLnullptr;
908 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
909 if (!bi->param_aa_statuses.is_empty ()
910 && bi->param_aa_statuses[index].valid)
911 return &bi->param_aa_statuses[index];
912 }
913}
914
915/* Get AA status structure for the given BB and parameter with INDEX. Allocate
916 structures and/or intialize the result with a dominating description as
917 necessary. */
918
919static struct ipa_param_aa_status *
920parm_bb_aa_status_for_bb (struct ipa_func_body_info *fbi, basic_block bb,
921 int index)
922{
923 gcc_checking_assert (fbi)((void)(!(fbi) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 923, __FUNCTION__), 0 : 0))
;
924 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
925 if (bi->param_aa_statuses.is_empty ())
926 bi->param_aa_statuses.safe_grow_cleared (fbi->param_count, true);
927 struct ipa_param_aa_status *paa = &bi->param_aa_statuses[index];
928 if (!paa->valid)
929 {
930 gcc_checking_assert (!paa->parm_modified((void)(!(!paa->parm_modified && !paa->ref_modified
&& !paa->pt_modified) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 932, __FUNCTION__), 0 : 0))
931 && !paa->ref_modified((void)(!(!paa->parm_modified && !paa->ref_modified
&& !paa->pt_modified) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 932, __FUNCTION__), 0 : 0))
932 && !paa->pt_modified)((void)(!(!paa->parm_modified && !paa->ref_modified
&& !paa->pt_modified) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 932, __FUNCTION__), 0 : 0))
;
933 struct ipa_param_aa_status *dom_paa;
934 dom_paa = find_dominating_aa_status (fbi, bb, index);
935 if (dom_paa)
936 *paa = *dom_paa;
937 else
938 paa->valid = true;
939 }
940
941 return paa;
942}
943
944/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
945 a value known not to be modified in this function before reaching the
946 statement STMT. FBI holds information about the function we have so far
947 gathered but do not survive the summary building stage. */
948
949static bool
950parm_preserved_before_stmt_p (struct ipa_func_body_info *fbi, int index,
951 gimple *stmt, tree parm_load)
952{
953 struct ipa_param_aa_status *paa;
954 bool modified = false;
955 ao_ref refd;
956
957 tree base = get_base_address (parm_load);
958 gcc_assert (TREE_CODE (base) == PARM_DECL)((void)(!(((enum tree_code) (base)->base.code) == PARM_DECL
) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 958, __FUNCTION__), 0 : 0))
;
959 if (TREE_READONLY (base)((non_type_check ((base), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 959, __FUNCTION__))->base.readonly_flag)
)
960 return true;
961
962 gcc_checking_assert (fbi)((void)(!(fbi) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 962, __FUNCTION__), 0 : 0))
;
963 paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
964 if (paa->parm_modified || fbi->aa_walk_budget == 0)
965 return false;
966
967 gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE)((void)(!(gimple_vuse (stmt) != (tree) nullptr) ? fancy_abort
("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 967, __FUNCTION__), 0 : 0))
;
968 ao_ref_init (&refd, parm_load);
969 int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
970 &modified, NULLnullptr, NULLnullptr,
971 fbi->aa_walk_budget);
972 if (walked < 0)
973 {
974 modified = true;
975 fbi->aa_walk_budget = 0;
976 }
977 else
978 fbi->aa_walk_budget -= walked;
979 if (paa && modified)
980 paa->parm_modified = true;
981 return !modified;
982}
983
984/* If STMT is an assignment that loads a value from an parameter declaration,
985 return the index of the parameter in ipa_node_params which has not been
986 modified. Otherwise return -1. */
987
988static int
989load_from_unmodified_param (struct ipa_func_body_info *fbi,
990 vec<ipa_param_descriptor, va_gc> *descriptors,
991 gimple *stmt)
992{
993 int index;
994 tree op1;
995
996 if (!gimple_assign_single_p (stmt))
997 return -1;
998
999 op1 = gimple_assign_rhs1 (stmt);
1000 if (TREE_CODE (op1)((enum tree_code) (op1)->base.code) != PARM_DECL)
1001 return -1;
1002
1003 index = ipa_get_param_decl_index_1 (descriptors, op1);
1004 if (index < 0
1005 || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
1006 return -1;
1007
1008 return index;
1009}
1010
1011/* Return true if memory reference REF (which must be a load through parameter
1012 with INDEX) loads data that are known to be unmodified in this function
1013 before reaching statement STMT. */
1014
1015static bool
1016parm_ref_data_preserved_p (struct ipa_func_body_info *fbi,
1017 int index, gimple *stmt, tree ref)
1018{
1019 struct ipa_param_aa_status *paa;
1020 bool modified = false;
1021 ao_ref refd;
1022
1023 gcc_checking_assert (fbi)((void)(!(fbi) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 1023, __FUNCTION__), 0 : 0))
;
1024 paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
1025 if (paa->ref_modified || fbi->aa_walk_budget == 0)
1026 return false;
1027
1028 gcc_checking_assert (gimple_vuse (stmt))((void)(!(gimple_vuse (stmt)) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 1028, __FUNCTION__), 0 : 0))
;
1029 ao_ref_init (&refd, ref);
1030 int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
1031 &modified, NULLnullptr, NULLnullptr,
1032 fbi->aa_walk_budget);
1033 if (walked < 0)
1034 {
1035 modified = true;
1036 fbi->aa_walk_budget = 0;
1037 }
1038 else
1039 fbi->aa_walk_budget -= walked;
1040 if (modified)
1041 paa->ref_modified = true;
1042 return !modified;
1043}
1044
1045/* Return true if the data pointed to by PARM (which is a parameter with INDEX)
1046 is known to be unmodified in this function before reaching call statement
1047 CALL into which it is passed. FBI describes the function body. */
1048
1049static bool
1050parm_ref_data_pass_through_p (struct ipa_func_body_info *fbi, int index,
1051 gimple *call, tree parm)
1052{
1053 bool modified = false;
1054 ao_ref refd;
1055
1056 /* It's unnecessary to calculate anything about memory contnets for a const
1057 function because it is not goin to use it. But do not cache the result
1058 either. Also, no such calculations for non-pointers. */
1059 if (!gimple_vuse (call)
1060 || !POINTER_TYPE_P (TREE_TYPE (parm))(((enum tree_code) (((contains_struct_check ((parm), (TS_TYPED
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 1060, __FUNCTION__))->typed.type))->base.code) == POINTER_TYPE
|| ((enum tree_code) (((contains_struct_check ((parm), (TS_TYPED
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 1060, __FUNCTION__))->typed.type))->base.code) == REFERENCE_TYPE
)
)
1061 return false;
1062
1063 struct ipa_param_aa_status *paa = parm_bb_aa_status_for_bb (fbi,
1064 gimple_bb (call),
1065 index);
1066 if (paa->pt_modified || fbi->aa_walk_budget == 0)
1067 return false;
1068
1069 ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE(tree) nullptr);
1070 int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
1071 &modified, NULLnullptr, NULLnullptr,
1072 fbi->aa_walk_budget);
1073 if (walked < 0)
1074 {
1075 fbi->aa_walk_budget = 0;
1076 modified = true;
1077 }
1078 else
1079 fbi->aa_walk_budget -= walked;
1080 if (modified)
1081 paa->pt_modified = true;
1082 return !modified;
1083}
1084
1085/* Return true if we can prove that OP is a memory reference loading
1086 data from an aggregate passed as a parameter.
1087
1088 The function works in two modes. If GUARANTEED_UNMODIFIED is NULL, it return
1089 false if it cannot prove that the value has not been modified before the
1090 load in STMT. If GUARANTEED_UNMODIFIED is not NULL, it will return true even
1091 if it cannot prove the value has not been modified, in that case it will
1092 store false to *GUARANTEED_UNMODIFIED, otherwise it will store true there.
1093
1094 INFO and PARMS_AINFO describe parameters of the current function (but the
1095 latter can be NULL), STMT is the load statement. If function returns true,
1096 *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index, offset
1097 within the aggregate and whether it is a load from a value passed by
1098 reference respectively.
1099
1100 Return false if the offset divided by BITS_PER_UNIT would not fit into an
1101 unsigned int. */
1102
1103bool
1104ipa_load_from_parm_agg (struct ipa_func_body_info *fbi,
1105 vec<ipa_param_descriptor, va_gc> *descriptors,
1106 gimple *stmt, tree op, int *index_p,
1107 HOST_WIDE_INTlong *offset_p, poly_int64 *size_p,
1108 bool *by_ref_p, bool *guaranteed_unmodified)
1109{
1110 int index;
1111 HOST_WIDE_INTlong size;
1112 bool reverse;
1113 tree base = get_ref_base_and_extent_hwi (op, offset_p, &size, &reverse);
1114
1115 if (!base
1116 || (*offset_p / BITS_PER_UNIT(8)) > UINT_MAX(2147483647 *2U +1U))
1117 return false;
1118
1119 /* We can not propagate across volatile loads. */
1120 if (TREE_THIS_VOLATILE (op)((op)->base.volatile_flag))
1121 return false;
1122
1123 if (DECL_P (base)(tree_code_type_tmpl <0>::tree_code_type[(int) (((enum tree_code
) (base)->base.code))] == tcc_declaration)
)
1124 {
1125 int index = ipa_get_param_decl_index_1 (descriptors, base);
1126 if (index >= 0
1127 && parm_preserved_before_stmt_p (fbi, index, stmt, op))
1128 {
1129 *index_p = index;
1130 *by_ref_p = false;
1131 if (size_p)
1132 *size_p = size;
1133 if (guaranteed_unmodified)
1134 *guaranteed_unmodified = true;
1135 return true;
1136 }
1137 return false;
1138 }
1139
1140 if (TREE_CODE (base)((enum tree_code) (base)->base.code) != MEM_REF
1141 || TREE_CODE (TREE_OPERAND (base, 0))((enum tree_code) ((*((const_cast<tree*> (tree_operand_check
((base), (0), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 1141, __FUNCTION__))))))->base.code)
!= SSA_NAME
1142 || !integer_zerop (TREE_OPERAND (base, 1)(*((const_cast<tree*> (tree_operand_check ((base), (1),
"/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 1142, __FUNCTION__)))))
))
1143 return false;
1144
1145 if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0))(tree_check (((*((const_cast<tree*> (tree_operand_check
((base), (0), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 1145, __FUNCTION__)))))), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 1145, __FUNCTION__, (SSA_NAME)))->base.default_def_flag
)
1146 {
1147 tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0))((tree_check (((*((const_cast<tree*> (tree_operand_check
((base), (0), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 1147, __FUNCTION__)))))), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 1147, __FUNCTION__, (SSA_NAME)))->ssa_name.var == (tree)
nullptr || ((enum tree_code) (((*((const_cast<tree*> (
tree_operand_check ((base), (0), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 1147, __FUNCTION__))))))->ssa_name.var)->base.code) ==
IDENTIFIER_NODE ? (tree) nullptr : ((*((const_cast<tree*>
(tree_operand_check ((base), (0), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 1147, __FUNCTION__))))))->ssa_name.var)
;
1148 index = ipa_get_param_decl_index_1 (descriptors, parm);
1149 }
1150 else
1151 {
1152 /* This branch catches situations where a pointer parameter is not a
1153 gimple register, for example:
1154
1155 void hip7(S*) (struct S * p)
1156 {
1157 void (*<T2e4>) (struct S *) D.1867;
1158 struct S * p.1;
1159
1160 <bb 2>:
1161 p.1_1 = p;
1162 D.1867_2 = p.1_1->f;
1163 D.1867_2 ();
1164 gdp = &p;
1165 */
1166
1167 gimple *def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0))(tree_check (((*((const_cast<tree*> (tree_operand_check
((base), (0), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 1167, __FUNCTION__)))))), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 1167, __FUNCTION__, (SSA_NAME)))->ssa_name.def_stmt
;
1168 index = load_from_unmodified_param (fbi, descriptors, def);
1169 }
1170
1171 if (index >= 0)
1172 {
1173 bool data_preserved = parm_ref_data_preserved_p (fbi, index, stmt, op);
1174 if (!data_preserved && !guaranteed_unmodified)
1175 return false;
1176
1177 *index_p = index;
1178 *by_ref_p = true;
1179 if (size_p)
1180 *size_p = size;
1181 if (guaranteed_unmodified)
1182 *guaranteed_unmodified = data_preserved;
1183 return true;
1184 }
1185 return false;
1186}
1187
1188/* If STMT is an assignment that loads a value from a parameter declaration,
1189 or from an aggregate passed as the parameter either by value or reference,
1190 return the index of the parameter in ipa_node_params. Otherwise return -1.
1191
1192 FBI holds gathered information about the function. INFO describes
1193 parameters of the function, STMT is the assignment statement. If it is a
1194 memory load from an aggregate, *OFFSET_P is filled with offset within the
1195 aggregate, and *BY_REF_P specifies whether the aggregate is passed by
1196 reference. */
1197
1198static int
1199load_from_unmodified_param_or_agg (struct ipa_func_body_info *fbi,
1200 class ipa_node_params *info,
1201 gimple *stmt,
1202 HOST_WIDE_INTlong *offset_p,
1203 bool *by_ref_p)
1204{
1205 int index = load_from_unmodified_param (fbi, info->descriptors, stmt);
1206 poly_int64 size;
1207
1208 /* Load value from a parameter declaration. */
1209 if (index >= 0)
1210 {
1211 *offset_p = -1;
1212 return index;
1213 }
1214
1215 if (!gimple_assign_load_p (stmt))
1216 return -1;
1217
1218 tree rhs = gimple_assign_rhs1 (stmt);
1219
1220 /* Skip memory reference containing VIEW_CONVERT_EXPR. */
1221 for (tree t = rhs; handled_component_p (t); t = TREE_OPERAND (t, 0)(*((const_cast<tree*> (tree_operand_check ((t), (0), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 1221, __FUNCTION__)))))
)
1222 if (TREE_CODE (t)((enum tree_code) (t)->base.code) == VIEW_CONVERT_EXPR)
1223 return -1;
1224
1225 /* Skip memory reference containing bit-field. */
1226 if (TREE_CODE (rhs)((enum tree_code) (rhs)->base.code) == BIT_FIELD_REF
1227 || contains_bitfld_component_ref_p (rhs))
1228 return -1;
1229
1230 if (!ipa_load_from_parm_agg (fbi, info->descriptors, stmt, rhs, &index,
1231 offset_p, &size, by_ref_p))
1232 return -1;
1233
1234 gcc_assert (!maybe_ne (tree_to_poly_int64 (TYPE_SIZE (TREE_TYPE (rhs))),((void)(!(!maybe_ne (tree_to_poly_int64 (((tree_class_check (
(((contains_struct_check ((rhs), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 1234, __FUNCTION__))->typed.type)), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 1234, __FUNCTION__))->type_common.size)), size)) ? fancy_abort
("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 1235, __FUNCTION__), 0 : 0))
1235 size))((void)(!(!maybe_ne (tree_to_poly_int64 (((tree_class_check (
(((contains_struct_check ((rhs), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 1234, __FUNCTION__))->typed.type)), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 1234, __FUNCTION__))->type_common.size)), size)) ? fancy_abort
("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 1235, __FUNCTION__), 0 : 0))
;
1236 if (!*by_ref_p)
1237 {
1238 tree param_type = ipa_get_type (info, index);
1239
1240 if (!param_type || !AGGREGATE_TYPE_P (param_type)(((enum tree_code) (param_type)->base.code) == ARRAY_TYPE ||
(((enum tree_code) (param_type)->base.code) == RECORD_TYPE
|| ((enum tree_code) (param_type)->base.code) == UNION_TYPE
|| ((enum tree_code) (param_type)->base.code) == QUAL_UNION_TYPE
))
)
1241 return -1;
1242 }
1243 else if (TREE_THIS_VOLATILE (rhs)((rhs)->base.volatile_flag))
1244 return -1;
1245
1246 return index;
1247}
1248
1249/* Walk pointer adjustemnts from OP (such as POINTER_PLUS and ADDR_EXPR)
1250 to find original pointer. Initialize RET to the pointer which results from
1251 the walk.
1252 If offset is known return true and initialize OFFSET_RET. */
1253
1254bool
1255unadjusted_ptr_and_unit_offset (tree op, tree *ret, poly_int64 *offset_ret)
1256{
1257 poly_int64 offset = 0;
1258 bool offset_known = true;
1259 int i;
1260
1261 for (i = 0; i < param_ipa_jump_function_lookupsglobal_options.x_param_ipa_jump_function_lookups; i++)
1262 {
1263 if (TREE_CODE (op)((enum tree_code) (op)->base.code) == ADDR_EXPR)
1264 {
1265 poly_int64 extra_offset = 0;
1266 tree base = get_addr_base_and_unit_offset (TREE_OPERAND (op, 0)(*((const_cast<tree*> (tree_operand_check ((op), (0), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 1266, __FUNCTION__)))))
,
1267 &offset);
1268 if (!base)
1269 {
1270 base = get_base_address (TREE_OPERAND (op, 0)(*((const_cast<tree*> (tree_operand_check ((op), (0), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 1270, __FUNCTION__)))))
);
1271 if (TREE_CODE (base)((enum tree_code) (base)->base.code) != MEM_REF)
1272 break;
1273 offset_known = false;
1274 }
1275 else
1276 {
1277 if (TREE_CODE (base)((enum tree_code) (base)->base.code) != MEM_REF)
1278 break;
1279 offset += extra_offset;
1280 }
1281 op = TREE_OPERAND (base, 0)(*((const_cast<tree*> (tree_operand_check ((base), (0),
"/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 1281, __FUNCTION__)))))
;
1282 if (mem_ref_offset (base).to_shwi (&extra_offset))
1283 offset += extra_offset;
1284 else
1285 offset_known = false;
1286 }
1287 else if (TREE_CODE (op)((enum tree_code) (op)->base.code) == SSA_NAME
1288 && !SSA_NAME_IS_DEFAULT_DEF (op)(tree_check ((op), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 1288, __FUNCTION__, (SSA_NAME)))->base.default_def_flag
)
1289 {
1290 gimple *pstmt = SSA_NAME_DEF_STMT (op)(tree_check ((op), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 1290, __FUNCTION__, (SSA_NAME)))->ssa_name.def_stmt
;
1291
1292 if (gimple_assign_single_p (pstmt))
1293 op = gimple_assign_rhs1 (pstmt);
1294 else if (is_gimple_assign (pstmt)
1295 && gimple_assign_rhs_code (pstmt) == POINTER_PLUS_EXPR)
1296 {
1297 poly_int64 extra_offset = 0;
1298 if (ptrdiff_tree_p (gimple_assign_rhs2 (pstmt),
1299 &extra_offset))
1300 offset += extra_offset;
1301 else
1302 offset_known = false;
1303 op = gimple_assign_rhs1 (pstmt);
1304 }
1305 else
1306 break;
1307 }
1308 else
1309 break;
1310 }
1311 *ret = op;
1312 *offset_ret = offset;
1313 return offset_known;
1314}
1315
1316/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
1317 of an assignment statement STMT, try to determine whether we are actually
1318 handling any of the following cases and construct an appropriate jump
1319 function into JFUNC if so:
1320
1321 1) The passed value is loaded from a formal parameter which is not a gimple
1322 register (most probably because it is addressable, the value has to be
1323 scalar) and we can guarantee the value has not changed. This case can
1324 therefore be described by a simple pass-through jump function. For example:
1325
1326 foo (int a)
1327 {
1328 int a.0;
1329
1330 a.0_2 = a;
1331 bar (a.0_2);
1332
1333 2) The passed value can be described by a simple arithmetic pass-through
1334 jump function. E.g.
1335
1336 foo (int a)
1337 {
1338 int D.2064;
1339
1340 D.2064_4 = a.1(D) + 4;
1341 bar (D.2064_4);
1342
1343 This case can also occur in combination of the previous one, e.g.:
1344
1345 foo (int a, int z)
1346 {
1347 int a.0;
1348 int D.2064;
1349
1350 a.0_3 = a;
1351 D.2064_4 = a.0_3 + 4;
1352 foo (D.2064_4);
1353
1354 3) The passed value is an address of an object within another one (which
1355 also passed by reference). Such situations are described by an ancestor
1356 jump function and describe situations such as:
1357
1358 B::foo() (struct B * const this)
1359 {
1360 struct A * D.1845;
1361
1362 D.1845_2 = &this_1(D)->D.1748;
1363 A::bar (D.1845_2);
1364
1365 INFO is the structure describing individual parameters access different
1366 stages of IPA optimizations. PARMS_AINFO contains the information that is
1367 only needed for intraprocedural analysis. */
1368
static void
compute_complex_assign_jump_func (struct ipa_func_body_info *fbi,
				  class ipa_node_params *info,
				  struct ipa_jump_func *jfunc,
				  gcall *call, gimple *stmt, tree name,
				  tree param_type)
{
  HOST_WIDE_INT offset, size;
  tree op1, tc_ssa, base, ssa;
  bool reverse;
  int index;

  op1 = gimple_assign_rhs1 (stmt);

  if (TREE_CODE (op1) == SSA_NAME)
    {
      /* RHS is an SSA name: either it directly names a formal parameter
	 (default definition), or it may be the result of a load from an
	 unmodified parameter.  */
      if (SSA_NAME_IS_DEFAULT_DEF (op1))
	index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
      else
	index = load_from_unmodified_param (fbi, info->descriptors,
					    SSA_NAME_DEF_STMT (op1));
      tc_ssa = op1;
    }
  else
    {
      /* Otherwise check whether STMT itself is a load from an unmodified
	 parameter; the LHS then carries the loaded value.  */
      index = load_from_unmodified_param (fbi, info->descriptors, stmt);
      tc_ssa = gimple_assign_lhs (stmt);
    }

  if (index >= 0)
    {
      /* The value traces back to parameter INDEX; classify the statement
	 to pick the right flavor of pass-through jump function.  */
      switch (gimple_assign_rhs_class (stmt))
	{
	case GIMPLE_BINARY_RHS:
	  {
	    tree op2 = gimple_assign_rhs2 (stmt);
	    /* The second operand must be interprocedurally invariant, and
	       for non-comparisons the result type must match the operand
	       type (comparisons produce a boolean regardless).  */
	    if (!is_gimple_ip_invariant (op2)
		|| ((TREE_CODE_CLASS (gimple_assign_rhs_code (stmt))
		     != tcc_comparison)
		    && !useless_type_conversion_p (TREE_TYPE (name),
						   TREE_TYPE (op1))))
	      return;

	    ipa_set_jf_arith_pass_through (jfunc, index, op2,
					   gimple_assign_rhs_code (stmt));
	    break;
	  }
	case GIMPLE_SINGLE_RHS:
	  {
	    bool agg_p = parm_ref_data_pass_through_p (fbi, index, call,
						       tc_ssa);
	    ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
	    break;
	  }
	case GIMPLE_UNARY_RHS:
	  /* Conversions are not described as unary pass-throughs; anything
	     else unary is.  Falls through to the empty default.  */
	  if (!CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)))
	    ipa_set_jf_unary_pass_through (jfunc, index,
					   gimple_assign_rhs_code (stmt));
	default:;
	}
      return;
    }

  /* Not a pass-through; try to match taking the address of a part of an
     object pointed to by a parameter (an ancestor jump function).  */
  if (TREE_CODE (op1) != ADDR_EXPR)
    return;
  op1 = TREE_OPERAND (op1, 0);
  base = get_ref_base_and_extent_hwi (op1, &offset, &size, &reverse);
  offset_int mem_offset;
  if (!base
      || TREE_CODE (base) != MEM_REF
      || !mem_ref_offset (base).is_constant (&mem_offset))
    return;
  offset += mem_offset.to_short_addr () * BITS_PER_UNIT;
  ssa = TREE_OPERAND (base, 0);
  if (TREE_CODE (ssa) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (ssa)
      || offset < 0)
    return;

  /* Dynamic types are changed in constructors and destructors.  */
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
  if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
    ipa_set_ancestor_jf (jfunc, offset, index,
			 parm_ref_data_pass_through_p (fbi, index, call, ssa),
			 false);
}
1455
1456/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
1457 it looks like:
1458
1459 iftmp.1_3 = &obj_2(D)->D.1762;
1460
1461 The base of the MEM_REF must be a default definition SSA NAME of a
1462 parameter. Return NULL_TREE if it looks otherwise. If case of success, the
1463 whole MEM_REF expression is returned and the offset calculated from any
1464 handled components and the MEM_REF itself is stored into *OFFSET. The whole
1465 RHS stripped off the ADDR_EXPR is stored into *OBJ_P. */
1466
static tree
get_ancestor_addr_info (gimple *assign, tree *obj_p, HOST_WIDE_INT *offset)
{
  HOST_WIDE_INT size;
  tree expr, parm, obj;
  bool reverse;

  /* Only a plain single-RHS assignment can match the pattern.  */
  if (!gimple_assign_single_p (assign))
    return NULL_TREE;
  expr = gimple_assign_rhs1 (assign);

  if (TREE_CODE (expr) != ADDR_EXPR)
    return NULL_TREE;
  expr = TREE_OPERAND (expr, 0);
  /* OBJ keeps the full reference (ADDR_EXPR stripped); EXPR is walked down
     to its base below.  */
  obj = expr;
  expr = get_ref_base_and_extent_hwi (expr, offset, &size, &reverse);

  offset_int mem_offset;
  if (!expr
      || TREE_CODE (expr) != MEM_REF
      || !mem_ref_offset (expr).is_constant (&mem_offset))
    return NULL_TREE;
  parm = TREE_OPERAND (expr, 0);
  /* The MEM_REF base must be the default-definition SSA name of a formal
     parameter, otherwise the ancestor pattern does not apply.  */
  if (TREE_CODE (parm) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (parm)
      || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
    return NULL_TREE;

  /* *OFFSET accumulates both the handled components (already stored by
     get_ref_base_and_extent_hwi) and the MEM_REF's own offset, in bits.  */
  *offset += mem_offset.to_short_addr () * BITS_PER_UNIT;
  *obj_p = obj;
  return expr;
}
1499
1500
1501/* Given that an actual argument is an SSA_NAME that is a result of a phi
1502 statement PHI, try to find out whether NAME is in fact a
1503 multiple-inheritance typecast from a descendant into an ancestor of a formal
1504 parameter and thus can be described by an ancestor jump function and if so,
1505 write the appropriate function into JFUNC.
1506
1507 Essentially we want to match the following pattern:
1508
1509 if (obj_2(D) != 0B)
1510 goto <bb 3>;
1511 else
1512 goto <bb 4>;
1513
1514 <bb 3>:
1515 iftmp.1_3 = &obj_2(D)->D.1762;
1516
1517 <bb 4>:
1518 # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
1519 D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
1520 return D.1879_6; */
1521
static void
compute_complex_ancestor_jump_func (struct ipa_func_body_info *fbi,
				    class ipa_node_params *info,
				    struct ipa_jump_func *jfunc,
				    gcall *call, gphi *phi)
{
  HOST_WIDE_INT offset;
  gimple *assign, *cond;
  basic_block phi_bb, assign_bb, cond_bb;
  tree tmp, parm, expr, obj;
  int index, i;

  if (gimple_phi_num_args (phi) != 2)
    return;

  /* Exactly one PHI argument must be the zero pointer; TMP is the other
     (the typecast result coming from the non-NULL branch).  */
  if (integer_zerop (PHI_ARG_DEF (phi, 1)))
    tmp = PHI_ARG_DEF (phi, 0);
  else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
    tmp = PHI_ARG_DEF (phi, 1);
  else
    return;
  /* TMP must be a non-default-def SSA name of pointer-to-record type —
     i.e. something computed in this function from a class pointer.  */
  if (TREE_CODE (tmp) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (tmp)
      || !POINTER_TYPE_P (TREE_TYPE (tmp))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
    return;

  assign = SSA_NAME_DEF_STMT (tmp);
  assign_bb = gimple_bb (assign);
  if (!single_pred_p (assign_bb))
    return;
  /* The defining statement must take the address of an ancestor part of a
     formal parameter; EXPR is the matched MEM_REF, OFFSET the bit offset.  */
  expr = get_ancestor_addr_info (assign, &obj, &offset);
  if (!expr)
    return;
  parm = TREE_OPERAND (expr, 0);
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
  if (index < 0)
    return;

  /* The block computing the cast must be guarded by exactly the
     "parm != 0" comparison from the expected pattern.  */
  cond_bb = single_pred (assign_bb);
  cond = last_stmt (cond_bb);
  if (!cond
      || gimple_code (cond) != GIMPLE_COND
      || gimple_cond_code (cond) != NE_EXPR
      || gimple_cond_lhs (cond) != parm
      || !integer_zerop (gimple_cond_rhs (cond)))
    return;

  /* Both PHI predecessors must be the guard block or the cast block; any
     other incoming edge could bring an unrelated value.  */
  phi_bb = gimple_bb (phi);
  for (i = 0; i < 2; i++)
    {
      basic_block pred = EDGE_PRED (phi_bb, i)->src;
      if (pred != assign_bb && pred != cond_bb)
	return;
    }

  ipa_set_ancestor_jf (jfunc, offset, index,
		       parm_ref_data_pass_through_p (fbi, index, call, parm),
		       true);
}
1582
1583/* Inspect the given TYPE and return true iff it has the same structure (the
1584 same number of fields of the same types) as a C++ member pointer. If
1585 METHOD_PTR and DELTA are non-NULL, store the trees representing the
1586 corresponding fields there. */
1587
1588static bool
1589type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
1590{
1591 tree fld;
1592
1593 if (TREE_CODE (type)((enum tree_code) (type)->base.code) != RECORD_TYPE)
1594 return false;
1595
1596 fld = TYPE_FIELDS (type)((tree_check3 ((type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 1596, __FUNCTION__, (RECORD_TYPE), (UNION_TYPE), (QUAL_UNION_TYPE
)))->type_non_common.values)
;
1597 if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))(((enum tree_code) (((contains_struct_check ((fld), (TS_TYPED
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 1597, __FUNCTION__))->typed.type))->base.code) == POINTER_TYPE
|| ((enum tree_code) (((contains_struct_check ((fld), (TS_TYPED
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 1597, __FUNCTION__))->typed.type))->base.code) == REFERENCE_TYPE
)
1598 || TREE_CODE (TREE_TYPE (TREE_TYPE (fld)))((enum tree_code) (((contains_struct_check ((((contains_struct_check
((fld), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 1598, __FUNCTION__))->typed.type)), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 1598, __FUNCTION__))->typed.type))->base.code)
!= METHOD_TYPE
1599 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)((tree_check ((fld), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 1599, __FUNCTION__, (FIELD_DECL)))->field_decl.offset)
))
1600 return false;
1601
1602 if (method_ptr)
1603 *method_ptr = fld;
1604
1605 fld = DECL_CHAIN (fld)(((contains_struct_check (((contains_struct_check ((fld), (TS_DECL_MINIMAL
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 1605, __FUNCTION__))), (TS_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 1605, __FUNCTION__))->common.chain))
;
1606 if (!fld || INTEGRAL_TYPE_P (fld)(((enum tree_code) (fld)->base.code) == ENUMERAL_TYPE || (
(enum tree_code) (fld)->base.code) == BOOLEAN_TYPE || ((enum
tree_code) (fld)->base.code) == INTEGER_TYPE)
1607 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)((tree_check ((fld), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 1607, __FUNCTION__, (FIELD_DECL)))->field_decl.offset)
))
1608 return false;
1609 if (delta)
1610 *delta = fld;
1611
1612 if (DECL_CHAIN (fld)(((contains_struct_check (((contains_struct_check ((fld), (TS_DECL_MINIMAL
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 1612, __FUNCTION__))), (TS_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 1612, __FUNCTION__))->common.chain))
)
1613 return false;
1614
1615 return true;
1616}
1617
1618/* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
1619 return the rhs of its defining statement, and this statement is stored in
1620 *RHS_STMT. Otherwise return RHS as it is. */
1621
1622static inline tree
1623get_ssa_def_if_simple_copy (tree rhs, gimple **rhs_stmt)
1624{
1625 while (TREE_CODE (rhs)((enum tree_code) (rhs)->base.code) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs)(tree_check ((rhs), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 1625, __FUNCTION__, (SSA_NAME)))->base.default_def_flag
)
1626 {
1627 gimple *def_stmt = SSA_NAME_DEF_STMT (rhs)(tree_check ((rhs), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 1627, __FUNCTION__, (SSA_NAME)))->ssa_name.def_stmt
;
1628
1629 if (gimple_assign_single_p (def_stmt))
1630 rhs = gimple_assign_rhs1 (def_stmt);
1631 else
1632 break;
1633 *rhs_stmt = def_stmt;
1634 }
1635 return rhs;
1636}
1637
1638/* Simple linked list, describing contents of an aggregate before call. */
1639
struct ipa_known_agg_contents_list
{
  /* Offset and size (in bits) of the described part of the aggregate.  */
  HOST_WIDE_INT offset, size;

  /* Type of the described part of the aggregate.  */
  tree type;

  /* Known constant value or jump function data describing contents.  */
  struct ipa_load_agg_data value;

  /* Pointer to the next structure in the list; elements are kept sorted
     ascendingly by OFFSET (see add_to_agg_contents_list).  */
  struct ipa_known_agg_contents_list *next;
};
1654
1655/* Add an aggregate content item into a linked list of
1656 ipa_known_agg_contents_list structure, in which all elements
1657 are sorted ascendingly by offset. */
1658
1659static inline void
1660add_to_agg_contents_list (struct ipa_known_agg_contents_list **plist,
1661 struct ipa_known_agg_contents_list *item)
1662{
1663 struct ipa_known_agg_contents_list *list = *plist;
1664
1665 for (; list; list = list->next)
50
Loop condition is false. Execution continues on line 1673
1666 {
1667 if (list->offset >= item->offset)
1668 break;
1669
1670 plist = &list->next;
1671 }
1672
1673 item->next = list;
1674 *plist = item;
51
The value of 'item' is assigned to 'list', which participates in a condition later
1675}
52
Returning without writing to '(*plist)->value.pass_through.operand', which participates in a condition later
1676
1677/* Check whether a given aggregate content is clobbered by certain element in
1678 a linked list of ipa_known_agg_contents_list. */
1679
1680static inline bool
1681clobber_by_agg_contents_list_p (struct ipa_known_agg_contents_list *list,
1682 struct ipa_known_agg_contents_list *item)
1683{
1684 for (; list; list = list->next)
1685 {
1686 if (list->offset >= item->offset)
1687 return list->offset < item->offset + item->size;
1688
1689 if (list->offset + list->size > item->offset)
1690 return true;
1691 }
1692
1693 return false;
1694}
1695
1696/* Build aggregate jump function from LIST, assuming there are exactly
1697 VALUE_COUNT entries there and that offset of the passed argument
1698 is ARG_OFFSET and store it into JFUNC. */
1699
1700static void
1701build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
1702 int value_count, HOST_WIDE_INTlong arg_offset,
1703 struct ipa_jump_func *jfunc)
1704{
1705 vec_safe_reserve (jfunc->agg.items, value_count, true);
60
Calling 'vec_safe_reserve<ipa_agg_jf_item, va_gc>'
69
Returning from 'vec_safe_reserve<ipa_agg_jf_item, va_gc>'
1706 for (; list; list = list->next)
70
Loop condition is true. Entering loop body
1707 {
1708 struct ipa_agg_jf_item item;
1709 tree operand = list->value.pass_through.operand;
1710
1711 if (list->value.pass_through.formal_id
70.1
Field 'formal_id' is < 0
70.1
Field 'formal_id' is < 0
>= 0)
71
Taking false branch
1712 {
1713 /* Content value is derived from some formal paramerter. */
1714 if (list->value.offset >= 0)
1715 item.jftype = IPA_JF_LOAD_AGG;
1716 else
1717 item.jftype = IPA_JF_PASS_THROUGH;
1718
1719 item.value.load_agg = list->value;
1720 if (operand)
1721 item.value.pass_through.operand
1722 = unshare_expr_without_location (operand);
1723 }
1724 else if (operand
71.1
'operand' is non-null
71.1
'operand' is non-null
)
72
Taking true branch
1725 {
1726 /* Content value is known constant. */
1727 item.jftype = IPA_JF_CONST;
1728 item.value.constant = unshare_expr_without_location (operand);
1729 }
1730 else
1731 continue;
1732
1733 item.type = list->type;
1734 gcc_assert (tree_to_shwi (TYPE_SIZE (list->type)) == list->size)((void)(!(tree_to_shwi (((tree_class_check ((list->type), (
tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 1734, __FUNCTION__))->type_common.size)) == list->size
) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 1734, __FUNCTION__), 0 : 0))
;
73
Assuming the condition is true
74
'?' condition is false
1735
1736 item.offset = list->offset - arg_offset;
1737 gcc_assert ((item.offset % BITS_PER_UNIT) == 0)((void)(!((item.offset % (8)) == 0) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 1737, __FUNCTION__), 0 : 0))
;
75
Assuming the condition is true
76
'?' condition is false
1738
1739 jfunc->agg.items->quick_push (item);
77
Called C++ object pointer is null
1740 }
1741}
1742
1743/* Given an assignment statement STMT, try to collect information into
1744 AGG_VALUE that will be used to construct jump function for RHS of the
1745 assignment, from which content value of an aggregate part comes.
1746
1747 Besides constant and simple pass-through jump functions, also try to
1748 identify whether it matches the following pattern that can be described by
1749 a load-value-from-aggregate jump function, which is a derivative of simple
1750 pass-through jump function.
1751
1752 foo (int *p)
1753 {
1754 ...
1755
1756 *(q_5 + 4) = *(p_3(D) + 28) op 1;
1757 bar (q_5);
1758 }
1759
1760 Here IPA_LOAD_AGG_DATA data structure is informative enough to describe
1761 constant, simple pass-through and load-vale-from-aggregate. If value
1762 is constant, it will be kept in field OPERAND, and field FORMAL_ID is
1763 set to -1. For simple pass-through and load-value-from-aggregate, field
1764 FORMAL_ID specifies the related formal parameter index, and field
1765 OFFSET can be used to distinguish them, -1 means simple pass-through,
1766 otherwise means load-value-from-aggregate. */
1767
static void
analyze_agg_content_value (struct ipa_func_body_info *fbi,
			   struct ipa_load_agg_data *agg_value,
			   gimple *stmt)
{
  tree lhs = gimple_assign_lhs (stmt);
  tree rhs1 = gimple_assign_rhs1 (stmt);
  enum tree_code code;
  int index = -1;

  /* Initialize jump function data for the aggregate part: NOP_EXPR with
     formal_id -1 and offset -1 is the "nothing known" state.  */
  memset (agg_value, 0, sizeof (*agg_value));
  agg_value->pass_through.operation = NOP_EXPR;
  agg_value->pass_through.formal_id = -1;
  agg_value->offset = -1;

  if (AGGREGATE_TYPE_P (TREE_TYPE (lhs))  /* TODO: Support aggregate type.  */
      || TREE_THIS_VOLATILE (lhs)
      || TREE_CODE (lhs) == BIT_FIELD_REF
      || contains_bitfld_component_ref_p (lhs))
    return;

  /* Skip SSA copies.  */
  while (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
    {
      if (TREE_CODE (rhs1) != SSA_NAME || SSA_NAME_IS_DEFAULT_DEF (rhs1))
	break;

      stmt = SSA_NAME_DEF_STMT (rhs1);
      if (!is_gimple_assign (stmt))
	break;

      rhs1 = gimple_assign_rhs1 (stmt);
    }

  if (gphi *phi = dyn_cast<gphi *> (stmt))
    {
      /* Also special case like the following (a is a formal parameter):

	   _12 = *a_11(D).dim[0].stride;
	   ...
	   # iftmp.22_9 = PHI <_12(2), 1(3)>
	   ...
	   parm.6.dim[0].stride = iftmp.22_9;
	   ...
	   __x_MOD_foo (&parm.6, b_31(D));

	 The aggregate function describing parm.6.dim[0].stride is encoded as
	 a PASS-THROUGH jump function with ASSERT_EXPR operation whith
	 operand 1 (the constant from the PHI node).  */

      if (gimple_phi_num_args (phi) != 2)
	return;
      tree arg0 = gimple_phi_arg_def (phi, 0);
      tree arg1 = gimple_phi_arg_def (phi, 1);
      tree operand;

      /* Exactly one PHI argument must be an IPA invariant; the other is
	 traced further back below.  */
      if (is_gimple_ip_invariant (arg1))
	{
	  operand = arg1;
	  rhs1 = arg0;
	}
      else if (is_gimple_ip_invariant (arg0))
	{
	  operand = arg0;
	  rhs1 = arg1;
	}
      else
	return;

      rhs1 = get_ssa_def_if_simple_copy (rhs1, &stmt);
      if (!is_gimple_assign (stmt))
	return;

      code = ASSERT_EXPR;
      agg_value->pass_through.operand = operand;
    }
  else if (is_gimple_assign (stmt))
    {
      code = gimple_assign_rhs_code (stmt);
      switch (gimple_assign_rhs_class (stmt))
	{
	case GIMPLE_SINGLE_RHS:
	  if (is_gimple_ip_invariant (rhs1))
	    {
	      /* Known constant: keep it in OPERAND, formal_id stays -1.  */
	      agg_value->pass_through.operand = rhs1;
	      return;
	    }
	  code = NOP_EXPR;
	  break;

	case GIMPLE_UNARY_RHS:
	  /* NOTE: A GIMPLE_UNARY_RHS operation might not be tcc_unary
	     (truth_not_expr is example), GIMPLE_BINARY_RHS does not imply
	     tcc_binary, this subtleness is somewhat misleading.

	     Since tcc_unary is widely used in IPA-CP code to check an
	     operation with one operand, here we only allow tc_unary
	     operation to avoid possible problem.  Then we can use
	     (opclass == tc_unary) or not to distinguish unary and
	     binary.  */
	  if (TREE_CODE_CLASS (code) != tcc_unary || CONVERT_EXPR_CODE_P (code))
	    return;

	  rhs1 = get_ssa_def_if_simple_copy (rhs1, &stmt);
	  break;

	case GIMPLE_BINARY_RHS:
	  {
	    gimple *rhs1_stmt = stmt;
	    gimple *rhs2_stmt = stmt;
	    tree rhs2 = gimple_assign_rhs2 (stmt);

	    rhs1 = get_ssa_def_if_simple_copy (rhs1, &rhs1_stmt);
	    rhs2 = get_ssa_def_if_simple_copy (rhs2, &rhs2_stmt);

	    /* One operand must be invariant; if it is the first one, the
	       operation is canonicalized (comparison swapped, or the code
	       must be commutative) so the variable operand comes first.  */
	    if (is_gimple_ip_invariant (rhs2))
	      {
		agg_value->pass_through.operand = rhs2;
		stmt = rhs1_stmt;
	      }
	    else if (is_gimple_ip_invariant (rhs1))
	      {
		if (TREE_CODE_CLASS (code) == tcc_comparison)
		  code = swap_tree_comparison (code);
		else if (!commutative_tree_code (code))
		  return;

		agg_value->pass_through.operand = rhs1;
		stmt = rhs2_stmt;
		rhs1 = rhs2;
	      }
	    else
	      return;

	    if (TREE_CODE_CLASS (code) != tcc_comparison
		&& !useless_type_conversion_p (TREE_TYPE (lhs),
					       TREE_TYPE (rhs1)))
	      return;
	  }
	  break;

	default:
	  return;
	}
    }
  else
    return;

  /* Trace the non-invariant operand back to a formal parameter, either as a
     load from an unmodified parameter/aggregate or directly as the
     parameter's default definition.  */
  if (TREE_CODE (rhs1) != SSA_NAME)
    index = load_from_unmodified_param_or_agg (fbi, fbi->info, stmt,
					       &agg_value->offset,
					       &agg_value->by_ref);
  else if (SSA_NAME_IS_DEFAULT_DEF (rhs1))
    index = ipa_get_param_decl_index (fbi->info, SSA_NAME_VAR (rhs1));

  if (index >= 0)
    {
      /* A non-negative offset means load-from-aggregate (record the loaded
	 type); -1 means simple pass-through.  */
      if (agg_value->offset >= 0)
	agg_value->type = TREE_TYPE (rhs1);
      agg_value->pass_through.formal_id = index;
      agg_value->pass_through.operation = code;
    }
  else
    agg_value->pass_through.operand = NULL_TREE;
}
1933
1934/* If STMT is a memory store to the object whose address is BASE, extract
1935 information (offset, size, and value) into CONTENT, and return true,
1936 otherwise we conservatively assume the whole object is modified with
1937 unknown content, and return false. CHECK_REF means that access to object
1938 is expected to be in form of MEM_REF expression. */
1939
1940static bool
1941extract_mem_content (struct ipa_func_body_info *fbi,
1942 gimple *stmt, tree base, bool check_ref,
1943 struct ipa_known_agg_contents_list *content)
1944{
1945 HOST_WIDE_INTlong lhs_offset, lhs_size;
1946 bool reverse;
1947
1948 if (!is_gimple_assign (stmt))
1949 return false;
1950
1951 tree lhs = gimple_assign_lhs (stmt);
1952 tree lhs_base = get_ref_base_and_extent_hwi (lhs, &lhs_offset, &lhs_size,
1953 &reverse);
1954 if (!lhs_base)
1955 return false;
1956
1957 if (check_ref)
1958 {
1959 if (TREE_CODE (lhs_base)((enum tree_code) (lhs_base)->base.code) != MEM_REF
1960 || TREE_OPERAND (lhs_base, 0)(*((const_cast<tree*> (tree_operand_check ((lhs_base), (
0), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 1960, __FUNCTION__)))))
!= base
1961 || !integer_zerop (TREE_OPERAND (lhs_base, 1)(*((const_cast<tree*> (tree_operand_check ((lhs_base), (
1), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 1961, __FUNCTION__)))))
))
1962 return false;
1963 }
1964 else if (lhs_base != base)
1965 return false;
1966
1967 content->offset = lhs_offset;
1968 content->size = lhs_size;
1969 content->type = TREE_TYPE (lhs)((contains_struct_check ((lhs), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 1969, __FUNCTION__))->typed.type)
;
1970 content->next = NULLnullptr;
1971
1972 analyze_agg_content_value (fbi, &content->value, stmt);
1973 return true;
1974}
1975
1976/* Traverse statements from CALL backwards, scanning whether an aggregate given
1977 in ARG is filled in constants or values that are derived from caller's
1978 formal parameter in the way described by some kinds of jump functions. FBI
1979 is the context of the caller function for interprocedural analysis. ARG can
1980 either be an aggregate expression or a pointer to an aggregate. ARG_TYPE is
1981 the type of the aggregate, JFUNC is the jump function for the aggregate. */
1982
1983static void
1984determine_known_aggregate_parts (struct ipa_func_body_info *fbi,
1985 gcall *call, tree arg,
1986 tree arg_type,
1987 struct ipa_jump_func *jfunc)
1988{
1989 struct ipa_known_agg_contents_list *list = NULLnullptr, *all_list = NULLnullptr;
1990 bitmap visited = NULLnullptr;
1991 int item_count = 0, value_count = 0;
1992 HOST_WIDE_INTlong arg_offset, arg_size;
1993 tree arg_base;
1994 bool check_ref, by_ref;
1995 ao_ref r;
1996 int max_agg_items = opt_for_fn (fbi->node->decl, param_ipa_max_agg_items)(opts_for_fn (fbi->node->decl)->x_param_ipa_max_agg_items
)
;
1997
1998 if (max_agg_items == 0)
34
Assuming 'max_agg_items' is not equal to 0
1999 return;
2000
2001 /* The function operates in three stages. First, we prepare check_ref, r,
2002 arg_base and arg_offset based on what is actually passed as an actual
2003 argument. */
2004
2005 if (POINTER_TYPE_P (arg_type)(((enum tree_code) (arg_type)->base.code) == POINTER_TYPE ||
((enum tree_code) (arg_type)->base.code) == REFERENCE_TYPE
)
)
35
Taking false branch
36
Taking false branch
2006 {
2007 by_ref = true;
2008 if (TREE_CODE (arg)((enum tree_code) (arg)->base.code) == SSA_NAME)
2009 {
2010 tree type_size;
2011 if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type))((tree_class_check ((((contains_struct_check ((arg_type), (TS_TYPED
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 2011, __FUNCTION__))->typed.type)), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 2011, __FUNCTION__))->type_common.size)
)
2012 || !POINTER_TYPE_P (TREE_TYPE (arg))(((enum tree_code) (((contains_struct_check ((arg), (TS_TYPED
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 2012, __FUNCTION__))->typed.type))->base.code) == POINTER_TYPE
|| ((enum tree_code) (((contains_struct_check ((arg), (TS_TYPED
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 2012, __FUNCTION__))->typed.type))->base.code) == REFERENCE_TYPE
)
)
2013 return;
2014 check_ref = true;
2015 arg_base = arg;
2016 arg_offset = 0;
2017 type_size = TYPE_SIZE (TREE_TYPE (arg_type))((tree_class_check ((((contains_struct_check ((arg_type), (TS_TYPED
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 2017, __FUNCTION__))->typed.type)), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 2017, __FUNCTION__))->type_common.size)
;
2018 arg_size = tree_to_uhwi (type_size);
2019 ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE(tree) nullptr);
2020 }
2021 else if (TREE_CODE (arg)((enum tree_code) (arg)->base.code) == ADDR_EXPR)
2022 {
2023 bool reverse;
2024
2025 arg = TREE_OPERAND (arg, 0)(*((const_cast<tree*> (tree_operand_check ((arg), (0), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 2025, __FUNCTION__)))))
;
2026 arg_base = get_ref_base_and_extent_hwi (arg, &arg_offset,
2027 &arg_size, &reverse);
2028 if (!arg_base)
2029 return;
2030 if (DECL_P (arg_base)(tree_code_type_tmpl <0>::tree_code_type[(int) (((enum tree_code
) (arg_base)->base.code))] == tcc_declaration)
)
2031 {
2032 check_ref = false;
2033 ao_ref_init (&r, arg_base);
2034 }
2035 else
2036 return;
2037 }
2038 else
2039 return;
2040 }
2041 else
2042 {
2043 bool reverse;
2044
2045 gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)))((void)(!((((enum tree_code) (((contains_struct_check ((arg),
(TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 2045, __FUNCTION__))->typed.type))->base.code) == ARRAY_TYPE
|| (((enum tree_code) (((contains_struct_check ((arg), (TS_TYPED
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 2045, __FUNCTION__))->typed.type))->base.code) == RECORD_TYPE
|| ((enum tree_code) (((contains_struct_check ((arg), (TS_TYPED
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 2045, __FUNCTION__))->typed.type))->base.code) == UNION_TYPE
|| ((enum tree_code) (((contains_struct_check ((arg), (TS_TYPED
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 2045, __FUNCTION__))->typed.type))->base.code) == QUAL_UNION_TYPE
))) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 2045, __FUNCTION__), 0 : 0))
;
37
'?' condition is false
2046
2047 by_ref = false;
2048 check_ref = false;
2049 arg_base = get_ref_base_and_extent_hwi (arg, &arg_offset,
2050 &arg_size, &reverse);
2051 if (!arg_base)
38
Assuming 'arg_base' is non-null
39
Taking false branch
2052 return;
2053
2054 ao_ref_init (&r, arg);
2055 }
2056
2057 /* Second stage traverses virtual SSA web backwards starting from the call
2058 statement, only looks at individual dominating virtual operand (its
2059 definition dominates the call), as long as it is confident that content
2060 of the aggregate is affected by definition of the virtual operand, it
2061 builds a sorted linked list of ipa_agg_jf_list describing that. */
2062
2063 for (tree dom_vuse = gimple_vuse (call);
42
Loop condition is true. Entering loop body
2064 dom_vuse && fbi->aa_walk_budget > 0;)
40
Assuming 'dom_vuse' is non-null
41
Assuming field 'aa_walk_budget' is > 0
2065 {
2066 gimple *stmt = SSA_NAME_DEF_STMT (dom_vuse)(tree_check ((dom_vuse), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 2066, __FUNCTION__, (SSA_NAME)))->ssa_name.def_stmt
;
2067
2068 if (gimple_code (stmt) == GIMPLE_PHI)
43
Assuming the condition is false
44
Taking false branch
2069 {
2070 dom_vuse = get_continuation_for_phi (stmt, &r, true,
2071 fbi->aa_walk_budget,
2072 &visited, false, NULLnullptr, NULLnullptr);
2073 continue;
2074 }
2075
2076 fbi->aa_walk_budget--;
2077 if (stmt_may_clobber_ref_p_1 (stmt, &r))
45
Assuming the condition is true
46
Taking true branch
2078 {
2079 struct ipa_known_agg_contents_list *content
2080 = XALLOCA (struct ipa_known_agg_contents_list)((struct ipa_known_agg_contents_list *) __builtin_alloca(sizeof
(struct ipa_known_agg_contents_list)))
;
2081
2082 if (!extract_mem_content (fbi, stmt, arg_base, check_ref, content))
2083 break;
2084
2085 /* Now we get a dominating virtual operand, and need to check
2086 whether its value is clobbered any other dominating one. */
2087 if ((content->value.pass_through.formal_id
46.1
Field 'formal_id' is < 0
46.1
Field 'formal_id' is < 0
>= 0
48
Taking true branch
2088 || content->value.pass_through.operand)
47
Assuming field 'operand' is non-null
2089 && !clobber_by_agg_contents_list_p (all_list, content))
2090 {
2091 struct ipa_known_agg_contents_list *copy
2092 = XALLOCA (struct ipa_known_agg_contents_list)((struct ipa_known_agg_contents_list *) __builtin_alloca(sizeof
(struct ipa_known_agg_contents_list)))
;
2093
2094 /* Add to the list consisting of only dominating virtual
2095 operands, whose definitions can finally reach the call. */
2096 add_to_agg_contents_list (&list, (*copy = *content, copy));
49
Calling 'add_to_agg_contents_list'
53
Returning from 'add_to_agg_contents_list'
2097
2098 if (++value_count == max_agg_items)
54
Assuming the condition is true
55
Taking true branch
2099 break;
2100 }
2101
2102 /* Add to the list consisting of all dominating virtual operands. */
2103 add_to_agg_contents_list (&all_list, content);
2104
2105 if (++item_count == 2 * max_agg_items)
2106 break;
2107 }
2108 dom_vuse = gimple_vuse (stmt);
2109 }
2110
2111 if (visited
56.1
'visited' is null
56.1
'visited' is null
)
56
Execution continues on line 2111
57
Taking false branch
2112 BITMAP_FREE (visited)((void) (bitmap_obstack_free ((bitmap) visited), (visited) = (
bitmap) nullptr))
;
2113
2114 /* Third stage just goes over the list and creates an appropriate vector of
2115 ipa_agg_jf_item structures out of it, of course only if there are
2116 any meaningful items to begin with. */
2117
2118 if (value_count
57.1
'value_count' is 1
57.1
'value_count' is 1
)
58
Taking true branch
2119 {
2120 jfunc->agg.by_ref = by_ref;
2121 build_agg_jump_func_from_list (list, value_count, arg_offset, jfunc);
59
Calling 'build_agg_jump_func_from_list'
2122 }
2123}
2124
2125
2126/* Return the Ith param type of callee associated with call graph
2127 edge E. */
2128
2129tree
2130ipa_get_callee_param_type (struct cgraph_edge *e, int i)
2131{
2132 int n;
2133 tree type = (e->callee
2134 ? TREE_TYPE (e->callee->decl)((contains_struct_check ((e->callee->decl), (TS_TYPED),
"/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 2134, __FUNCTION__))->typed.type)
2135 : gimple_call_fntype (e->call_stmt));
2136 tree t = TYPE_ARG_TYPES (type)((tree_check2 ((type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 2136, __FUNCTION__, (FUNCTION_TYPE), (METHOD_TYPE)))->type_non_common
.values)
;
2137
2138 for (n = 0; n < i; n++)
2139 {
2140 if (!t)
2141 break;
2142 t = TREE_CHAIN (t)((contains_struct_check ((t), (TS_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 2142, __FUNCTION__))->common.chain)
;
2143 }
2144 if (t)
2145 return TREE_VALUE (t)((tree_check ((t), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 2145, __FUNCTION__, (TREE_LIST)))->list.value)
;
2146 if (!e->callee)
2147 return NULLnullptr;
2148 t = DECL_ARGUMENTS (e->callee->decl)((tree_check ((e->callee->decl), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 2148, __FUNCTION__, (FUNCTION_DECL)))->function_decl.arguments
)
;
2149 for (n = 0; n < i; n++)
2150 {
2151 if (!t)
2152 return NULLnullptr;
2153 t = TREE_CHAIN (t)((contains_struct_check ((t), (TS_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 2153, __FUNCTION__))->common.chain)
;
2154 }
2155 if (t)
2156 return TREE_TYPE (t)((contains_struct_check ((t), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 2156, __FUNCTION__))->typed.type)
;
2157 return NULLnullptr;
2158}
2159
2160/* Return ipa_bits with VALUE and MASK values, which can be either a newly
2161 allocated structure or a previously existing one shared with other jump
2162 functions and/or transformation summaries. */
2163
2164ipa_bits *
2165ipa_get_ipa_bits_for_value (const widest_int &value, const widest_int &mask)
2166{
2167 ipa_bits tmp;
2168 tmp.value = value;
2169 tmp.mask = mask;
2170
2171 ipa_bits **slot = ipa_bits_hash_table->find_slot (&tmp, INSERT);
2172 if (*slot)
2173 return *slot;
2174
2175 ipa_bits *res = ggc_alloc<ipa_bits> ();
2176 res->value = value;
2177 res->mask = mask;
2178 *slot = res;
2179
2180 return res;
2181}
2182
2183/* Assign to JF a pointer to ipa_bits structure with VALUE and MASK. Use hash
2184 table in order to avoid creating multiple same ipa_bits structures. */
2185
2186static void
2187ipa_set_jfunc_bits (ipa_jump_func *jf, const widest_int &value,
2188 const widest_int &mask)
2189{
2190 jf->bits = ipa_get_ipa_bits_for_value (value, mask);
2191}
2192
2193/* Return a pointer to a value_range just like *TMP, but either find it in
2194 ipa_vr_hash_table or allocate it in GC memory. TMP->equiv must be NULL. */
2195
2196static value_range *
2197ipa_get_value_range (value_range *tmp)
2198{
2199 value_range **slot = ipa_vr_hash_table->find_slot (tmp, INSERT);
2200 if (*slot)
2201 return *slot;
2202
2203 value_range *vr = new (ggc_alloc<value_range> ()) value_range;
2204 *vr = *tmp;
2205 *slot = vr;
2206
2207 return vr;
2208}
2209
2210/* Return a pointer to a value range consisting of TYPE, MIN, MAX and an empty
2211 equiv set. Use hash table in order to avoid creating multiple same copies of
2212 value_ranges. */
2213
2214static value_range *
2215ipa_get_value_range (enum value_range_kind kind, tree min, tree max)
2216{
2217 value_range tmp (min, max, kind);
2218 return ipa_get_value_range (&tmp);
2219}
2220
2221/* Assign to JF a pointer to a value_range structure with TYPE, MIN and MAX and
2222 a NULL equiv bitmap. Use hash table in order to avoid creating multiple
2223 same value_range structures. */
2224
2225static void
2226ipa_set_jfunc_vr (ipa_jump_func *jf, enum value_range_kind type,
2227 tree min, tree max)
2228{
2229 jf->m_vr = ipa_get_value_range (type, min, max);
2230}
2231
2232/* Assign to JF a pointer to a value_range just like TMP but either fetch a
2233 copy from ipa_vr_hash_table or allocate a new on in GC memory. */
2234
2235static void
2236ipa_set_jfunc_vr (ipa_jump_func *jf, value_range *tmp)
2237{
2238 jf->m_vr = ipa_get_value_range (tmp);
2239}
2240
2241/* Compute jump function for all arguments of callsite CS and insert the
2242 information in the jump_functions array in the ipa_edge_args corresponding
2243 to this callsite. */
2244
2245static void
2246ipa_compute_jump_functions_for_edge (struct ipa_func_body_info *fbi,
2247 struct cgraph_edge *cs)
2248{
2249 ipa_node_params *info = ipa_node_params_sum->get (cs->caller);
2250 ipa_edge_args *args = ipa_edge_args_sum->get_create (cs);
2251 gcall *call = cs->call_stmt;
2252 int n, arg_num = gimple_call_num_args (call);
2253 bool useful_context = false;
2254 value_range vr;
2255
2256 if (arg_num == 0 || args->jump_functions)
1
Assuming 'arg_num' is not equal to 0
2
Assuming field 'jump_functions' is null
3
Taking false branch
2257 return;
2258 vec_safe_grow_cleared (args->jump_functions, arg_num, true);
2259 if (flag_devirtualizeglobal_options.x_flag_devirtualize)
4
Assuming field 'x_flag_devirtualize' is 0
5
Taking false branch
2260 vec_safe_grow_cleared (args->polymorphic_call_contexts, arg_num, true);
2261
2262 if (gimple_call_internal_p (call))
6
Taking false branch
2263 return;
2264 if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
7
Taking false branch
2265 return;
2266
2267 for (n = 0; n
7.1
'n' is < 'arg_num'
7.1
'n' is < 'arg_num'
< arg_num; n++)
8
Loop condition is true. Entering loop body
2268 {
2269 struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
2270 tree arg = gimple_call_arg (call, n);
2271 tree param_type = ipa_get_callee_param_type (cs, n);
2272 if (flag_devirtualizeglobal_options.x_flag_devirtualize && POINTER_TYPE_P (TREE_TYPE (arg))(((enum tree_code) (((contains_struct_check ((arg), (TS_TYPED
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 2272, __FUNCTION__))->typed.type))->base.code) == POINTER_TYPE
|| ((enum tree_code) (((contains_struct_check ((arg), (TS_TYPED
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 2272, __FUNCTION__))->typed.type))->base.code) == REFERENCE_TYPE
)
)
9
Assuming field 'x_flag_devirtualize' is 0
2273 {
2274 tree instance;
2275 class ipa_polymorphic_call_context context (cs->caller->decl,
2276 arg, cs->call_stmt,
2277 &instance);
2278 context.get_dynamic_type (instance, arg, NULLnullptr, cs->call_stmt,
2279 &fbi->aa_walk_budget);
2280 *ipa_get_ith_polymorhic_call_context (args, n) = context;
2281 if (!context.useless_p ())
2282 useful_context = true;
2283 }
2284
2285 if (POINTER_TYPE_P (TREE_TYPE (arg))(((enum tree_code) (((contains_struct_check ((arg), (TS_TYPED
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 2285, __FUNCTION__))->typed.type))->base.code) == POINTER_TYPE
|| ((enum tree_code) (((contains_struct_check ((arg), (TS_TYPED
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 2285, __FUNCTION__))->typed.type))->base.code) == REFERENCE_TYPE
)
)
10
Assuming field 'code' is not equal to POINTER_TYPE
11
Assuming field 'code' is not equal to REFERENCE_TYPE
2286 {
2287 bool addr_nonzero = false;
2288 bool strict_overflow = false;
2289
2290 if (TREE_CODE (arg)((enum tree_code) (arg)->base.code) == SSA_NAME
2291 && param_type
2292 && get_range_query (cfun(cfun + 0))->range_of_expr (vr, arg)
2293 && vr.nonzero_p ())
2294 addr_nonzero = true;
2295 else if (tree_single_nonzero_warnv_p (arg, &strict_overflow))
2296 addr_nonzero = true;
2297
2298 if (addr_nonzero)
2299 {
2300 tree z = build_int_cst (TREE_TYPE (arg)((contains_struct_check ((arg), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 2300, __FUNCTION__))->typed.type)
, 0);
2301 ipa_set_jfunc_vr (jfunc, VR_ANTI_RANGE, z, z);
2302 }
2303 else
2304 gcc_assert (!jfunc->m_vr)((void)(!(!jfunc->m_vr) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 2304, __FUNCTION__), 0 : 0))
;
2305 }
2306 else
2307 {
2308 if (TREE_CODE (arg)((enum tree_code) (arg)->base.code) == SSA_NAME
12
Taking false branch
13
Assuming field 'code' is not equal to SSA_NAME
2309 && param_type
2310 /* Limit the ranger query to integral types as the rest
2311 of this file uses value_range's, which only hold
2312 integers and pointers. */
2313 && irange::supports_p (TREE_TYPE (arg)((contains_struct_check ((arg), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 2313, __FUNCTION__))->typed.type)
)
2314 && get_range_query (cfun(cfun + 0))->range_of_expr (vr, arg)
2315 && !vr.undefined_p ())
2316 {
2317 value_range resvr;
2318 range_fold_unary_expr (&resvr, NOP_EXPR, param_type,
2319 &vr, TREE_TYPE (arg)((contains_struct_check ((arg), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 2319, __FUNCTION__))->typed.type)
);
2320 if (!resvr.undefined_p () && !resvr.varying_p ())
2321 ipa_set_jfunc_vr (jfunc, &resvr);
2322 else
2323 gcc_assert (!jfunc->m_vr)((void)(!(!jfunc->m_vr) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 2323, __FUNCTION__), 0 : 0))
;
2324 }
2325 else
2326 gcc_assert (!jfunc->m_vr)((void)(!(!jfunc->m_vr) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 2326, __FUNCTION__), 0 : 0))
;
14
Assuming field 'm_vr' is null
15
'?' condition is false
2327 }
2328
2329 if (INTEGRAL_TYPE_P (TREE_TYPE (arg))(((enum tree_code) (((contains_struct_check ((arg), (TS_TYPED
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 2329, __FUNCTION__))->typed.type))->base.code) == ENUMERAL_TYPE
|| ((enum tree_code) (((contains_struct_check ((arg), (TS_TYPED
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 2329, __FUNCTION__))->typed.type))->base.code) == BOOLEAN_TYPE
|| ((enum tree_code) (((contains_struct_check ((arg), (TS_TYPED
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 2329, __FUNCTION__))->typed.type))->base.code) == INTEGER_TYPE
)
16
Assuming field 'code' is not equal to ENUMERAL_TYPE
17
Assuming field 'code' is not equal to BOOLEAN_TYPE
18
Assuming field 'code' is not equal to INTEGER_TYPE
2330 && (TREE_CODE (arg)((enum tree_code) (arg)->base.code) == SSA_NAME || TREE_CODE (arg)((enum tree_code) (arg)->base.code) == INTEGER_CST))
2331 {
2332 if (TREE_CODE (arg)((enum tree_code) (arg)->base.code) == SSA_NAME)
2333 ipa_set_jfunc_bits (jfunc, 0,
2334 widest_int::from (get_nonzero_bits (arg),
2335 TYPE_SIGN (TREE_TYPE (arg))((signop) ((tree_class_check ((((contains_struct_check ((arg)
, (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 2335, __FUNCTION__))->typed.type)), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 2335, __FUNCTION__))->base.u.bits.unsigned_flag))
));
2336 else
2337 ipa_set_jfunc_bits (jfunc, wi::to_widest (arg), 0);
2338 }
2339 else if (POINTER_TYPE_P (TREE_TYPE (arg))(((enum tree_code) (((contains_struct_check ((arg), (TS_TYPED
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 2339, __FUNCTION__))->typed.type))->base.code) == POINTER_TYPE
|| ((enum tree_code) (((contains_struct_check ((arg), (TS_TYPED
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 2339, __FUNCTION__))->typed.type))->base.code) == REFERENCE_TYPE
)
)
19
Taking false branch
2340 {
2341 unsigned HOST_WIDE_INTlong bitpos;
2342 unsigned align;
2343
2344 get_pointer_alignment_1 (arg, &align, &bitpos);
2345 widest_int mask = wi::bit_and_not
2346 (wi::mask<widest_int> (TYPE_PRECISION (TREE_TYPE (arg))((tree_class_check ((((contains_struct_check ((arg), (TS_TYPED
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 2346, __FUNCTION__))->typed.type)), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 2346, __FUNCTION__))->type_common.precision)
, false),
2347 align / BITS_PER_UNIT(8) - 1);
2348 widest_int value = bitpos / BITS_PER_UNIT(8);
2349 ipa_set_jfunc_bits (jfunc, value, mask);
2350 }
2351 else
2352 gcc_assert (!jfunc->bits)((void)(!(!jfunc->bits) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 2352, __FUNCTION__), 0 : 0))
;
20
Assuming field 'bits' is null
21
'?' condition is false
2353
2354 if (is_gimple_ip_invariant (arg)
22
Assuming the condition is false
26
Taking true branch
2355 || (VAR_P (arg)(((enum tree_code) (arg)->base.code) == VAR_DECL)
23
Assuming field 'code' is equal to VAR_DECL
2356 && is_global_var (arg)
24
Assuming the condition is true
2357 && TREE_READONLY (arg)((non_type_check ((arg), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 2357, __FUNCTION__))->base.readonly_flag)
))
25
Assuming field 'readonly_flag' is not equal to 0
2358 ipa_set_jf_constant (jfunc, arg, cs);
27
Calling 'ipa_set_jf_constant'
30
Returning from 'ipa_set_jf_constant'
2359 else if (!is_gimple_reg_type (TREE_TYPE (arg)((contains_struct_check ((arg), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 2359, __FUNCTION__))->typed.type)
)
2360 && TREE_CODE (arg)((enum tree_code) (arg)->base.code) == PARM_DECL)
2361 {
2362 int index = ipa_get_param_decl_index (info, arg);
2363
2364 gcc_assert (index >=0)((void)(!(index >=0) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 2364, __FUNCTION__), 0 : 0))
;
2365 /* Aggregate passed by value, check for pass-through, otherwise we
2366 will attempt to fill in aggregate contents later in this
2367 for cycle. */
2368 if (parm_preserved_before_stmt_p (fbi, index, call, arg))
2369 {
2370 ipa_set_jf_simple_pass_through (jfunc, index, false);
2371 continue;
2372 }
2373 }
2374 else if (TREE_CODE (arg)((enum tree_code) (arg)->base.code) == SSA_NAME)
2375 {
2376 if (SSA_NAME_IS_DEFAULT_DEF (arg)(tree_check ((arg), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 2376, __FUNCTION__, (SSA_NAME)))->base.default_def_flag
)
2377 {
2378 int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg)((tree_check ((arg), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 2378, __FUNCTION__, (SSA_NAME)))->ssa_name.var == (tree)
nullptr || ((enum tree_code) ((arg)->ssa_name.var)->base
.code) == IDENTIFIER_NODE ? (tree) nullptr : (arg)->ssa_name
.var)
);
2379 if (index >= 0)
2380 {
2381 bool agg_p;
2382 agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
2383 ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
2384 }
2385 }
2386 else
2387 {
2388 gimple *stmt = SSA_NAME_DEF_STMT (arg)(tree_check ((arg), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 2388, __FUNCTION__, (SSA_NAME)))->ssa_name.def_stmt
;
2389 if (is_gimple_assign (stmt))
2390 compute_complex_assign_jump_func (fbi, info, jfunc,
2391 call, stmt, arg, param_type);
2392 else if (gimple_code (stmt) == GIMPLE_PHI)
2393 compute_complex_ancestor_jump_func (fbi, info, jfunc,
2394 call,
2395 as_a <gphi *> (stmt));
2396 }
2397 }
2398
2399 /* If ARG is pointer, we cannot use its type to determine the type of aggregate
2400 passed (because type conversions are ignored in gimple). Usually we can
2401 safely get type from function declaration, but in case of K&R prototypes or
2402 variadic functions we can try our luck with type of the pointer passed.
2403 TODO: Since we look for actual initialization of the memory object, we may better
2404 work out the type based on the memory stores we find. */
2405 if (!param_type
30.1
'param_type' is null
30.1
'param_type' is null
)
31
Taking true branch
2406 param_type = TREE_TYPE (arg)((contains_struct_check ((arg), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 2406, __FUNCTION__))->typed.type)
;
2407
2408 if ((jfunc->type
31.1
Field 'type' is not equal to IPA_JF_PASS_THROUGH
31.1
Field 'type' is not equal to IPA_JF_PASS_THROUGH
!= IPA_JF_PASS_THROUGH
2409 || !ipa_get_jf_pass_through_agg_preserved (jfunc))
2410 && (jfunc->type
31.2
Field 'type' is not equal to IPA_JF_ANCESTOR
31.2
Field 'type' is not equal to IPA_JF_ANCESTOR
!= IPA_JF_ANCESTOR
2411 || !ipa_get_jf_ancestor_agg_preserved (jfunc))
2412 && (AGGREGATE_TYPE_P (TREE_TYPE (arg))(((enum tree_code) (((contains_struct_check ((arg), (TS_TYPED
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 2412, __FUNCTION__))->typed.type))->base.code) == ARRAY_TYPE
|| (((enum tree_code) (((contains_struct_check ((arg), (TS_TYPED
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 2412, __FUNCTION__))->typed.type))->base.code) == RECORD_TYPE
|| ((enum tree_code) (((contains_struct_check ((arg), (TS_TYPED
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 2412, __FUNCTION__))->typed.type))->base.code) == UNION_TYPE
|| ((enum tree_code) (((contains_struct_check ((arg), (TS_TYPED
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 2412, __FUNCTION__))->typed.type))->base.code) == QUAL_UNION_TYPE
))
32
Assuming field 'code' is equal to ARRAY_TYPE
2413 || POINTER_TYPE_P (param_type)(((enum tree_code) (param_type)->base.code) == POINTER_TYPE
|| ((enum tree_code) (param_type)->base.code) == REFERENCE_TYPE
)
))
2414 determine_known_aggregate_parts (fbi, call, arg, param_type, jfunc);
33
Calling 'determine_known_aggregate_parts'
2415 }
2416 if (!useful_context)
2417 vec_free (args->polymorphic_call_contexts);
2418}
2419
2420/* Compute jump functions for all edges - both direct and indirect - outgoing
2421 from BB. */
2422
2423static void
2424ipa_compute_jump_functions_for_bb (struct ipa_func_body_info *fbi, basic_block bb)
2425{
2426 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
2427 int i;
2428 struct cgraph_edge *cs;
2429
2430 FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)for (i = (bi->cg_edges).length () - 1; (bi->cg_edges).iterate
((i), &(cs)); (i)--)
2431 {
2432 struct cgraph_node *callee = cs->callee;
2433
2434 if (callee)
2435 {
2436 callee = callee->ultimate_alias_target ();
2437 /* We do not need to bother analyzing calls to unknown functions
2438 unless they may become known during lto/whopr. */
2439 if (!callee->definition && !flag_ltoglobal_options.x_flag_lto
2440 && !gimple_call_fnspec (cs->call_stmt).known_p ())
2441 continue;
2442 }
2443 ipa_compute_jump_functions_for_edge (fbi, cs);
2444 }
2445}
2446
2447/* If STMT looks like a statement loading a value from a member pointer formal
2448 parameter, return that parameter and store the offset of the field to
2449 *OFFSET_P, if it is non-NULL. Otherwise return NULL (but *OFFSET_P still
2450 might be clobbered). If USE_DELTA, then we look for a use of the delta
2451 field rather than the pfn. */
2452
2453static tree
2454ipa_get_stmt_member_ptr_load_param (gimple *stmt, bool use_delta,
2455 HOST_WIDE_INTlong *offset_p)
2456{
2457 tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;
2458
2459 if (!gimple_assign_single_p (stmt))
2460 return NULL_TREE(tree) nullptr;
2461
2462 rhs = gimple_assign_rhs1 (stmt);
2463 if (TREE_CODE (rhs)((enum tree_code) (rhs)->base.code) == COMPONENT_REF)
2464 {
2465 ref_field = TREE_OPERAND (rhs, 1)(*((const_cast<tree*> (tree_operand_check ((rhs), (1), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 2465, __FUNCTION__)))))
;
2466 rhs = TREE_OPERAND (rhs, 0)(*((const_cast<tree*> (tree_operand_check ((rhs), (0), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 2466, __FUNCTION__)))))
;
2467 }
2468 else
2469 ref_field = NULL_TREE(tree) nullptr;
2470 if (TREE_CODE (rhs)((enum tree_code) (rhs)->base.code) != MEM_REF)
2471 return NULL_TREE(tree) nullptr;
2472 rec = TREE_OPERAND (rhs, 0)(*((const_cast<tree*> (tree_operand_check ((rhs), (0), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 2472, __FUNCTION__)))))
;
2473 if (TREE_CODE (rec)((enum tree_code) (rec)->base.code) != ADDR_EXPR)
2474 return NULL_TREE(tree) nullptr;
2475 rec = TREE_OPERAND (rec, 0)(*((const_cast<tree*> (tree_operand_check ((rec), (0), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 2475, __FUNCTION__)))))
;
2476 if (TREE_CODE (rec)((enum tree_code) (rec)->base.code) != PARM_DECL
2477 || !type_like_member_ptr_p (TREE_TYPE (rec)((contains_struct_check ((rec), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 2477, __FUNCTION__))->typed.type)
, &ptr_field, &delta_field))
2478 return NULL_TREE(tree) nullptr;
2479 ref_offset = TREE_OPERAND (rhs, 1)(*((const_cast<tree*> (tree_operand_check ((rhs), (1), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 2479, __FUNCTION__)))))
;
2480
2481 if (use_delta)
2482 fld = delta_field;
2483 else
2484 fld = ptr_field;
2485 if (offset_p)
2486 *offset_p = int_bit_position (fld);
2487
2488 if (ref_field)
2489 {
2490 if (integer_nonzerop (ref_offset))
2491 return NULL_TREE(tree) nullptr;
2492 return ref_field == fld ? rec : NULL_TREE(tree) nullptr;
2493 }
2494 else
2495 return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
2496 : NULL_TREE(tree) nullptr;
2497}
2498
2499/* Returns true iff T is an SSA_NAME defined by a statement. */
2500
2501static bool
2502ipa_is_ssa_with_stmt_def (tree t)
2503{
2504 if (TREE_CODE (t)((enum tree_code) (t)->base.code) == SSA_NAME
2505 && !SSA_NAME_IS_DEFAULT_DEF (t)(tree_check ((t), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 2505, __FUNCTION__, (SSA_NAME)))->base.default_def_flag
)
2506 return true;
2507 else
2508 return false;
2509}
2510
2511/* Find the indirect call graph edge corresponding to STMT and mark it as a
2512 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
2513 indirect call graph edge.
2514 If POLYMORPHIC is true record is as a destination of polymorphic call. */
2515
2516static struct cgraph_edge *
2517ipa_note_param_call (struct cgraph_node *node, int param_index,
2518 gcall *stmt, bool polymorphic)
2519{
2520 struct cgraph_edge *cs;
2521
2522 cs = node->get_edge (stmt);
2523 cs->indirect_info->param_index = param_index;
2524 cs->indirect_info->agg_contents = 0;
2525 cs->indirect_info->member_ptr = 0;
2526 cs->indirect_info->guaranteed_unmodified = 0;
2527 ipa_node_params *info = ipa_node_params_sum->get (node);
2528 ipa_set_param_used_by_indirect_call (info, param_index, true);
2529 if (cs->indirect_info->polymorphic || polymorphic)
2530 ipa_set_param_used_by_polymorphic_call (info, param_index, true);
2531 return cs;
2532}
2533
2534/* Analyze the CALL and examine uses of formal parameters of the caller NODE
2535 (described by INFO). PARMS_AINFO is a pointer to a vector containing
2536 intermediate information about each formal parameter. Currently it checks
2537 whether the call calls a pointer that is a formal parameter and if so, the
2538 parameter is marked with the called flag and an indirect call graph edge
2539 describing the call is created. This is very simple for ordinary pointers
2540 represented in SSA but not-so-nice when it comes to member pointers. The
2541 ugly part of this function does nothing more than trying to match the
2542 pattern of such a call. An example of such a pattern is the gimple dump
2543 below, the call is on the last line:
2544
2545 <bb 2>:
2546 f$__delta_5 = f.__delta;
2547 f$__pfn_24 = f.__pfn;
2548
2549 or
2550 <bb 2>:
2551 f$__delta_5 = MEM[(struct *)&f];
2552 f$__pfn_24 = MEM[(struct *)&f + 4B];
2553
2554 and a few lines below:
2555
2556 <bb 5>
2557 D.2496_3 = (int) f$__pfn_24;
2558 D.2497_4 = D.2496_3 & 1;
2559 if (D.2497_4 != 0)
2560 goto <bb 3>;
2561 else
2562 goto <bb 4>;
2563
2564 <bb 6>:
2565 D.2500_7 = (unsigned int) f$__delta_5;
2566 D.2501_8 = &S + D.2500_7;
2567 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
2568 D.2503_10 = *D.2502_9;
2569 D.2504_12 = f$__pfn_24 + -1;
2570 D.2505_13 = (unsigned int) D.2504_12;
2571 D.2506_14 = D.2503_10 + D.2505_13;
2572 D.2507_15 = *D.2506_14;
2573 iftmp.11_16 = (String:: *) D.2507_15;
2574
2575 <bb 7>:
2576 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
2577 D.2500_19 = (unsigned int) f$__delta_5;
2578 D.2508_20 = &S + D.2500_19;
2579 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
2580
2581 Such patterns are results of simple calls to a member pointer:
2582
2583 int doprinting (int (MyString::* f)(int) const)
2584 {
2585 MyString S ("somestring");
2586
2587 return (S.*f)(4);
2588 }
2589
2590 Moreover, the function also looks for called pointers loaded from aggregates
2591 passed by value or reference. */
2592
2593static void
2594ipa_analyze_indirect_call_uses (struct ipa_func_body_info *fbi, gcall *call,
2595 tree target)
2596{
2597 class ipa_node_params *info = fbi->info;
2598 HOST_WIDE_INTlong offset;
2599 bool by_ref;
2600
2601 if (SSA_NAME_IS_DEFAULT_DEF (target)(tree_check ((target), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 2601, __FUNCTION__, (SSA_NAME)))->base.default_def_flag
)
2602 {
2603 tree var = SSA_NAME_VAR (target)((tree_check ((target), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 2603, __FUNCTION__, (SSA_NAME)))->ssa_name.var == (tree)
nullptr || ((enum tree_code) ((target)->ssa_name.var)->
base.code) == IDENTIFIER_NODE ? (tree) nullptr : (target)->
ssa_name.var)
;
2604 int index = ipa_get_param_decl_index (info, var);
2605 if (index >= 0)
2606 ipa_note_param_call (fbi->node, index, call, false);
2607 return;
2608 }
2609
2610 int index;
2611 gimple *def = SSA_NAME_DEF_STMT (target)(tree_check ((target), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 2611, __FUNCTION__, (SSA_NAME)))->ssa_name.def_stmt
;
2612 bool guaranteed_unmodified;
2613 if (gimple_assign_single_p (def)
2614 && ipa_load_from_parm_agg (fbi, info->descriptors, def,
2615 gimple_assign_rhs1 (def), &index, &offset,
2616 NULLnullptr, &by_ref, &guaranteed_unmodified))
2617 {
2618 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index,
2619 call, false);
2620 cs->indirect_info->offset = offset;
2621 cs->indirect_info->agg_contents = 1;
2622 cs->indirect_info->by_ref = by_ref;
2623 cs->indirect_info->guaranteed_unmodified = guaranteed_unmodified;
2624 return;
2625 }
2626
2627 /* Now we need to try to match the complex pattern of calling a member
2628 pointer. */
2629 if (gimple_code (def) != GIMPLE_PHI
2630 || gimple_phi_num_args (def) != 2
2631 || !POINTER_TYPE_P (TREE_TYPE (target))(((enum tree_code) (((contains_struct_check ((target), (TS_TYPED
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 2631, __FUNCTION__))->typed.type))->base.code) == POINTER_TYPE
|| ((enum tree_code) (((contains_struct_check ((target), (TS_TYPED
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 2631, __FUNCTION__))->typed.type))->base.code) == REFERENCE_TYPE
)
2632 || TREE_CODE (TREE_TYPE (TREE_TYPE (target)))((enum tree_code) (((contains_struct_check ((((contains_struct_check
((target), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 2632, __FUNCTION__))->typed.type)), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 2632, __FUNCTION__))->typed.type))->base.code)
!= METHOD_TYPE)
2633 return;
2634
2635 /* First, we need to check whether one of these is a load from a member
2636 pointer that is a parameter to this function. */
2637 tree n1 = PHI_ARG_DEF (def, 0)gimple_phi_arg_def ((def), (0));
2638 tree n2 = PHI_ARG_DEF (def, 1)gimple_phi_arg_def ((def), (1));
2639 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
2640 return;
2641 gimple *d1 = SSA_NAME_DEF_STMT (n1)(tree_check ((n1), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 2641, __FUNCTION__, (SSA_NAME)))->ssa_name.def_stmt
;
2642 gimple *d2 = SSA_NAME_DEF_STMT (n2)(tree_check ((n2), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 2642, __FUNCTION__, (SSA_NAME)))->ssa_name.def_stmt
;
2643
2644 tree rec;
2645 basic_block bb, virt_bb;
2646 basic_block join = gimple_bb (def);
2647 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
2648 {
2649 if (ipa_get_stmt_member_ptr_load_param (d2, false, NULLnullptr))
2650 return;
2651
2652 bb = EDGE_PRED (join, 0)(*(join)->preds)[(0)]->src;
2653 virt_bb = gimple_bb (d2);
2654 }
2655 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
2656 {
2657 bb = EDGE_PRED (join, 1)(*(join)->preds)[(1)]->src;
2658 virt_bb = gimple_bb (d1);
2659 }
2660 else
2661 return;
2662
2663 /* Second, we need to check that the basic blocks are laid out in the way
2664 corresponding to the pattern. */
2665
2666 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
2667 || single_pred (virt_bb) != bb
2668 || single_succ (virt_bb) != join)
2669 return;
2670
2671 /* Third, let's see that the branching is done depending on the least
2672 significant bit of the pfn. */
2673
2674 gimple *branch = last_stmt (bb);
2675 if (!branch || gimple_code (branch) != GIMPLE_COND)
2676 return;
2677
2678 if ((gimple_cond_code (branch) != NE_EXPR
2679 && gimple_cond_code (branch) != EQ_EXPR)
2680 || !integer_zerop (gimple_cond_rhs (branch)))
2681 return;
2682
2683 tree cond = gimple_cond_lhs (branch);
2684 if (!ipa_is_ssa_with_stmt_def (cond))
2685 return;
2686
2687 def = SSA_NAME_DEF_STMT (cond)(tree_check ((cond), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 2687, __FUNCTION__, (SSA_NAME)))->ssa_name.def_stmt
;
2688 if (!is_gimple_assign (def)
2689 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
2690 || !integer_onep (gimple_assign_rhs2 (def)))
2691 return;
2692
2693 cond = gimple_assign_rhs1 (def);
2694 if (!ipa_is_ssa_with_stmt_def (cond))
2695 return;
2696
2697 def = SSA_NAME_DEF_STMT (cond)(tree_check ((cond), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 2697, __FUNCTION__, (SSA_NAME)))->ssa_name.def_stmt
;
2698
2699 if (is_gimple_assign (def)
2700 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def))((gimple_assign_rhs_code (def)) == NOP_EXPR || (gimple_assign_rhs_code
(def)) == CONVERT_EXPR)
)
2701 {
2702 cond = gimple_assign_rhs1 (def);
2703 if (!ipa_is_ssa_with_stmt_def (cond))
2704 return;
2705 def = SSA_NAME_DEF_STMT (cond)(tree_check ((cond), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 2705, __FUNCTION__, (SSA_NAME)))->ssa_name.def_stmt
;
2706 }
2707
2708 tree rec2;
2709 rec2 = ipa_get_stmt_member_ptr_load_param (def,
2710 (TARGET_PTRMEMFUNC_VBIT_LOCATIONptrmemfunc_vbit_in_pfn
2711 == ptrmemfunc_vbit_in_delta),
2712 NULLnullptr);
2713 if (rec != rec2)
2714 return;
2715
2716 index = ipa_get_param_decl_index (info, rec);
2717 if (index >= 0
2718 && parm_preserved_before_stmt_p (fbi, index, call, rec))
2719 {
2720 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index,
2721 call, false);
2722 cs->indirect_info->offset = offset;
2723 cs->indirect_info->agg_contents = 1;
2724 cs->indirect_info->member_ptr = 1;
2725 cs->indirect_info->guaranteed_unmodified = 1;
2726 }
2727
2728 return;
2729}
2730
2731/* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
2732 object referenced in the expression is a formal parameter of the caller
2733 FBI->node (described by FBI->info), create a call note for the
2734 statement. */
2735
2736static void
2737ipa_analyze_virtual_call_uses (struct ipa_func_body_info *fbi,
2738 gcall *call, tree target)
2739{
2740 tree obj = OBJ_TYPE_REF_OBJECT (target)(*((const_cast<tree*> (tree_operand_check (((tree_check
((target), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 2740, __FUNCTION__, (OBJ_TYPE_REF)))), (1), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 2740, __FUNCTION__)))))
;
2741 int index;
2742 HOST_WIDE_INTlong anc_offset;
2743
2744 if (!flag_devirtualizeglobal_options.x_flag_devirtualize)
2745 return;
2746
2747 if (TREE_CODE (obj)((enum tree_code) (obj)->base.code) != SSA_NAME)
2748 return;
2749
2750 class ipa_node_params *info = fbi->info;
2751 if (SSA_NAME_IS_DEFAULT_DEF (obj)(tree_check ((obj), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 2751, __FUNCTION__, (SSA_NAME)))->base.default_def_flag
)
2752 {
2753 if (TREE_CODE (SSA_NAME_VAR (obj))((enum tree_code) (((tree_check ((obj), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 2753, __FUNCTION__, (SSA_NAME)))->ssa_name.var == (tree)
nullptr || ((enum tree_code) ((obj)->ssa_name.var)->base
.code) == IDENTIFIER_NODE ? (tree) nullptr : (obj)->ssa_name
.var))->base.code)
!= PARM_DECL)
2754 return;
2755
2756 anc_offset = 0;
2757 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj)((tree_check ((obj), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 2757, __FUNCTION__, (SSA_NAME)))->ssa_name.var == (tree)
nullptr || ((enum tree_code) ((obj)->ssa_name.var)->base
.code) == IDENTIFIER_NODE ? (tree) nullptr : (obj)->ssa_name
.var)
);
2758 gcc_assert (index >= 0)((void)(!(index >= 0) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 2758, __FUNCTION__), 0 : 0))
;
2759 if (detect_type_change_ssa (fbi, obj, obj_type_ref_class (target),
2760 call))
2761 return;
2762 }
2763 else
2764 {
2765 gimple *stmt = SSA_NAME_DEF_STMT (obj)(tree_check ((obj), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 2765, __FUNCTION__, (SSA_NAME)))->ssa_name.def_stmt
;
2766 tree expr;
2767
2768 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
2769 if (!expr)
2770 return;
2771 index = ipa_get_param_decl_index (info,
2772 SSA_NAME_VAR (TREE_OPERAND (expr, 0))((tree_check (((*((const_cast<tree*> (tree_operand_check
((expr), (0), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 2772, __FUNCTION__)))))), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 2772, __FUNCTION__, (SSA_NAME)))->ssa_name.var == (tree)
nullptr || ((enum tree_code) (((*((const_cast<tree*> (
tree_operand_check ((expr), (0), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 2772, __FUNCTION__))))))->ssa_name.var)->base.code) ==
IDENTIFIER_NODE ? (tree) nullptr : ((*((const_cast<tree*>
(tree_operand_check ((expr), (0), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 2772, __FUNCTION__))))))->ssa_name.var)
);
2773 gcc_assert (index >= 0)((void)(!(index >= 0) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 2773, __FUNCTION__), 0 : 0))
;
2774 if (detect_type_change (fbi, obj, expr, obj_type_ref_class (target),
2775 call, anc_offset))
2776 return;
2777 }
2778
2779 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index,
2780 call, true);
2781 class cgraph_indirect_call_info *ii = cs->indirect_info;
2782 ii->offset = anc_offset;
2783 ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target)(*((const_cast<tree*> (tree_operand_check (((tree_check
((target), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 2783, __FUNCTION__, (OBJ_TYPE_REF)))), (2), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 2783, __FUNCTION__)))))
);
2784 ii->otr_type = obj_type_ref_class (target);
2785 ii->polymorphic = 1;
2786}
2787
2788/* Analyze a call statement CALL whether and how it utilizes formal parameters
2789 of the caller (described by INFO). PARMS_AINFO is a pointer to a vector
2790 containing intermediate information about each formal parameter. */
2791
2792static void
2793ipa_analyze_call_uses (struct ipa_func_body_info *fbi, gcall *call)
2794{
2795 tree target = gimple_call_fn (call);
2796
2797 if (!target
2798 || (TREE_CODE (target)((enum tree_code) (target)->base.code) != SSA_NAME
2799 && !virtual_method_call_p (target)))
2800 return;
2801
2802 struct cgraph_edge *cs = fbi->node->get_edge (call);
2803 /* If we previously turned the call into a direct call, there is
2804 no need to analyze. */
2805 if (cs && !cs->indirect_unknown_callee)
2806 return;
2807
2808 if (cs->indirect_info->polymorphic && flag_devirtualizeglobal_options.x_flag_devirtualize)
2809 {
2810 tree instance;
2811 tree target = gimple_call_fn (call);
2812 ipa_polymorphic_call_context context (current_function_decl,
2813 target, call, &instance);
2814
2815 gcc_checking_assert (cs->indirect_info->otr_type((void)(!(cs->indirect_info->otr_type == obj_type_ref_class
(target)) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 2816, __FUNCTION__), 0 : 0))
2816 == obj_type_ref_class (target))((void)(!(cs->indirect_info->otr_type == obj_type_ref_class
(target)) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 2816, __FUNCTION__), 0 : 0))
;
2817 gcc_checking_assert (cs->indirect_info->otr_token((void)(!(cs->indirect_info->otr_token == tree_to_shwi (
(*((const_cast<tree*> (tree_operand_check (((tree_check
((target), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 2818, __FUNCTION__, (OBJ_TYPE_REF)))), (2), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 2818, __FUNCTION__))))))) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 2818, __FUNCTION__), 0 : 0))
2818 == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target)))((void)(!(cs->indirect_info->otr_token == tree_to_shwi (
(*((const_cast<tree*> (tree_operand_check (((tree_check
((target), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 2818, __FUNCTION__, (OBJ_TYPE_REF)))), (2), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 2818, __FUNCTION__))))))) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 2818, __FUNCTION__), 0 : 0))
;
2819
2820 cs->indirect_info->vptr_changed
2821 = !context.get_dynamic_type (instance,
2822 OBJ_TYPE_REF_OBJECT (target)(*((const_cast<tree*> (tree_operand_check (((tree_check
((target), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 2822, __FUNCTION__, (OBJ_TYPE_REF)))), (1), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 2822, __FUNCTION__)))))
,
2823 obj_type_ref_class (target), call,
2824 &fbi->aa_walk_budget);
2825 cs->indirect_info->context = context;
2826 }
2827
2828 if (TREE_CODE (target)((enum tree_code) (target)->base.code) == SSA_NAME)
2829 ipa_analyze_indirect_call_uses (fbi, call, target);
2830 else if (virtual_method_call_p (target))
2831 ipa_analyze_virtual_call_uses (fbi, call, target);
2832}
2833
2834
2835/* Analyze the call statement STMT with respect to formal parameters (described
2836 in INFO) of caller given by FBI->NODE. Currently it only checks whether
2837 formal parameters are called. */
2838
2839static void
2840ipa_analyze_stmt_uses (struct ipa_func_body_info *fbi, gimple *stmt)
2841{
2842 if (is_gimple_call (stmt))
2843 ipa_analyze_call_uses (fbi, as_a <gcall *> (stmt));
2844}
2845
2846/* Callback of walk_stmt_load_store_addr_ops for the visit_load.
2847 If OP is a parameter declaration, mark it as used in the info structure
2848 passed in DATA. */
2849
2850static bool
2851visit_ref_for_mod_analysis (gimple *, tree op, tree, void *data)
2852{
2853 class ipa_node_params *info = (class ipa_node_params *) data;
2854
2855 op = get_base_address (op);
2856 if (op
2857 && TREE_CODE (op)((enum tree_code) (op)->base.code) == PARM_DECL)
2858 {
2859 int index = ipa_get_param_decl_index (info, op);
2860 gcc_assert (index >= 0)((void)(!(index >= 0) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 2860, __FUNCTION__), 0 : 0))
;
2861 ipa_set_param_used (info, index, true);
2862 }
2863
2864 return false;
2865}
2866
2867/* Scan the statements in BB and inspect the uses of formal parameters. Store
2868 the findings in various structures of the associated ipa_node_params
2869 structure, such as parameter flags, notes etc. FBI holds various data about
2870 the function being analyzed. */
2871
2872static void
2873ipa_analyze_params_uses_in_bb (struct ipa_func_body_info *fbi, basic_block bb)
2874{
2875 gimple_stmt_iterator gsi;
2876 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2877 {
2878 gimple *stmt = gsi_stmt (gsi);
2879
2880 if (is_gimple_debug (stmt))
2881 continue;
2882
2883 ipa_analyze_stmt_uses (fbi, stmt);
2884 walk_stmt_load_store_addr_ops (stmt, fbi->info,
2885 visit_ref_for_mod_analysis,
2886 visit_ref_for_mod_analysis,
2887 visit_ref_for_mod_analysis);
2888 }
2889 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2890 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
2891 visit_ref_for_mod_analysis,
2892 visit_ref_for_mod_analysis,
2893 visit_ref_for_mod_analysis);
2894}
2895
2896/* Return true EXPR is a load from a dereference of SSA_NAME NAME. */
2897
2898static bool
2899load_from_dereferenced_name (tree expr, tree name)
2900{
2901 tree base = get_base_address (expr);
2902 return (TREE_CODE (base)((enum tree_code) (base)->base.code) == MEM_REF
2903 && TREE_OPERAND (base, 0)(*((const_cast<tree*> (tree_operand_check ((base), (0),
"/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 2903, __FUNCTION__)))))
== name);
2904}
2905
2906/* Calculate controlled uses of parameters of NODE. */
2907
2908static void
2909ipa_analyze_controlled_uses (struct cgraph_node *node)
2910{
2911 ipa_node_params *info = ipa_node_params_sum->get (node);
2912
2913 for (int i = 0; i < ipa_get_param_count (info); i++)
2914 {
2915 tree parm = ipa_get_param (info, i);
2916 int call_uses = 0;
2917 bool load_dereferenced = false;
2918
2919 /* For SSA regs see if parameter is used. For non-SSA we compute
2920 the flag during modification analysis. */
2921 if (is_gimple_reg (parm))
2922 {
2923 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl)((tree_check ((node->decl), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 2923, __FUNCTION__, (FUNCTION_DECL)))->function_decl.f)
,
2924 parm);
2925 if (ddef && !has_zero_uses (ddef))
2926 {
2927 imm_use_iterator imm_iter;
2928 gimple *stmt;
2929
2930 ipa_set_param_used (info, i, true);
2931 FOR_EACH_IMM_USE_STMT (stmt, imm_iter, ddef)for (struct auto_end_imm_use_stmt_traverse auto_end_imm_use_stmt_traverse
((((stmt) = first_imm_use_stmt (&(imm_iter), (ddef))), &
(imm_iter))); !end_imm_use_stmt_p (&(imm_iter)); (void) (
(stmt) = next_imm_use_stmt (&(imm_iter))))
2932 {
2933 if (is_gimple_debug (stmt))
2934 continue;
2935
2936 int all_stmt_uses = 0;
2937 use_operand_p use_p;
2938 FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)for ((use_p) = first_imm_use_on_stmt (&(imm_iter)); !end_imm_use_on_stmt_p
(&(imm_iter)); (void) ((use_p) = next_imm_use_on_stmt (&
(imm_iter))))
2939 all_stmt_uses++;
2940
2941 if (is_gimple_call (stmt))
2942 {
2943 if (gimple_call_internal_p (stmt))
2944 {
2945 call_uses = IPA_UNDESCRIBED_USE-1;
2946 break;
2947 }
2948 int recognized_stmt_uses;
2949 if (gimple_call_fn (stmt) == ddef)
2950 recognized_stmt_uses = 1;
2951 else
2952 recognized_stmt_uses = 0;
2953 unsigned arg_count = gimple_call_num_args (stmt);
2954 for (unsigned i = 0; i < arg_count; i++)
2955 {
2956 tree arg = gimple_call_arg (stmt, i);
2957 if (arg == ddef)
2958 recognized_stmt_uses++;
2959 else if (load_from_dereferenced_name (arg, ddef))
2960 {
2961 load_dereferenced = true;
2962 recognized_stmt_uses++;
2963 }
2964 }
2965
2966 if (recognized_stmt_uses != all_stmt_uses)
2967 {
2968 call_uses = IPA_UNDESCRIBED_USE-1;
2969 break;
2970 }
2971 if (call_uses >= 0)
2972 call_uses += all_stmt_uses;
2973 }
2974 else if (gimple_assign_single_p (stmt))
2975 {
2976 tree rhs = gimple_assign_rhs1 (stmt);
2977 if (all_stmt_uses != 1
2978 || !load_from_dereferenced_name (rhs, ddef))
2979 {
2980 call_uses = IPA_UNDESCRIBED_USE-1;
2981 break;
2982 }
2983 load_dereferenced = true;
2984 }
2985 else
2986 {
2987 call_uses = IPA_UNDESCRIBED_USE-1;
2988 break;
2989 }
2990 }
2991 }
2992 else
2993 call_uses = 0;
2994 }
2995 else
2996 call_uses = IPA_UNDESCRIBED_USE-1;
2997 ipa_set_controlled_uses (info, i, call_uses);
2998 ipa_set_param_load_dereferenced (info, i, load_dereferenced);
2999 }
3000}
3001
3002/* Free stuff in BI. */
3003
3004static void
3005free_ipa_bb_info (struct ipa_bb_info *bi)
3006{
3007 bi->cg_edges.release ();
3008 bi->param_aa_statuses.release ();
3009}
3010
3011/* Dominator walker driving the analysis. */
3012
3013class analysis_dom_walker : public dom_walker
3014{
3015public:
3016 analysis_dom_walker (struct ipa_func_body_info *fbi)
3017 : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}
3018
3019 edge before_dom_children (basic_block) final override;
3020
3021private:
3022 struct ipa_func_body_info *m_fbi;
3023};
3024
3025edge
3026analysis_dom_walker::before_dom_children (basic_block bb)
3027{
3028 ipa_analyze_params_uses_in_bb (m_fbi, bb);
3029 ipa_compute_jump_functions_for_bb (m_fbi, bb);
3030 return NULLnullptr;
3031}
3032
3033/* Release body info FBI. */
3034
3035void
3036ipa_release_body_info (struct ipa_func_body_info *fbi)
3037{
3038 int i;
3039 struct ipa_bb_info *bi;
3040
3041 FOR_EACH_VEC_ELT (fbi->bb_infos, i, bi)for (i = 0; (fbi->bb_infos).iterate ((i), &(bi)); ++(i
))
3042 free_ipa_bb_info (bi);
3043 fbi->bb_infos.release ();
3044}
3045
3046/* Initialize the array describing properties of formal parameters
3047 of NODE, analyze their uses and compute jump functions associated
3048 with actual arguments of calls from within NODE. */
3049
3050void
3051ipa_analyze_node (struct cgraph_node *node)
3052{
3053 struct ipa_func_body_info fbi;
3054 class ipa_node_params *info;
3055
3056 ipa_check_create_node_params ();
3057 ipa_check_create_edge_args ();
3058 info = ipa_node_params_sum->get_create (node);
3059
3060 if (info->analysis_done)
3061 return;
3062 info->analysis_done = 1;
3063
3064 if (ipa_func_spec_opts_forbid_analysis_p (node)
3065 || (count_formal_params (node->decl)
3066 >= (1 << IPA_PROP_ARG_INDEX_LIMIT_BITS16)))
3067 {
3068 gcc_assert (!ipa_get_param_count (info))((void)(!(!ipa_get_param_count (info)) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 3068, __FUNCTION__), 0 : 0))
;
3069 return;
3070 }
3071
3072 struct function *func = DECL_STRUCT_FUNCTION (node->decl)((tree_check ((node->decl), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 3072, __FUNCTION__, (FUNCTION_DECL)))->function_decl.f)
;
3073 push_cfun (func);
3074 calculate_dominance_info (CDI_DOMINATORS);
3075 ipa_initialize_node_params (node);
3076 ipa_analyze_controlled_uses (node);
3077
3078 fbi.node = node;
3079 fbi.info = info;
3080 fbi.bb_infos = vNULL;
3081 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun)(((cfun + 0))->cfg->x_last_basic_block), true);
3082 fbi.param_count = ipa_get_param_count (info);
3083 fbi.aa_walk_budget = opt_for_fn (node->decl, param_ipa_max_aa_steps)(opts_for_fn (node->decl)->x_param_ipa_max_aa_steps);
3084
3085 for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
3086 {
3087 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
3088 bi->cg_edges.safe_push (cs);
3089 }
3090
3091 for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
3092 {
3093 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
3094 bi->cg_edges.safe_push (cs);
3095 }
3096
3097 analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun)(((cfun + 0))->cfg->x_entry_block_ptr));
3098
3099 ipa_release_body_info (&fbi);
3100 free_dominance_info (CDI_DOMINATORS);
3101 pop_cfun ();
3102}
3103
3104/* Update the jump functions associated with call graph edge E when the call
3105 graph edge CS is being inlined, assuming that E->caller is already (possibly
3106 indirectly) inlined into CS->callee and that E has not been inlined. */
3107
3108static void
3109update_jump_functions_after_inlining (struct cgraph_edge *cs,
3110 struct cgraph_edge *e)
3111{
3112 ipa_edge_args *top = ipa_edge_args_sum->get (cs);
3113 ipa_edge_args *args = ipa_edge_args_sum->get (e);
3114 if (!args)
3115 return;
3116 int count = ipa_get_cs_argument_count (args);
3117 int i;
3118
3119 for (i = 0; i < count; i++)
3120 {
3121 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
3122 class ipa_polymorphic_call_context *dst_ctx
3123 = ipa_get_ith_polymorhic_call_context (args, i);
3124
3125 if (dst->agg.items)
3126 {
3127 struct ipa_agg_jf_item *item;
3128 int j;
3129
3130 FOR_EACH_VEC_ELT (*dst->agg.items, j, item)for (j = 0; (*dst->agg.items).iterate ((j), &(item)); ++
(j))
3131 {
3132 int dst_fid;
3133 struct ipa_jump_func *src;
3134
3135 if (item->jftype != IPA_JF_PASS_THROUGH
3136 && item->jftype != IPA_JF_LOAD_AGG)
3137 continue;
3138
3139 dst_fid = item->value.pass_through.formal_id;
3140 if (!top || dst_fid >= ipa_get_cs_argument_count (top))
3141 {
3142 item->jftype = IPA_JF_UNKNOWN;
3143 continue;
3144 }
3145
3146 item->value.pass_through.formal_id = -1;
3147 src = ipa_get_ith_jump_func (top, dst_fid);
3148 if (src->type == IPA_JF_CONST)
3149 {
3150 if (item->jftype == IPA_JF_PASS_THROUGH
3151 && item->value.pass_through.operation == NOP_EXPR)
3152 {
3153 item->jftype = IPA_JF_CONST;
3154 item->value.constant = src->value.constant.value;
3155 continue;
3156 }
3157 }
3158 else if (src->type == IPA_JF_PASS_THROUGH
3159 && src->value.pass_through.operation == NOP_EXPR)
3160 {
3161 if (item->jftype == IPA_JF_PASS_THROUGH
3162 || !item->value.load_agg.by_ref
3163 || src->value.pass_through.agg_preserved)
3164 item->value.pass_through.formal_id
3165 = src->value.pass_through.formal_id;
3166 }
3167 else if (src->type == IPA_JF_ANCESTOR)
3168 {
3169 if (item->jftype == IPA_JF_PASS_THROUGH)
3170 {
3171 if (!src->value.ancestor.offset)
3172 item->value.pass_through.formal_id
3173 = src->value.ancestor.formal_id;
3174 }
3175 else if (src->value.ancestor.agg_preserved)
3176 {
3177 gcc_checking_assert (item->value.load_agg.by_ref)((void)(!(item->value.load_agg.by_ref) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 3177, __FUNCTION__), 0 : 0))
;
3178
3179 item->value.pass_through.formal_id
3180 = src->value.ancestor.formal_id;
3181 item->value.load_agg.offset
3182 += src->value.ancestor.offset;
3183 }
3184 }
3185
3186 if (item->value.pass_through.formal_id < 0)
3187 item->jftype = IPA_JF_UNKNOWN;
3188 }
3189 }
3190
3191 if (!top)
3192 {
3193 ipa_set_jf_unknown (dst);
3194 continue;
3195 }
3196
3197 if (dst->type == IPA_JF_ANCESTOR)
3198 {
3199 struct ipa_jump_func *src;
3200 int dst_fid = dst->value.ancestor.formal_id;
3201 class ipa_polymorphic_call_context *src_ctx
3202 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
3203
3204 /* Variable number of arguments can cause havoc if we try to access
3205 one that does not exist in the inlined edge. So make sure we
3206 don't. */
3207 if (dst_fid >= ipa_get_cs_argument_count (top))
3208 {
3209 ipa_set_jf_unknown (dst);
3210 continue;
3211 }
3212
3213 src = ipa_get_ith_jump_func (top, dst_fid);
3214
3215 if (src_ctx && !src_ctx->useless_p ())
3216 {
3217 class ipa_polymorphic_call_context ctx = *src_ctx;
3218
3219 /* TODO: Make type preserved safe WRT contexts. */
3220 if (!ipa_get_jf_ancestor_type_preserved (dst))
3221 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
3222 ctx.offset_by (dst->value.ancestor.offset);
3223 if (!ctx.useless_p ())
3224 {
3225 if (!dst_ctx)
3226 {
3227 vec_safe_grow_cleared (args->polymorphic_call_contexts,
3228 count, true);
3229 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
3230 }
3231
3232 dst_ctx->combine_with (ctx);
3233 }
3234 }
3235
3236 /* Parameter and argument in ancestor jump function must be pointer
3237 type, which means access to aggregate must be by-reference. */
3238 gcc_assert (!src->agg.items || src->agg.by_ref)((void)(!(!src->agg.items || src->agg.by_ref) ? fancy_abort
("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 3238, __FUNCTION__), 0 : 0))
;
3239
3240 if (src->agg.items && dst->value.ancestor.agg_preserved)
3241 {
3242 struct ipa_agg_jf_item *item;
3243 int j;
3244
3245 /* Currently we do not produce clobber aggregate jump functions,
3246 replace with merging when we do. */
3247 gcc_assert (!dst->agg.items)((void)(!(!dst->agg.items) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 3247, __FUNCTION__), 0 : 0))
;
3248
3249 dst->agg.items = vec_safe_copy (src->agg.items);
3250 dst->agg.by_ref = src->agg.by_ref;
3251 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)for (j = 0; vec_safe_iterate ((dst->agg.items), (j), &
(item)); ++(j))
3252 item->offset -= dst->value.ancestor.offset;
3253 }
3254
3255 if (src->type == IPA_JF_PASS_THROUGH
3256 && src->value.pass_through.operation == NOP_EXPR)
3257 {
3258 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
3259 dst->value.ancestor.agg_preserved &=
3260 src->value.pass_through.agg_preserved;
3261 }
3262 else if (src->type == IPA_JF_ANCESTOR)
3263 {
3264 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
3265 dst->value.ancestor.offset += src->value.ancestor.offset;
3266 dst->value.ancestor.agg_preserved &=
3267 src->value.ancestor.agg_preserved;
3268 dst->value.ancestor.keep_null |= src->value.ancestor.keep_null;
3269 }
3270 else
3271 ipa_set_jf_unknown (dst);
3272 }
3273 else if (dst->type == IPA_JF_PASS_THROUGH)
3274 {
3275 struct ipa_jump_func *src;
3276 /* We must check range due to calls with variable number of arguments
3277 and we cannot combine jump functions with operations. */
3278 if (dst->value.pass_through.operation == NOP_EXPR
3279 && (top && dst->value.pass_through.formal_id
3280 < ipa_get_cs_argument_count (top)))
3281 {
3282 int dst_fid = dst->value.pass_through.formal_id;
3283 src = ipa_get_ith_jump_func (top, dst_fid);
3284 bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
3285 class ipa_polymorphic_call_context *src_ctx
3286 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
3287
3288 if (src_ctx && !src_ctx->useless_p ())
3289 {
3290 class ipa_polymorphic_call_context ctx = *src_ctx;
3291
3292 /* TODO: Make type preserved safe WRT contexts. */
3293 if (!ipa_get_jf_pass_through_type_preserved (dst))
3294 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
3295 if (!ctx.useless_p ())
3296 {
3297 if (!dst_ctx)
3298 {
3299 vec_safe_grow_cleared (args->polymorphic_call_contexts,
3300 count, true);
3301 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
3302 }
3303 dst_ctx->combine_with (ctx);
3304 }
3305 }
3306 switch (src->type)
3307 {
3308 case IPA_JF_UNKNOWN:
3309 ipa_set_jf_unknown (dst);
3310 break;
3311 case IPA_JF_CONST:
3312 ipa_set_jf_cst_copy (dst, src);
3313 break;
3314
3315 case IPA_JF_PASS_THROUGH:
3316 {
3317 int formal_id = ipa_get_jf_pass_through_formal_id (src);
3318 enum tree_code operation;
3319 operation = ipa_get_jf_pass_through_operation (src);
3320
3321 if (operation == NOP_EXPR)
3322 {
3323 bool agg_p;
3324 agg_p = dst_agg_p
3325 && ipa_get_jf_pass_through_agg_preserved (src);
3326 ipa_set_jf_simple_pass_through (dst, formal_id, agg_p);
3327 }
3328 else if (TREE_CODE_CLASS (operation)tree_code_type_tmpl <0>::tree_code_type[(int) (operation
)]
== tcc_unary)
3329 ipa_set_jf_unary_pass_through (dst, formal_id, operation);
3330 else
3331 {
3332 tree operand = ipa_get_jf_pass_through_operand (src);
3333 ipa_set_jf_arith_pass_through (dst, formal_id, operand,
3334 operation);
3335 }
3336 break;
3337 }
3338 case IPA_JF_ANCESTOR:
3339 {
3340 bool agg_p;
3341 agg_p = dst_agg_p
3342 && ipa_get_jf_ancestor_agg_preserved (src);
3343 ipa_set_ancestor_jf (dst,
3344 ipa_get_jf_ancestor_offset (src),
3345 ipa_get_jf_ancestor_formal_id (src),
3346 agg_p,
3347 ipa_get_jf_ancestor_keep_null (src));
3348 break;
3349 }
3350 default:
3351 gcc_unreachable ()(fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 3351, __FUNCTION__))
;
3352 }
3353
3354 if (src->agg.items
3355 && (dst_agg_p || !src->agg.by_ref))
3356 {
3357 /* Currently we do not produce clobber aggregate jump
3358 functions, replace with merging when we do. */
3359 gcc_assert (!dst->agg.items)((void)(!(!dst->agg.items) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 3359, __FUNCTION__), 0 : 0))
;
3360
3361 dst->agg.by_ref = src->agg.by_ref;
3362 dst->agg.items = vec_safe_copy (src->agg.items);
3363 }
3364 }
3365 else
3366 ipa_set_jf_unknown (dst);
3367 }
3368 }
3369}
3370
3371/* If TARGET is an addr_expr of a function declaration, make it the
3372 (SPECULATIVE)destination of an indirect edge IE and return the edge.
3373 Otherwise, return NULL. */
3374
3375struct cgraph_edge *
3376ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target,
3377 bool speculative)
3378{
3379 struct cgraph_node *callee;
3380 bool unreachable = false;
3381
3382 if (TREE_CODE (target)((enum tree_code) (target)->base.code) == ADDR_EXPR)
3383 target = TREE_OPERAND (target, 0)(*((const_cast<tree*> (tree_operand_check ((target), (0
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 3383, __FUNCTION__)))))
;
3384 if (TREE_CODE (target)((enum tree_code) (target)->base.code) != FUNCTION_DECL)
3385 {
3386 target = canonicalize_constructor_val (target, NULLnullptr);
3387 if (!target || TREE_CODE (target)((enum tree_code) (target)->base.code) != FUNCTION_DECL)
3388 {
3389 /* Member pointer call that goes through a VMT lookup. */
3390 if (ie->indirect_info->member_ptr
3391 /* Or if target is not an invariant expression and we do not
3392 know if it will evaulate to function at runtime.
3393 This can happen when folding through &VAR, where &VAR
3394 is IP invariant, but VAR itself is not.
3395
3396 TODO: Revisit this when GCC 5 is branched. It seems that
3397 member_ptr check is not needed and that we may try to fold
3398 the expression and see if VAR is readonly. */
3399 || !is_gimple_ip_invariant (target))
3400 {
3401 if (dump_enabled_p ())
3402 {
3403 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
3404 "discovered direct call non-invariant %s\n",
3405 ie->caller->dump_name ());
3406 }
3407 return NULLnullptr;
3408 }
3409
3410
3411 if (dump_enabled_p ())
3412 {
3413 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
3414 "discovered direct call to non-function in %s, "
3415 "making it __builtin_unreachable\n",
3416 ie->caller->dump_name ());
3417 }
3418
3419 target = builtin_decl_unreachable ();
3420 callee = cgraph_node::get_create (target);
3421 unreachable = true;
3422 }
3423 else
3424 callee = cgraph_node::get (target);
3425 }
3426 else
3427 callee = cgraph_node::get (target);
3428
3429 /* Because may-edges are not explicitely represented and vtable may be external,
3430 we may create the first reference to the object in the unit. */
3431 if (!callee || callee->inlined_to)
3432 {
3433
3434 /* We are better to ensure we can refer to it.
3435 In the case of static functions we are out of luck, since we already
3436 removed its body. In the case of public functions we may or may
3437 not introduce the reference. */
3438 if (!canonicalize_constructor_val (target, NULLnullptr)
3439 || !TREE_PUBLIC (target)((target)->base.public_flag))
3440 {
3441 if (dump_file)
3442 fprintf (dump_file, "ipa-prop: Discovered call to a known target "
3443 "(%s -> %s) but cannot refer to it. Giving up.\n",
3444 ie->caller->dump_name (),
3445 ie->callee->dump_name ());
3446 return NULLnullptr;
3447 }
3448 callee = cgraph_node::get_create (target);
3449 }
3450
3451 /* If the edge is already speculated. */
3452 if (speculative && ie->speculative)
3453 {
3454 if (dump_file)
3455 {
3456 cgraph_edge *e2 = ie->speculative_call_for_target (callee);
3457 if (!e2)
3458 {
3459 if (dump_file)
3460 fprintf (dump_file, "ipa-prop: Discovered call to a "
3461 "speculative target (%s -> %s) but the call is "
3462 "already speculated to different target. "
3463 "Giving up.\n",
3464 ie->caller->dump_name (), callee->dump_name ());
3465 }
3466 else
3467 {
3468 if (dump_file)
3469 fprintf (dump_file,
3470 "ipa-prop: Discovered call to a speculative target "
3471 "(%s -> %s) this agree with previous speculation.\n",
3472 ie->caller->dump_name (), callee->dump_name ());
3473 }
3474 }
3475 return NULLnullptr;
3476 }
3477
3478 if (!dbg_cnt (devirt))
3479 return NULLnullptr;
3480
3481 ipa_check_create_node_params ();
3482
3483 /* We cannot make edges to inline clones. It is bug that someone removed
3484 the cgraph node too early. */
3485 gcc_assert (!callee->inlined_to)((void)(!(!callee->inlined_to) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 3485, __FUNCTION__), 0 : 0))
;
3486
3487 if (dump_file && !unreachable)
3488 {
3489 fprintf (dump_file, "ipa-prop: Discovered %s call to a %s target "
3490 "(%s -> %s), for stmt ",
3491 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
3492 speculative ? "speculative" : "known",
3493 ie->caller->dump_name (),
3494 callee->dump_name ());
3495 if (ie->call_stmt)
3496 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
3497 else
3498 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
3499 }
3500 if (dump_enabled_p ())
3501 {
3502 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
3503 "converting indirect call in %s to direct call to %s\n",
3504 ie->caller->dump_name (), callee->dump_name ());
3505 }
3506 if (!speculative)
3507 {
3508 struct cgraph_edge *orig = ie;
3509 ie = cgraph_edge::make_direct (ie, callee);
3510 /* If we resolved speculative edge the cost is already up to date
3511 for direct call (adjusted by inline_edge_duplication_hook). */
3512 if (ie == orig)
3513 {
3514 ipa_call_summary *es = ipa_call_summaries->get (ie);
3515 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
3516 - eni_size_weights.call_cost);
3517 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
3518 - eni_time_weights.call_cost);
3519 }
3520 }
3521 else
3522 {
3523 if (!callee->can_be_discarded_p ())
3524 {
3525 cgraph_node *alias;
3526 alias = dyn_cast<cgraph_node *> (callee->noninterposable_alias ());
3527 if (alias)
3528 callee = alias;
3529 }
3530 /* make_speculative will update ie's cost to direct call cost. */
3531 ie = ie->make_speculative
3532 (callee, ie->count.apply_scale (8, 10));
3533 }
3534
3535 return ie;
3536}
3537
3538/* Attempt to locate an interprocedural constant at a given REQ_OFFSET in
3539 CONSTRUCTOR and return it. Return NULL if the search fails for some
3540 reason. */
3541
3542static tree
3543find_constructor_constant_at_offset (tree constructor, HOST_WIDE_INTlong req_offset)
3544{
3545 tree type = TREE_TYPE (constructor)((contains_struct_check ((constructor), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 3545, __FUNCTION__))->typed.type)
;
3546 if (TREE_CODE (type)((enum tree_code) (type)->base.code) != ARRAY_TYPE
3547 && TREE_CODE (type)((enum tree_code) (type)->base.code) != RECORD_TYPE)
3548 return NULLnullptr;
3549
3550 unsigned ix;
3551 tree index, val;
3552 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (constructor), ix, index, val)for (ix = 0; (ix >= vec_safe_length (((tree_check ((constructor
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 3552, __FUNCTION__, (CONSTRUCTOR)))->constructor.elts)))
? false : (((void) (val = (*((tree_check ((constructor), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 3552, __FUNCTION__, (CONSTRUCTOR)))->constructor.elts))[
ix].value)), (index = (*((tree_check ((constructor), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 3552, __FUNCTION__, (CONSTRUCTOR)))->constructor.elts))[
ix].index), true); (ix)++)
3553 {
3554 HOST_WIDE_INTlong elt_offset;
3555 if (TREE_CODE (type)((enum tree_code) (type)->base.code) == ARRAY_TYPE)
3556 {
3557 offset_int off;
3558 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (type))((tree_class_check ((((contains_struct_check ((type), (TS_TYPED
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 3558, __FUNCTION__))->typed.type)), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 3558, __FUNCTION__))->type_common.size_unit)
;
3559 gcc_assert (TREE_CODE (unit_size) == INTEGER_CST)((void)(!(((enum tree_code) (unit_size)->base.code) == INTEGER_CST
) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 3559, __FUNCTION__), 0 : 0))
;
3560
3561 if (index)
3562 {
3563 if (TREE_CODE (index)((enum tree_code) (index)->base.code) == RANGE_EXPR)
3564 off = wi::to_offset (TREE_OPERAND (index, 0)(*((const_cast<tree*> (tree_operand_check ((index), (0)
, "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 3564, __FUNCTION__)))))
);
3565 else
3566 off = wi::to_offset (index);
3567 if (TYPE_DOMAIN (type)((tree_check ((type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 3567, __FUNCTION__, (ARRAY_TYPE)))->type_non_common.values
)
&& TYPE_MIN_VALUE (TYPE_DOMAIN (type))((tree_check5 ((((tree_check ((type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 3567, __FUNCTION__, (ARRAY_TYPE)))->type_non_common.values
)), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 3567, __FUNCTION__, (INTEGER_TYPE), (ENUMERAL_TYPE), (BOOLEAN_TYPE
), (REAL_TYPE), (FIXED_POINT_TYPE)))->type_non_common.minval
)
)
3568 {
3569 tree low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (type))((tree_check5 ((((tree_check ((type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 3569, __FUNCTION__, (ARRAY_TYPE)))->type_non_common.values
)), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 3569, __FUNCTION__, (INTEGER_TYPE), (ENUMERAL_TYPE), (BOOLEAN_TYPE
), (REAL_TYPE), (FIXED_POINT_TYPE)))->type_non_common.minval
)
;
3570 gcc_assert (TREE_CODE (unit_size) == INTEGER_CST)((void)(!(((enum tree_code) (unit_size)->base.code) == INTEGER_CST
) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 3570, __FUNCTION__), 0 : 0))
;
3571 off = wi::sext (off - wi::to_offset (low_bound),
3572 TYPE_PRECISION (TREE_TYPE (index))((tree_class_check ((((contains_struct_check ((index), (TS_TYPED
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 3572, __FUNCTION__))->typed.type)), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 3572, __FUNCTION__))->type_common.precision)
);
3573 }
3574 off *= wi::to_offset (unit_size);
3575 /* ??? Handle more than just the first index of a
3576 RANGE_EXPR. */
3577 }
3578 else
3579 off = wi::to_offset (unit_size) * ix;
3580
3581 off = wi::lshift (off, LOG2_BITS_PER_UNIT3);
3582 if (!wi::fits_shwi_p (off) || wi::neg_p (off))
3583 continue;
3584 elt_offset = off.to_shwi ();
3585 }
3586 else if (TREE_CODE (type)((enum tree_code) (type)->base.code) == RECORD_TYPE)
3587 {
3588 gcc_checking_assert (index && TREE_CODE (index) == FIELD_DECL)((void)(!(index && ((enum tree_code) (index)->base
.code) == FIELD_DECL) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 3588, __FUNCTION__), 0 : 0))
;
3589 if (DECL_BIT_FIELD (index)((tree_check ((index), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 3589, __FUNCTION__, (FIELD_DECL)))->decl_common.decl_flag_1
)
)
3590 continue;
3591 elt_offset = int_bit_position (index);
3592 }
3593 else
3594 gcc_unreachable ()(fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 3594, __FUNCTION__))
;
3595
3596 if (elt_offset > req_offset)
3597 return NULLnullptr;
3598
3599 if (TREE_CODE (val)((enum tree_code) (val)->base.code) == CONSTRUCTOR)
3600 return find_constructor_constant_at_offset (val,
3601 req_offset - elt_offset);
3602
3603 if (elt_offset == req_offset
3604 && is_gimple_reg_type (TREE_TYPE (val)((contains_struct_check ((val), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 3604, __FUNCTION__))->typed.type)
)
3605 && is_gimple_ip_invariant (val))
3606 return val;
3607 }
3608 return NULLnullptr;
3609}
3610
3611/* Check whether SCALAR could be used to look up an aggregate interprocedural
3612 invariant from a static constructor and if so, return it. Otherwise return
3613 NULL. */
3614
3615tree
3616ipa_find_agg_cst_from_init (tree scalar, HOST_WIDE_INTlong offset, bool by_ref)
3617{
3618 if (by_ref)
3619 {
3620 if (TREE_CODE (scalar)((enum tree_code) (scalar)->base.code) != ADDR_EXPR)
3621 return NULLnullptr;
3622 scalar = TREE_OPERAND (scalar, 0)(*((const_cast<tree*> (tree_operand_check ((scalar), (0
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 3622, __FUNCTION__)))))
;
3623 }
3624
3625 if (!VAR_P (scalar)(((enum tree_code) (scalar)->base.code) == VAR_DECL)
3626 || !is_global_var (scalar)
3627 || !TREE_READONLY (scalar)((non_type_check ((scalar), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 3627, __FUNCTION__))->base.readonly_flag)
3628 || !DECL_INITIAL (scalar)((contains_struct_check ((scalar), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 3628, __FUNCTION__))->decl_common.initial)
3629 || TREE_CODE (DECL_INITIAL (scalar))((enum tree_code) (((contains_struct_check ((scalar), (TS_DECL_COMMON
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 3629, __FUNCTION__))->decl_common.initial))->base.code
)
!= CONSTRUCTOR)
3630 return NULLnullptr;
3631
3632 return find_constructor_constant_at_offset (DECL_INITIAL (scalar)((contains_struct_check ((scalar), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 3632, __FUNCTION__))->decl_common.initial)
, offset);
3633}
3634
3635/* Retrieve value from AGG_JFUNC for the given OFFSET or return NULL if there
3636 is none. BY_REF specifies whether the value has to be passed by reference
3637 or by value. */
3638
3639static tree
3640ipa_find_agg_cst_from_jfunc_items (struct ipa_agg_jump_function *agg_jfunc,
3641 ipa_node_params *src_info,
3642 cgraph_node *src_node,
3643 HOST_WIDE_INTlong offset, bool by_ref)
3644{
3645 if (by_ref != agg_jfunc->by_ref)
3646 return NULL_TREE(tree) nullptr;
3647
3648 for (const ipa_agg_jf_item &item : agg_jfunc->items)
3649 if (item.offset == offset)
3650 return ipa_agg_value_from_jfunc (src_info, src_node, &item);
3651
3652 return NULL_TREE(tree) nullptr;
3653}
3654
3655/* Remove a reference to SYMBOL from the list of references of a node given by
3656 reference description RDESC. Return true if the reference has been
3657 successfully found and removed. */
3658
3659static bool
3660remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
3661{
3662 struct ipa_ref *to_del;
3663 struct cgraph_edge *origin;
3664
3665 origin = rdesc->cs;
3666 if (!origin)
3667 return false;
3668 to_del = origin->caller->find_reference (symbol, origin->call_stmt,
3669 origin->lto_stmt_uid);
3670 if (!to_del)
3671 return false;
3672
3673 to_del->remove_reference ();
3674 if (dump_file)
3675 fprintf (dump_file, "ipa-prop: Removed a reference from %s to %s.\n",
3676 origin->caller->dump_name (), symbol->dump_name ());
3677 return true;
3678}
3679
3680/* If JFUNC has a reference description with refcount different from
3681 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
3682 NULL. JFUNC must be a constant jump function. */
3683
3684static struct ipa_cst_ref_desc *
3685jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
3686{
3687 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
3688 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE-1)
3689 return rdesc;
3690 else
3691 return NULLnullptr;
3692}
3693
3694/* If the value of constant jump function JFUNC is an address of a function
3695 declaration, return the associated call graph node. Otherwise return
3696 NULL. */
3697
3698static symtab_node *
3699symtab_node_for_jfunc (struct ipa_jump_func *jfunc)
3700{
3701 gcc_checking_assert (jfunc->type == IPA_JF_CONST)((void)(!(jfunc->type == IPA_JF_CONST) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 3701, __FUNCTION__), 0 : 0))
;
3702 tree cst = ipa_get_jf_constant (jfunc);
3703 if (TREE_CODE (cst)((enum tree_code) (cst)->base.code) != ADDR_EXPR
3704 || (TREE_CODE (TREE_OPERAND (cst, 0))((enum tree_code) ((*((const_cast<tree*> (tree_operand_check
((cst), (0), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 3704, __FUNCTION__))))))->base.code)
!= FUNCTION_DECL
3705 && TREE_CODE (TREE_OPERAND (cst, 0))((enum tree_code) ((*((const_cast<tree*> (tree_operand_check
((cst), (0), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 3705, __FUNCTION__))))))->base.code)
!= VAR_DECL))
3706 return NULLnullptr;
3707
3708 return symtab_node::get (TREE_OPERAND (cst, 0)(*((const_cast<tree*> (tree_operand_check ((cst), (0), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 3708, __FUNCTION__)))))
);
3709}
3710
3711
3712/* If JFUNC is a constant jump function with a usable rdesc, decrement its
3713 refcount and if it hits zero, remove reference to SYMBOL from the caller of
3714 the edge specified in the rdesc. Return false if either the symbol or the
3715 reference could not be found, otherwise return true. */
3716
3717static bool
3718try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
3719{
3720 struct ipa_cst_ref_desc *rdesc;
3721 if (jfunc->type == IPA_JF_CONST
3722 && (rdesc = jfunc_rdesc_usable (jfunc))
3723 && --rdesc->refcount == 0)
3724 {
3725 symtab_node *symbol = symtab_node_for_jfunc (jfunc);
3726 if (!symbol)
3727 return false;
3728
3729 return remove_described_reference (symbol, rdesc);
3730 }
3731 return true;
3732}
3733
3734/* Try to find a destination for indirect edge IE that corresponds to a simple
3735 call or a call of a member function pointer and where the destination is a
3736 pointer formal parameter described by jump function JFUNC. TARGET_TYPE is
3737 the type of the parameter to which the result of JFUNC is passed. If it can
3738 be determined, return the newly direct edge, otherwise return NULL.
3739 NEW_ROOT and NEW_ROOT_INFO is the node and its info that JFUNC lattices are
3740 relative to. */
3741
3742static struct cgraph_edge *
3743try_make_edge_direct_simple_call (struct cgraph_edge *ie,
3744 struct ipa_jump_func *jfunc, tree target_type,
3745 struct cgraph_node *new_root,
3746 class ipa_node_params *new_root_info)
3747{
3748 struct cgraph_edge *cs;
3749 tree target = NULL_TREE(tree) nullptr;
3750 bool agg_contents = ie->indirect_info->agg_contents;
3751 tree scalar = ipa_value_from_jfunc (new_root_info, jfunc, target_type);
3752 if (agg_contents)
3753 {
3754 if (scalar)
3755 target = ipa_find_agg_cst_from_init (scalar, ie->indirect_info->offset,
3756 ie->indirect_info->by_ref);
3757 if (!target && ie->indirect_info->guaranteed_unmodified)
3758 target = ipa_find_agg_cst_from_jfunc_items (&jfunc->agg, new_root_info,
3759 new_root,
3760 ie->indirect_info->offset,
3761 ie->indirect_info->by_ref);
3762 }
3763 else
3764 target = scalar;
3765 if (!target)
3766 return NULLnullptr;
3767 cs = ipa_make_edge_direct_to_target (ie, target);
3768
3769 if (cs && !agg_contents)
3770 {
3771 bool ok;
3772 gcc_checking_assert (cs->callee((void)(!(cs->callee && (cs != ie || jfunc->type
!= IPA_JF_CONST || !symtab_node_for_jfunc (jfunc) || cs->
callee == symtab_node_for_jfunc (jfunc))) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 3776, __FUNCTION__), 0 : 0))
3773 && (cs != ie((void)(!(cs->callee && (cs != ie || jfunc->type
!= IPA_JF_CONST || !symtab_node_for_jfunc (jfunc) || cs->
callee == symtab_node_for_jfunc (jfunc))) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 3776, __FUNCTION__), 0 : 0))
3774 || jfunc->type != IPA_JF_CONST((void)(!(cs->callee && (cs != ie || jfunc->type
!= IPA_JF_CONST || !symtab_node_for_jfunc (jfunc) || cs->
callee == symtab_node_for_jfunc (jfunc))) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 3776, __FUNCTION__), 0 : 0))
3775 || !symtab_node_for_jfunc (jfunc)((void)(!(cs->callee && (cs != ie || jfunc->type
!= IPA_JF_CONST || !symtab_node_for_jfunc (jfunc) || cs->
callee == symtab_node_for_jfunc (jfunc))) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 3776, __FUNCTION__), 0 : 0))
3776 || cs->callee == symtab_node_for_jfunc (jfunc)))((void)(!(cs->callee && (cs != ie || jfunc->type
!= IPA_JF_CONST || !symtab_node_for_jfunc (jfunc) || cs->
callee == symtab_node_for_jfunc (jfunc))) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 3776, __FUNCTION__), 0 : 0))
;
3777 ok = try_decrement_rdesc_refcount (jfunc);
3778 gcc_checking_assert (ok)((void)(!(ok) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 3778, __FUNCTION__), 0 : 0))
;
3779 }
3780
3781 return cs;
3782}
3783
3784/* Return the target to be used in cases of impossible devirtualization. IE
3785 and target (the latter can be NULL) are dumped when dumping is enabled. */
3786
3787tree
3788ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
3789{
3790 if (dump_file)
3791 {
3792 if (target)
3793 fprintf (dump_file,
3794 "Type inconsistent devirtualization: %s->%s\n",
3795 ie->caller->dump_name (),
3796 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target))((const char *) (tree_check ((decl_assembler_name (target)), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 3796, __FUNCTION__, (IDENTIFIER_NODE)))->identifier.id.str
)
);
3797 else
3798 fprintf (dump_file,
3799 "No devirtualization target in %s\n",
3800 ie->caller->dump_name ());
3801 }
3802 tree new_target = builtin_decl_unreachable ();
3803 cgraph_node::get_create (new_target);
3804 return new_target;
3805}
3806
3807/* Try to find a destination for indirect edge IE that corresponds to a virtual
3808 call based on a formal parameter which is described by jump function JFUNC
3809 and if it can be determined, make it direct and return the direct edge.
3810 Otherwise, return NULL. CTX describes the polymorphic context that the
3811 parameter the call is based on brings along with it. NEW_ROOT and
3812 NEW_ROOT_INFO is the node and its info that JFUNC lattices are relative
3813 to. */
3814
3815static struct cgraph_edge *
3816try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
3817 struct ipa_jump_func *jfunc,
3818 class ipa_polymorphic_call_context ctx,
3819 struct cgraph_node *new_root,
3820 class ipa_node_params *new_root_info)
3821{
3822 tree target = NULLnullptr;
3823 bool speculative = false;
3824
3825 if (!opt_for_fn (ie->caller->decl, flag_devirtualize)(opts_for_fn (ie->caller->decl)->x_flag_devirtualize
)
)
3826 return NULLnullptr;
3827
3828 gcc_assert (!ie->indirect_info->by_ref)((void)(!(!ie->indirect_info->by_ref) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 3828, __FUNCTION__), 0 : 0))
;
3829
3830 /* Try to do lookup via known virtual table pointer value. */
3831 if (!ie->indirect_info->vptr_changed
3832 || opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively)(opts_for_fn (ie->caller->decl)->x_flag_devirtualize_speculatively
)
)
3833 {
3834 tree vtable;
3835 unsigned HOST_WIDE_INTlong offset;
3836 tree t = NULL_TREE(tree) nullptr;
3837 if (jfunc->type == IPA_JF_CONST)
3838 t = ipa_find_agg_cst_from_init (ipa_get_jf_constant (jfunc),
3839 ie->indirect_info->offset, true);
3840 if (!t)
3841 t = ipa_find_agg_cst_from_jfunc_items (&jfunc->agg, new_root_info,
3842 new_root,
3843 ie->indirect_info->offset, true);
3844 if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
3845 {
3846 bool can_refer;
3847 t = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
3848 vtable, offset, &can_refer);
3849 if (can_refer)
3850 {
3851 if (!t
3852 || fndecl_built_in_p (t, BUILT_IN_UNREACHABLE)
3853 || fndecl_built_in_p (t, BUILT_IN_UNREACHABLE_TRAP)
3854 || !possible_polymorphic_call_target_p
3855 (ie, cgraph_node::get (t)))
3856 {
3857 /* Do not speculate builtin_unreachable, it is stupid! */
3858 if (!ie->indirect_info->vptr_changed)
3859 target = ipa_impossible_devirt_target (ie, target);
3860 else
3861 target = NULLnullptr;
3862 }
3863 else
3864 {
3865 target = t;
3866 speculative = ie->indirect_info->vptr_changed;
3867 }
3868 }
3869 }
3870 }
3871
3872 ipa_polymorphic_call_context ie_context (ie);
3873 vec <cgraph_node *>targets;
3874 bool final;
3875
3876 ctx.offset_by (ie->indirect_info->offset);
3877 if (ie->indirect_info->vptr_changed)
3878 ctx.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
3879 ie->indirect_info->otr_type);
3880 ctx.combine_with (ie_context, ie->indirect_info->otr_type);
3881 targets = possible_polymorphic_call_targets
3882 (ie->indirect_info->otr_type,
3883 ie->indirect_info->otr_token,
3884 ctx, &final);
3885 if (final && targets.length () <= 1)
3886 {
3887 speculative = false;
3888 if (targets.length () == 1)
3889 target = targets[0]->decl;
3890 else
3891 target = ipa_impossible_devirt_target (ie, NULL_TREE(tree) nullptr);
3892 }
3893 else if (!target && opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively)(opts_for_fn (ie->caller->decl)->x_flag_devirtualize_speculatively
)
3894 && !ie->speculative && ie->maybe_hot_p ())
3895 {
3896 cgraph_node *n;
3897 n = try_speculative_devirtualization (ie->indirect_info->otr_type,
3898 ie->indirect_info->otr_token,
3899 ie->indirect_info->context);
3900 if (n)
3901 {
3902 target = n->decl;
3903 speculative = true;
3904 }
3905 }
3906
3907 if (target)
3908 {
3909 if (!possible_polymorphic_call_target_p
3910 (ie, cgraph_node::get_create (target)))
3911 {
3912 if (speculative)
3913 return NULLnullptr;
3914 target = ipa_impossible_devirt_target (ie, target);
3915 }
3916 return ipa_make_edge_direct_to_target (ie, target, speculative);
3917 }
3918 else
3919 return NULLnullptr;
3920}
3921
3922/* Update the param called notes associated with NODE when CS is being inlined,
3923 assuming NODE is (potentially indirectly) inlined into CS->callee.
3924 Moreover, if the callee is discovered to be constant, create a new cgraph
3925 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
3926 unless NEW_EDGES is NULL. Return true iff a new edge(s) were created. */
3927
3928static bool
3929update_indirect_edges_after_inlining (struct cgraph_edge *cs,
3930 struct cgraph_node *node,
3931 vec<cgraph_edge *> *new_edges)
3932{
3933 class ipa_edge_args *top;
3934 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
3935 struct cgraph_node *new_root;
3936 class ipa_node_params *new_root_info, *inlined_node_info;
3937 bool res = false;
3938
3939 ipa_check_create_edge_args ();
3940 top = ipa_edge_args_sum->get (cs);
3941 new_root = cs->caller->inlined_to
3942 ? cs->caller->inlined_to : cs->caller;
3943 new_root_info = ipa_node_params_sum->get (new_root);
3944 inlined_node_info = ipa_node_params_sum->get (cs->callee->function_symbol ());
3945
3946 for (ie = node->indirect_calls; ie; ie = next_ie)
3947 {
3948 class cgraph_indirect_call_info *ici = ie->indirect_info;
3949 struct ipa_jump_func *jfunc;
3950 int param_index;
3951
3952 next_ie = ie->next_callee;
3953
3954 if (ici->param_index == -1)
3955 continue;
3956
3957 /* We must check range due to calls with variable number of arguments: */
3958 if (!top || ici->param_index >= ipa_get_cs_argument_count (top))
3959 {
3960 ici->param_index = -1;
3961 continue;
3962 }
3963
3964 param_index = ici->param_index;
3965 jfunc = ipa_get_ith_jump_func (top, param_index);
3966
3967 auto_vec<cgraph_node *, 4> spec_targets;
3968 if (ie->speculative)
3969 for (cgraph_edge *direct = ie->first_speculative_call_target ();
3970 direct;
3971 direct = direct->next_speculative_call_target ())
3972 spec_targets.safe_push (direct->callee);
3973
3974 if (!opt_for_fn (node->decl, flag_indirect_inlining)(opts_for_fn (node->decl)->x_flag_indirect_inlining))
3975 new_direct_edge = NULLnullptr;
3976 else if (ici->polymorphic)
3977 {
3978 ipa_polymorphic_call_context ctx;
3979 ctx = ipa_context_from_jfunc (new_root_info, cs, param_index, jfunc);
3980 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc, ctx,
3981 new_root,
3982 new_root_info);
3983 }
3984 else
3985 {
3986 tree target_type = ipa_get_type (inlined_node_info, param_index);
3987 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
3988 target_type,
3989 new_root,
3990 new_root_info);
3991 }
3992
3993 /* If speculation was removed, then we need to do nothing. */
3994 if (new_direct_edge && new_direct_edge != ie
3995 && spec_targets.contains (new_direct_edge->callee))
3996 {
3997 new_direct_edge->indirect_inlining_edge = 1;
3998 res = true;
3999 if (!new_direct_edge->speculative)
4000 continue;
4001 }
4002 else if (new_direct_edge)
4003 {
4004 new_direct_edge->indirect_inlining_edge = 1;
4005 if (new_edges)
4006 {
4007 new_edges->safe_push (new_direct_edge);
4008 res = true;
4009 }
4010 /* If speculative edge was introduced we still need to update
4011 call info of the indirect edge. */
4012 if (!new_direct_edge->speculative)
4013 continue;
4014 }
4015 if (jfunc->type == IPA_JF_PASS_THROUGH
4016 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
4017 {
4018 if (ici->agg_contents
4019 && !ipa_get_jf_pass_through_agg_preserved (jfunc)
4020 && !ici->polymorphic)
4021 ici->param_index = -1;
4022 else
4023 {
4024 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
4025 if (ici->polymorphic
4026 && !ipa_get_jf_pass_through_type_preserved (jfunc))
4027 ici->vptr_changed = true;
4028 ipa_set_param_used_by_indirect_call (new_root_info,
4029 ici->param_index, true);
4030 if (ici->polymorphic)
4031 ipa_set_param_used_by_polymorphic_call (new_root_info,
4032 ici->param_index, true);
4033 }
4034 }
4035 else if (jfunc->type == IPA_JF_ANCESTOR)
4036 {
4037 if (ici->agg_contents
4038 && !ipa_get_jf_ancestor_agg_preserved (jfunc)
4039 && !ici->polymorphic)
4040 ici->param_index = -1;
4041 else
4042 {
4043 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
4044 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
4045 if (ici->polymorphic
4046 && !ipa_get_jf_ancestor_type_preserved (jfunc))
4047 ici->vptr_changed = true;
4048 ipa_set_param_used_by_indirect_call (new_root_info,
4049 ici->param_index, true);
4050 if (ici->polymorphic)
4051 ipa_set_param_used_by_polymorphic_call (new_root_info,
4052 ici->param_index, true);
4053 }
4054 }
4055 else
4056 /* Either we can find a destination for this edge now or never. */
4057 ici->param_index = -1;
4058 }
4059
4060 return res;
4061}
4062
4063/* Recursively traverse subtree of NODE (including node) made of inlined
4064 cgraph_edges when CS has been inlined and invoke
4065 update_indirect_edges_after_inlining on all nodes and
4066 update_jump_functions_after_inlining on all non-inlined edges that lead out
4067 of this subtree. Newly discovered indirect edges will be added to
4068 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were
4069 created. */
4070
4071static bool
4072propagate_info_to_inlined_callees (struct cgraph_edge *cs,
4073 struct cgraph_node *node,
4074 vec<cgraph_edge *> *new_edges)
4075{
4076 struct cgraph_edge *e;
4077 bool res;
4078
4079 res = update_indirect_edges_after_inlining (cs, node, new_edges);
4080
4081 for (e = node->callees; e; e = e->next_callee)
4082 if (!e->inline_failed)
4083 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
4084 else
4085 update_jump_functions_after_inlining (cs, e);
4086 for (e = node->indirect_calls; e; e = e->next_callee)
4087 update_jump_functions_after_inlining (cs, e);
4088
4089 return res;
4090}
4091
4092/* Combine two controlled uses counts as done during inlining. */
4093
4094static int
4095combine_controlled_uses_counters (int c, int d)
4096{
4097 if (c == IPA_UNDESCRIBED_USE-1 || d == IPA_UNDESCRIBED_USE-1)
4098 return IPA_UNDESCRIBED_USE-1;
4099 else
4100 return c + d - 1;
4101}
4102
4103/* Propagate number of controlled users from CS->callee to the new root of the
4104 tree of inlined nodes. */
4105
4106static void
4107propagate_controlled_uses (struct cgraph_edge *cs)
4108{
4109 ipa_edge_args *args = ipa_edge_args_sum->get (cs);
4110 if (!args)
4111 return;
4112 struct cgraph_node *new_root = cs->caller->inlined_to
4113 ? cs->caller->inlined_to : cs->caller;
4114 ipa_node_params *new_root_info = ipa_node_params_sum->get (new_root);
4115 ipa_node_params *old_root_info = ipa_node_params_sum->get (cs->callee);
4116 int count, i;
4117
4118 if (!old_root_info)
4119 return;
4120
4121 count = MIN (ipa_get_cs_argument_count (args),((ipa_get_cs_argument_count (args)) < (ipa_get_param_count
(old_root_info)) ? (ipa_get_cs_argument_count (args)) : (ipa_get_param_count
(old_root_info)))
4122 ipa_get_param_count (old_root_info))((ipa_get_cs_argument_count (args)) < (ipa_get_param_count
(old_root_info)) ? (ipa_get_cs_argument_count (args)) : (ipa_get_param_count
(old_root_info)))
;
4123 for (i = 0; i < count; i++)
4124 {
4125 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
4126 struct ipa_cst_ref_desc *rdesc;
4127
4128 if (jf->type == IPA_JF_PASS_THROUGH)
4129 {
4130 int src_idx, c, d;
4131 src_idx = ipa_get_jf_pass_through_formal_id (jf);
4132 c = ipa_get_controlled_uses (new_root_info, src_idx);
4133 d = ipa_get_controlled_uses (old_root_info, i);
4134
4135 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)((void)(!(ipa_get_jf_pass_through_operation (jf) == NOP_EXPR ||
c == -1) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 4136, __FUNCTION__), 0 : 0))
4136 == NOP_EXPR || c == IPA_UNDESCRIBED_USE)((void)(!(ipa_get_jf_pass_through_operation (jf) == NOP_EXPR ||
c == -1) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 4136, __FUNCTION__), 0 : 0))
;
4137 c = combine_controlled_uses_counters (c, d);
4138 ipa_set_controlled_uses (new_root_info, src_idx, c);
4139 bool lderef = true;
4140 if (c != IPA_UNDESCRIBED_USE-1)
4141 {
4142 lderef = (ipa_get_param_load_dereferenced (new_root_info, src_idx)
4143 || ipa_get_param_load_dereferenced (old_root_info, i));
4144 ipa_set_param_load_dereferenced (new_root_info, src_idx, lderef);
4145 }
4146
4147 if (c == 0 && !lderef && new_root_info->ipcp_orig_node)
4148 {
4149 struct cgraph_node *n;
4150 struct ipa_ref *ref;
4151 tree t = new_root_info->known_csts[src_idx];
4152
4153 if (t && TREE_CODE (t)((enum tree_code) (t)->base.code) == ADDR_EXPR
4154 && TREE_CODE (TREE_OPERAND (t, 0))((enum tree_code) ((*((const_cast<tree*> (tree_operand_check
((t), (0), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 4154, __FUNCTION__))))))->base.code)
== FUNCTION_DECL
4155 && (n = cgraph_node::get (TREE_OPERAND (t, 0)(*((const_cast<tree*> (tree_operand_check ((t), (0), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 4155, __FUNCTION__)))))
))
4156 && (ref = new_root->find_reference (n, NULLnullptr, 0)))
4157 {
4158 if (dump_file)
4159 fprintf (dump_file, "ipa-prop: Removing cloning-created "
4160 "reference from %s to %s.\n",
4161 new_root->dump_name (),
4162 n->dump_name ());
4163 ref->remove_reference ();
4164 }
4165 }
4166 }
4167 else if (jf->type == IPA_JF_CONST
4168 && (rdesc = jfunc_rdesc_usable (jf)))
4169 {
4170 int d = ipa_get_controlled_uses (old_root_info, i);
4171 int c = rdesc->refcount;
4172 tree cst = ipa_get_jf_constant (jf);
4173 rdesc->refcount = combine_controlled_uses_counters (c, d);
4174 if (rdesc->refcount != IPA_UNDESCRIBED_USE-1
4175 && ipa_get_param_load_dereferenced (old_root_info, i)
4176 && TREE_CODE (cst)((enum tree_code) (cst)->base.code) == ADDR_EXPR
4177 && TREE_CODE (TREE_OPERAND (cst, 0))((enum tree_code) ((*((const_cast<tree*> (tree_operand_check
((cst), (0), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 4177, __FUNCTION__))))))->base.code)
== VAR_DECL)
4178 {
4179 symtab_node *n = symtab_node::get (TREE_OPERAND (cst, 0)(*((const_cast<tree*> (tree_operand_check ((cst), (0), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 4179, __FUNCTION__)))))
);
4180 new_root->create_reference (n, IPA_REF_LOAD, NULLnullptr);
4181 if (dump_file)
4182 fprintf (dump_file, "ipa-prop: Address IPA constant will reach "
4183 "a load so adding LOAD reference from %s to %s.\n",
4184 new_root->dump_name (), n->dump_name ());
4185 }
4186 if (rdesc->refcount == 0)
4187 {
4188 gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR((void)(!(((enum tree_code) (cst)->base.code) == ADDR_EXPR
&& ((((enum tree_code) ((*((const_cast<tree*> (
tree_operand_check ((cst), (0), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 4189, __FUNCTION__))))))->base.code) == FUNCTION_DECL) ||
(((enum tree_code) ((*((const_cast<tree*> (tree_operand_check
((cst), (0), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 4191, __FUNCTION__))))))->base.code) == VAR_DECL))) ? fancy_abort
("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 4192, __FUNCTION__), 0 : 0))
4189 && ((TREE_CODE (TREE_OPERAND (cst, 0))((void)(!(((enum tree_code) (cst)->base.code) == ADDR_EXPR
&& ((((enum tree_code) ((*((const_cast<tree*> (
tree_operand_check ((cst), (0), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 4189, __FUNCTION__))))))->base.code) == FUNCTION_DECL) ||
(((enum tree_code) ((*((const_cast<tree*> (tree_operand_check
((cst), (0), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 4191, __FUNCTION__))))))->base.code) == VAR_DECL))) ? fancy_abort
("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 4192, __FUNCTION__), 0 : 0))
4190 == FUNCTION_DECL)((void)(!(((enum tree_code) (cst)->base.code) == ADDR_EXPR
&& ((((enum tree_code) ((*((const_cast<tree*> (
tree_operand_check ((cst), (0), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 4189, __FUNCTION__))))))->base.code) == FUNCTION_DECL) ||
(((enum tree_code) ((*((const_cast<tree*> (tree_operand_check
((cst), (0), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 4191, __FUNCTION__))))))->base.code) == VAR_DECL))) ? fancy_abort
("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 4192, __FUNCTION__), 0 : 0))
4191 || (TREE_CODE (TREE_OPERAND (cst, 0))((void)(!(((enum tree_code) (cst)->base.code) == ADDR_EXPR
&& ((((enum tree_code) ((*((const_cast<tree*> (
tree_operand_check ((cst), (0), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 4189, __FUNCTION__))))))->base.code) == FUNCTION_DECL) ||
(((enum tree_code) ((*((const_cast<tree*> (tree_operand_check
((cst), (0), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 4191, __FUNCTION__))))))->base.code) == VAR_DECL))) ? fancy_abort
("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 4192, __FUNCTION__), 0 : 0))
4192 == VAR_DECL)))((void)(!(((enum tree_code) (cst)->base.code) == ADDR_EXPR
&& ((((enum tree_code) ((*((const_cast<tree*> (
tree_operand_check ((cst), (0), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 4189, __FUNCTION__))))))->base.code) == FUNCTION_DECL) ||
(((enum tree_code) ((*((const_cast<tree*> (tree_operand_check
((cst), (0), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 4191, __FUNCTION__))))))->base.code) == VAR_DECL))) ? fancy_abort
("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 4192, __FUNCTION__), 0 : 0))
;
4193
4194 symtab_node *n = symtab_node::get (TREE_OPERAND (cst, 0)(*((const_cast<tree*> (tree_operand_check ((cst), (0), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 4194, __FUNCTION__)))))
);
4195 if (n)
4196 {
4197 remove_described_reference (n, rdesc);
4198 cgraph_node *clone = cs->caller;
4199 while (clone->inlined_to
4200 && clone->ipcp_clone
4201 && clone != rdesc->cs->caller)
4202 {
4203 struct ipa_ref *ref;
4204 ref = clone->find_reference (n, NULLnullptr, 0);
4205 if (ref)
4206 {
4207 if (dump_file)
4208 fprintf (dump_file, "ipa-prop: Removing "
4209 "cloning-created reference "
4210 "from %s to %s.\n",
4211 clone->dump_name (),
4212 n->dump_name ());
4213 ref->remove_reference ();
4214 }
4215 clone = clone->callers->caller;
4216 }
4217 }
4218 }
4219 }
4220 }
4221
4222 for (i = ipa_get_param_count (old_root_info);
4223 i < ipa_get_cs_argument_count (args);
4224 i++)
4225 {
4226 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
4227
4228 if (jf->type == IPA_JF_CONST)
4229 {
4230 struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
4231 if (rdesc)
4232 rdesc->refcount = IPA_UNDESCRIBED_USE-1;
4233 }
4234 else if (jf->type == IPA_JF_PASS_THROUGH)
4235 ipa_set_controlled_uses (new_root_info,
4236 jf->value.pass_through.formal_id,
4237 IPA_UNDESCRIBED_USE-1);
4238 }
4239}
4240
4241/* Update jump functions and call note functions on inlining the call site CS.
4242 CS is expected to lead to a node already cloned by
4243 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
4244 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were
4245 created. */
4246
4247bool
4248ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
4249 vec<cgraph_edge *> *new_edges)
4250{
4251 bool changed;
4252 /* Do nothing if the preparation phase has not been carried out yet
4253 (i.e. during early inlining). */
4254 if (!ipa_node_params_sum)
4255 return false;
4256 gcc_assert (ipa_edge_args_sum)((void)(!(ipa_edge_args_sum) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 4256, __FUNCTION__), 0 : 0))
;
4257
4258 propagate_controlled_uses (cs);
4259 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
4260 ipa_node_params_sum->remove (cs->callee);
4261
4262 ipa_edge_args *args = ipa_edge_args_sum->get (cs);
4263 if (args)
4264 {
4265 bool ok = true;
4266 if (args->jump_functions)
4267 {
4268 struct ipa_jump_func *jf;
4269 int i;
4270 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)for (i = 0; (*args->jump_functions).iterate ((i), &(jf
)); ++(i))
4271 if (jf->type == IPA_JF_CONST
4272 && ipa_get_jf_constant_rdesc (jf))
4273 {
4274 ok = false;
4275 break;
4276 }
4277 }
4278 if (ok)
4279 ipa_edge_args_sum->remove (cs);
4280 }
4281 if (ipcp_transformation_sum)
4282 ipcp_transformation_sum->remove (cs->callee);
4283
4284 return changed;
4285}
4286
4287/* Ensure that array of edge arguments infos is big enough to accommodate a
4288 structure for all edges and reallocates it if not. Also, allocate
4289 associated hash tables if they do not already exist. */
4290
4291void
4292ipa_check_create_edge_args (void)
4293{
4294 if (!ipa_edge_args_sum)
4295 ipa_edge_args_sum
4296 = (new (ggc_alloc_no_dtor<ipa_edge_args_sum_t> ())
4297 ipa_edge_args_sum_t (symtab, true));
4298 if (!ipa_bits_hash_table)
4299 ipa_bits_hash_table = hash_table<ipa_bit_ggc_hash_traits>::create_ggc (37);
4300 if (!ipa_vr_hash_table)
4301 ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
4302}
4303
4304/* Free all ipa_edge structures. */
4305
4306void
4307ipa_free_all_edge_args (void)
4308{
4309 if (!ipa_edge_args_sum)
4310 return;
4311
4312 ggc_delete (ipa_edge_args_sum);
4313 ipa_edge_args_sum = NULLnullptr;
4314}
4315
4316/* Free all ipa_node_params structures. */
4317
4318void
4319ipa_free_all_node_params (void)
4320{
4321 if (ipa_node_params_sum)
4322 ggc_delete (ipa_node_params_sum);
4323 ipa_node_params_sum = NULLnullptr;
4324}
4325
4326/* Initialize IPA CP transformation summary and also allocate any necessary hash
4327 tables if they do not already exist. */
4328
4329void
4330ipcp_transformation_initialize (void)
4331{
4332 if (!ipa_bits_hash_table)
4333 ipa_bits_hash_table = hash_table<ipa_bit_ggc_hash_traits>::create_ggc (37);
4334 if (!ipa_vr_hash_table)
4335 ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
4336 if (ipcp_transformation_sum == NULLnullptr)
4337 {
4338 ipcp_transformation_sum = ipcp_transformation_t::create_ggc (symtab);
4339 ipcp_transformation_sum->disable_insertion_hook ();
4340 }
4341}
4342
4343/* Release the IPA CP transformation summary. */
4344
4345void
4346ipcp_free_transformation_sum (void)
4347{
4348 if (!ipcp_transformation_sum)
4349 return;
4350
4351 ipcp_transformation_sum->~function_summary<ipcp_transformation *> ();
4352 ggc_free (ipcp_transformation_sum);
4353 ipcp_transformation_sum = NULLnullptr;
4354}
4355
4356/* Set the aggregate replacements of NODE to be AGGVALS. */
4357
4358void
4359ipa_set_node_agg_value_chain (struct cgraph_node *node,
4360 vec<ipa_argagg_value, va_gc> *aggs)
4361{
4362 ipcp_transformation_initialize ();
4363 ipcp_transformation *s = ipcp_transformation_sum->get_create (node);
4364 s->m_agg_values = aggs;
4365}
4366
4367/* Hook that is called by cgraph.cc when an edge is removed. Adjust reference
4368 count data structures accordingly. */
4369
4370void
4371ipa_edge_args_sum_t::remove (cgraph_edge *cs, ipa_edge_args *args)
4372{
4373 if (args->jump_functions)
4374 {
4375 struct ipa_jump_func *jf;
4376 int i;
4377 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)for (i = 0; (*args->jump_functions).iterate ((i), &(jf
)); ++(i))
4378 {
4379 struct ipa_cst_ref_desc *rdesc;
4380 try_decrement_rdesc_refcount (jf);
4381 if (jf->type == IPA_JF_CONST
4382 && (rdesc = ipa_get_jf_constant_rdesc (jf))
4383 && rdesc->cs == cs)
4384 rdesc->cs = NULLnullptr;
4385 }
4386 }
4387}
4388
4389/* Method invoked when an edge is duplicated. Copy ipa_edge_args and adjust
4390 reference count data structures accordingly. */
4391
4392void
4393ipa_edge_args_sum_t::duplicate (cgraph_edge *src, cgraph_edge *dst,
4394 ipa_edge_args *old_args, ipa_edge_args *new_args)
4395{
4396 unsigned int i;
4397
4398 new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
4399 if (old_args->polymorphic_call_contexts)
4400 new_args->polymorphic_call_contexts
4401 = vec_safe_copy (old_args->polymorphic_call_contexts);
4402
4403 for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
4404 {
4405 struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
4406 struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
4407
4408 dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
4409
4410 if (src_jf->type == IPA_JF_CONST)
4411 {
4412 struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
4413
4414 if (!src_rdesc)
4415 dst_jf->value.constant.rdesc = NULLnullptr;
4416 else if (src->caller == dst->caller)
4417 {
4418 /* Creation of a speculative edge. If the source edge is the one
4419 grabbing a reference, we must create a new (duplicate)
4420 reference description. Otherwise they refer to the same
4421 description corresponding to a reference taken in a function
4422 src->caller is inlined to. In that case we just must
4423 increment the refcount. */
4424 if (src_rdesc->cs == src)
4425 {
4426 symtab_node *n = symtab_node_for_jfunc (src_jf);
4427 gcc_checking_assert (n)((void)(!(n) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 4427, __FUNCTION__), 0 : 0))
;
4428 ipa_ref *ref
4429 = src->caller->find_reference (n, src->call_stmt,
4430 src->lto_stmt_uid);
4431 gcc_checking_assert (ref)((void)(!(ref) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 4431, __FUNCTION__), 0 : 0))
;
4432 dst->caller->clone_reference (ref, ref->stmt);
4433
4434 ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
4435 dst_rdesc->cs = dst;
4436 dst_rdesc->refcount = src_rdesc->refcount;
4437 dst_rdesc->next_duplicate = NULLnullptr;
4438 dst_jf->value.constant.rdesc = dst_rdesc;
4439 }
4440 else
4441 {
4442 src_rdesc->refcount++;
4443 dst_jf->value.constant.rdesc = src_rdesc;
4444 }
4445 }
4446 else if (src_rdesc->cs == src)
4447 {
4448 struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
4449 dst_rdesc->cs = dst;
4450 dst_rdesc->refcount = src_rdesc->refcount;
4451 dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
4452 src_rdesc->next_duplicate = dst_rdesc;
4453 dst_jf->value.constant.rdesc = dst_rdesc;
4454 }
4455 else
4456 {
4457 struct ipa_cst_ref_desc *dst_rdesc;
4458 /* This can happen during inlining, when a JFUNC can refer to a
4459 reference taken in a function up in the tree of inline clones.
4460 We need to find the duplicate that refers to our tree of
4461 inline clones. */
4462
4463 gcc_assert (dst->caller->inlined_to)((void)(!(dst->caller->inlined_to) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 4463, __FUNCTION__), 0 : 0))
;
4464 for (dst_rdesc = src_rdesc->next_duplicate;
4465 dst_rdesc;
4466 dst_rdesc = dst_rdesc->next_duplicate)
4467 {
4468 struct cgraph_node *top;
4469 top = dst_rdesc->cs->caller->inlined_to
4470 ? dst_rdesc->cs->caller->inlined_to
4471 : dst_rdesc->cs->caller;
4472 if (dst->caller->inlined_to == top)
4473 break;
4474 }
4475 gcc_assert (dst_rdesc)((void)(!(dst_rdesc) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 4475, __FUNCTION__), 0 : 0))
;
4476 dst_jf->value.constant.rdesc = dst_rdesc;
4477 }
4478 }
4479 else if (dst_jf->type == IPA_JF_PASS_THROUGH
4480 && src->caller == dst->caller)
4481 {
4482 struct cgraph_node *inline_root = dst->caller->inlined_to
4483 ? dst->caller->inlined_to : dst->caller;
4484 ipa_node_params *root_info = ipa_node_params_sum->get (inline_root);
4485 int idx = ipa_get_jf_pass_through_formal_id (dst_jf);
4486
4487 int c = ipa_get_controlled_uses (root_info, idx);
4488 if (c != IPA_UNDESCRIBED_USE-1)
4489 {
4490 c++;
4491 ipa_set_controlled_uses (root_info, idx, c);
4492 }
4493 }
4494 }
4495}
4496
4497/* Analyze newly added function into callgraph. */
4498
4499static void
4500ipa_add_new_function (cgraph_node *node, void *data ATTRIBUTE_UNUSED__attribute__ ((__unused__)))
4501{
4502 if (node->has_gimple_body_p ())
4503 ipa_analyze_node (node);
4504}
4505
4506/* Hook that is called by summary when a node is duplicated. */
4507
4508void
4509ipa_node_params_t::duplicate(cgraph_node *, cgraph_node *,
4510 ipa_node_params *old_info,
4511 ipa_node_params *new_info)
4512{
4513 new_info->descriptors = vec_safe_copy (old_info->descriptors);
4514 new_info->lattices = NULLnullptr;
4515 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
4516 new_info->known_csts = old_info->known_csts.copy ();
4517 new_info->known_contexts = old_info->known_contexts.copy ();
4518
4519 new_info->analysis_done = old_info->analysis_done;
4520 new_info->node_enqueued = old_info->node_enqueued;
4521 new_info->versionable = old_info->versionable;
4522}
4523
4524/* Duplication of ipcp transformation summaries. */
4525
4526void
4527ipcp_transformation_t::duplicate(cgraph_node *, cgraph_node *dst,
4528 ipcp_transformation *src_trans,
4529 ipcp_transformation *dst_trans)
4530{
4531 /* Avoid redundant work of duplicating vectors we will never use. */
4532 if (dst->inlined_to)
4533 return;
4534 dst_trans->m_agg_values = vec_safe_copy (src_trans->m_agg_values);
4535 dst_trans->bits = vec_safe_copy (src_trans->bits);
4536 dst_trans->m_vr = vec_safe_copy (src_trans->m_vr);
4537}
4538
4539/* Register our cgraph hooks if they are not already there. */
4540
4541void
4542ipa_register_cgraph_hooks (void)
4543{
4544 ipa_check_create_node_params ();
4545 ipa_check_create_edge_args ();
4546
4547 function_insertion_hook_holder =
4548 symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULLnullptr);
4549}
4550
4551/* Unregister our cgraph hooks if they are not already there. */
4552
4553static void
4554ipa_unregister_cgraph_hooks (void)
4555{
4556 if (function_insertion_hook_holder)
4557 symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
4558 function_insertion_hook_holder = NULLnullptr;
4559}
4560
4561/* Free all ipa_node_params and all ipa_edge_args structures if they are no
4562 longer needed after ipa-cp. */
4563
4564void
4565ipa_free_all_structures_after_ipa_cp (void)
4566{
4567 if (!optimizeglobal_options.x_optimize && !in_lto_pglobal_options.x_in_lto_p)
4568 {
4569 ipa_free_all_edge_args ();
4570 ipa_free_all_node_params ();
4571 ipcp_sources_pool.release ();
4572 ipcp_cst_values_pool.release ();
4573 ipcp_poly_ctx_values_pool.release ();
4574 ipcp_agg_lattice_pool.release ();
4575 ipa_unregister_cgraph_hooks ();
4576 ipa_refdesc_pool.release ();
4577 }
4578}
4579
4580/* Free all ipa_node_params and all ipa_edge_args structures if they are no
4581 longer needed after indirect inlining. */
4582
4583void
4584ipa_free_all_structures_after_iinln (void)
4585{
4586 ipa_free_all_edge_args ();
4587 ipa_free_all_node_params ();
4588 ipa_unregister_cgraph_hooks ();
4589 ipcp_sources_pool.release ();
4590 ipcp_cst_values_pool.release ();
4591 ipcp_poly_ctx_values_pool.release ();
4592 ipcp_agg_lattice_pool.release ();
4593 ipa_refdesc_pool.release ();
4594}
4595
4596/* Print ipa_tree_map data structures of all functions in the
4597 callgraph to F. */
4598
4599void
4600ipa_print_node_params (FILE *f, struct cgraph_node *node)
4601{
4602 int i, count;
4603 class ipa_node_params *info;
4604
4605 if (!node->definition)
4606 return;
4607 info = ipa_node_params_sum->get (node);
4608 fprintf (f, " function %s parameter descriptors:\n", node->dump_name ());
4609 if (!info)
4610 {
4611 fprintf (f, " no params return\n");
4612 return;
4613 }
4614 count = ipa_get_param_count (info);
4615 for (i = 0; i < count; i++)
4616 {
4617 int c;
4618
4619 fprintf (f, " ");
4620 ipa_dump_param (f, info, i);
4621 if (ipa_is_param_used (info, i))
4622 fprintf (f, " used");
4623 if (ipa_is_param_used_by_ipa_predicates (info, i))
4624 fprintf (f, " used_by_ipa_predicates");
4625 if (ipa_is_param_used_by_indirect_call (info, i))
4626 fprintf (f, " used_by_indirect_call");
4627 if (ipa_is_param_used_by_polymorphic_call (info, i))
4628 fprintf (f, " used_by_polymorphic_call");
4629 c = ipa_get_controlled_uses (info, i);
4630 if (c == IPA_UNDESCRIBED_USE-1)
4631 fprintf (f, " undescribed_use");
4632 else
4633 fprintf (f, " controlled_uses=%i %s", c,
4634 ipa_get_param_load_dereferenced (info, i)
4635 ? "(load_dereferenced)" : "");
4636 fprintf (f, "\n");
4637 }
4638}
4639
4640/* Print ipa_tree_map data structures of all functions in the
4641 callgraph to F. */
4642
4643void
4644ipa_print_all_params (FILE * f)
4645{
4646 struct cgraph_node *node;
4647
4648 fprintf (f, "\nFunction parameters:\n");
4649 FOR_EACH_FUNCTION (node)for ((node) = symtab->first_function (); (node); (node) = symtab
->next_function ((node)))
4650 ipa_print_node_params (f, node);
4651}
4652
4653/* Stream out jump function JUMP_FUNC to OB. */
4654
4655static void
4656ipa_write_jump_function (struct output_block *ob,
4657 struct ipa_jump_func *jump_func)
4658{
4659 struct ipa_agg_jf_item *item;
4660 struct bitpack_d bp;
4661 int i, count;
4662 int flag = 0;
4663
4664 /* ADDR_EXPRs are very comon IP invariants; save some streamer data
4665 as well as WPA memory by handling them specially. */
4666 if (jump_func->type == IPA_JF_CONST
4667 && TREE_CODE (jump_func->value.constant.value)((enum tree_code) (jump_func->value.constant.value)->base
.code)
== ADDR_EXPR)
4668 flag = 1;
4669
4670 streamer_write_uhwi (ob, jump_func->type * 2 + flag);
4671 switch (jump_func->type)
4672 {
4673 case IPA_JF_UNKNOWN:
4674 break;
4675 case IPA_JF_CONST:
4676 gcc_assert (((void)(!(((((jump_func->value.constant.value)) &&
((tree_code_type_tmpl <0>::tree_code_type[(int) (((enum
tree_code) ((jump_func->value.constant.value))->base.code
))]) >= tcc_reference && (tree_code_type_tmpl <
0>::tree_code_type[(int) (((enum tree_code) ((jump_func->
value.constant.value))->base.code))]) <= tcc_expression
)) ? (jump_func->value.constant.value)->exp.locus : ((location_t
) 0)) == ((location_t) 0)) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 4677, __FUNCTION__), 0 : 0))
4677 EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION)((void)(!(((((jump_func->value.constant.value)) &&
((tree_code_type_tmpl <0>::tree_code_type[(int) (((enum
tree_code) ((jump_func->value.constant.value))->base.code
))]) >= tcc_reference && (tree_code_type_tmpl <
0>::tree_code_type[(int) (((enum tree_code) ((jump_func->
value.constant.value))->base.code))]) <= tcc_expression
)) ? (jump_func->value.constant.value)->exp.locus : ((location_t
) 0)) == ((location_t) 0)) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 4677, __FUNCTION__), 0 : 0))
;
4678 stream_write_tree (ob,streamer_hooks.write_tree (ob, flag ? (*((const_cast<tree*
> (tree_operand_check ((jump_func->value.constant.value
), (0), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 4680, __FUNCTION__))))) : jump_func->value.constant.value
, true, true)
4679 flagstreamer_hooks.write_tree (ob, flag ? (*((const_cast<tree*
> (tree_operand_check ((jump_func->value.constant.value
), (0), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 4680, __FUNCTION__))))) : jump_func->value.constant.value
, true, true)
4680 ? TREE_OPERAND (jump_func->value.constant.value, 0)streamer_hooks.write_tree (ob, flag ? (*((const_cast<tree*
> (tree_operand_check ((jump_func->value.constant.value
), (0), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 4680, __FUNCTION__))))) : jump_func->value.constant.value
, true, true)
4681 : jump_func->value.constant.value, true)streamer_hooks.write_tree (ob, flag ? (*((const_cast<tree*
> (tree_operand_check ((jump_func->value.constant.value
), (0), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 4680, __FUNCTION__))))) : jump_func->value.constant.value
, true, true)
;
4682 break;
4683 case IPA_JF_PASS_THROUGH:
4684 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
4685 if (jump_func->value.pass_through.operation == NOP_EXPR)
4686 {
4687 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4688 bp = bitpack_create (ob->main_stream);
4689 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
4690 streamer_write_bitpack (&bp);
4691 }
4692 else if (TREE_CODE_CLASS (jump_func->value.pass_through.operation)tree_code_type_tmpl <0>::tree_code_type[(int) (jump_func
->value.pass_through.operation)]
4693 == tcc_unary)
4694 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4695 else
4696 {
4697 stream_write_tree (ob, jump_func->value.pass_through.operand, true)streamer_hooks.write_tree (ob, jump_func->value.pass_through
.operand, true, true)
;
4698 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4699 }
4700 break;
4701 case IPA_JF_ANCESTOR:
4702 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
4703 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
4704 bp = bitpack_create (ob->main_stream);
4705 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
4706 bp_pack_value (&bp, jump_func->value.ancestor.keep_null, 1);
4707 streamer_write_bitpack (&bp);
4708 break;
4709 default:
4710 fatal_error (UNKNOWN_LOCATION((location_t) 0), "invalid jump function in LTO stream");
4711 }
4712
4713 count = vec_safe_length (jump_func->agg.items);
4714 streamer_write_uhwi (ob, count);
4715 if (count)
4716 {
4717 bp = bitpack_create (ob->main_stream);
4718 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
4719 streamer_write_bitpack (&bp);
4720 }
4721
4722 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)for (i = 0; vec_safe_iterate ((jump_func->agg.items), (i),
&(item)); ++(i))
4723 {
4724 stream_write_tree (ob, item->type, true)streamer_hooks.write_tree (ob, item->type, true, true);
4725 streamer_write_uhwi (ob, item->offset);
4726 streamer_write_uhwi (ob, item->jftype);
4727 switch (item->jftype)
4728 {
4729 case IPA_JF_UNKNOWN:
4730 break;
4731 case IPA_JF_CONST:
4732 stream_write_tree (ob, item->value.constant, true)streamer_hooks.write_tree (ob, item->value.constant, true,
true)
;
4733 break;
4734 case IPA_JF_PASS_THROUGH:
4735 case IPA_JF_LOAD_AGG:
4736 streamer_write_uhwi (ob, item->value.pass_through.operation);
4737 streamer_write_uhwi (ob, item->value.pass_through.formal_id);
4738 if (TREE_CODE_CLASS (item->value.pass_through.operation)tree_code_type_tmpl <0>::tree_code_type[(int) (item->
value.pass_through.operation)]
4739 != tcc_unary)
4740 stream_write_tree (ob, item->value.pass_through.operand, true)streamer_hooks.write_tree (ob, item->value.pass_through.operand
, true, true)
;
4741 if (item->jftype == IPA_JF_LOAD_AGG)
4742 {
4743 stream_write_tree (ob, item->value.load_agg.type, true)streamer_hooks.write_tree (ob, item->value.load_agg.type, true
, true)
;
4744 streamer_write_uhwi (ob, item->value.load_agg.offset);
4745 bp = bitpack_create (ob->main_stream);
4746 bp_pack_value (&bp, item->value.load_agg.by_ref, 1);
4747 streamer_write_bitpack (&bp);
4748 }
4749 break;
4750 default:
4751 fatal_error (UNKNOWN_LOCATION((location_t) 0),
4752 "invalid jump function in LTO stream");
4753 }
4754 }
4755
4756 bp = bitpack_create (ob->main_stream);
4757 bp_pack_value (&bp, !!jump_func->bits, 1);
4758 streamer_write_bitpack (&bp);
4759 if (jump_func->bits)
4760 {
4761 streamer_write_widest_int (ob, jump_func->bits->value);
4762 streamer_write_widest_int (ob, jump_func->bits->mask);
4763 }
4764 bp_pack_value (&bp, !!jump_func->m_vr, 1);
4765 streamer_write_bitpack (&bp);
4766 if (jump_func->m_vr)
4767 {
4768 streamer_write_enum (ob->main_stream, value_rang_type,streamer_write_hwi_in_range ((ob->main_stream), 0, (int)(VR_LAST
) - 1, (int)(jump_func->m_vr->kind ()))
4769 VR_LAST, jump_func->m_vr->kind ())streamer_write_hwi_in_range ((ob->main_stream), 0, (int)(VR_LAST
) - 1, (int)(jump_func->m_vr->kind ()))
;
4770 stream_write_tree (ob, jump_func->m_vr->min (), true)streamer_hooks.write_tree (ob, jump_func->m_vr->min (),
true, true)
;
4771 stream_write_tree (ob, jump_func->m_vr->max (), true)streamer_hooks.write_tree (ob, jump_func->m_vr->max (),
true, true)
;
4772 }
4773}
4774
4775/* Read in jump function JUMP_FUNC from IB. */
4776
4777static void
4778ipa_read_jump_function (class lto_input_block *ib,
4779 struct ipa_jump_func *jump_func,
4780 struct cgraph_edge *cs,
4781 class data_in *data_in,
4782 bool prevails)
4783{
4784 enum jump_func_type jftype;
4785 enum tree_code operation;
4786 int i, count;
4787 int val = streamer_read_uhwi (ib);
4788 bool flag = val & 1;
4789
4790 jftype = (enum jump_func_type) (val / 2);
4791 switch (jftype)
4792 {
4793 case IPA_JF_UNKNOWN:
4794 ipa_set_jf_unknown (jump_func);
4795 break;
4796 case IPA_JF_CONST:
4797 {
4798 tree t = stream_read_tree (ib, data_in)streamer_hooks.read_tree (ib, data_in);
4799 if (flag && prevails)
4800 t = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (t)((contains_struct_check ((t), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 4800, __FUNCTION__))->typed.type)
), t);
4801 ipa_set_jf_constant (jump_func, t, cs);
4802 }
4803 break;
4804 case IPA_JF_PASS_THROUGH:
4805 operation = (enum tree_code) streamer_read_uhwi (ib);
4806 if (operation == NOP_EXPR)
4807 {
4808 int formal_id = streamer_read_uhwi (ib);
4809 struct bitpack_d bp = streamer_read_bitpack (ib);
4810 bool agg_preserved = bp_unpack_value (&bp, 1);
4811 ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved);
4812 }
4813 else if (TREE_CODE_CLASS (operation)tree_code_type_tmpl <0>::tree_code_type[(int) (operation
)]
== tcc_unary)
4814 {
4815 int formal_id = streamer_read_uhwi (ib);
4816 ipa_set_jf_unary_pass_through (jump_func, formal_id, operation);
4817 }
4818 else
4819 {
4820 tree operand = stream_read_tree (ib, data_in)streamer_hooks.read_tree (ib, data_in);
4821 int formal_id = streamer_read_uhwi (ib);
4822 ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
4823 operation);
4824 }
4825 break;
4826 case IPA_JF_ANCESTOR:
4827 {
4828 HOST_WIDE_INTlong offset = streamer_read_uhwi (ib);
4829 int formal_id = streamer_read_uhwi (ib);
4830 struct bitpack_d bp = streamer_read_bitpack (ib);
4831 bool agg_preserved = bp_unpack_value (&bp, 1);
4832 bool keep_null = bp_unpack_value (&bp, 1);
4833 ipa_set_ancestor_jf (jump_func, offset, formal_id, agg_preserved,
4834 keep_null);
4835 break;
4836 }
4837 default:
4838 fatal_error (UNKNOWN_LOCATION((location_t) 0), "invalid jump function in LTO stream");
4839 }
4840
4841 count = streamer_read_uhwi (ib);
4842 if (prevails)
4843 {
4844 jump_func->agg.items = NULLnullptr;
4845 vec_safe_reserve (jump_func->agg.items, count, true);
4846 }
4847 if (count)
4848 {
4849 struct bitpack_d bp = streamer_read_bitpack (ib);
4850 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
4851 }
4852 for (i = 0; i < count; i++)
4853 {
4854 struct ipa_agg_jf_item item;
4855 item.type = stream_read_tree (ib, data_in)streamer_hooks.read_tree (ib, data_in);
4856 item.offset = streamer_read_uhwi (ib);
4857 item.jftype = (enum jump_func_type) streamer_read_uhwi (ib);
4858
4859 switch (item.jftype)
4860 {
4861 case IPA_JF_UNKNOWN:
4862 break;
4863 case IPA_JF_CONST:
4864 item.value.constant = stream_read_tree (ib, data_in)streamer_hooks.read_tree (ib, data_in);
4865 break;
4866 case IPA_JF_PASS_THROUGH:
4867 case IPA_JF_LOAD_AGG:
4868 operation = (enum tree_code) streamer_read_uhwi (ib);
4869 item.value.pass_through.operation = operation;
4870 item.value.pass_through.formal_id = streamer_read_uhwi (ib);
4871 if (TREE_CODE_CLASS (operation)tree_code_type_tmpl <0>::tree_code_type[(int) (operation
)]
== tcc_unary)
4872 item.value.pass_through.operand = NULL_TREE(tree) nullptr;
4873 else
4874 item.value.pass_through.operand = stream_read_tree (ib, data_in)streamer_hooks.read_tree (ib, data_in);
4875 if (item.jftype == IPA_JF_LOAD_AGG)
4876 {
4877 struct bitpack_d bp;
4878 item.value.load_agg.type = stream_read_tree (ib, data_in)streamer_hooks.read_tree (ib, data_in);
4879 item.value.load_agg.offset = streamer_read_uhwi (ib);
4880 bp = streamer_read_bitpack (ib);
4881 item.value.load_agg.by_ref = bp_unpack_value (&bp, 1);
4882 }
4883 break;
4884 default:
4885 fatal_error (UNKNOWN_LOCATION((location_t) 0),
4886 "invalid jump function in LTO stream");
4887 }
4888 if (prevails)
4889 jump_func->agg.items->quick_push (item);
4890 }
4891
4892 struct bitpack_d bp = streamer_read_bitpack (ib);
4893 bool bits_known = bp_unpack_value (&bp, 1);
4894 if (bits_known)
4895 {
4896 widest_int value = streamer_read_widest_int (ib);
4897 widest_int mask = streamer_read_widest_int (ib);
4898 if (prevails)
4899 ipa_set_jfunc_bits (jump_func, value, mask);
4900 }
4901 else
4902 jump_func->bits = NULLnullptr;
4903
4904 struct bitpack_d vr_bp = streamer_read_bitpack (ib);
4905 bool vr_known = bp_unpack_value (&vr_bp, 1);
4906 if (vr_known)
4907 {
4908 enum value_range_kind type = streamer_read_enum (ib, value_range_kind,(enum value_range_kind)streamer_read_hwi_in_range ((ib), "value_range_kind"
, 0, (int)(VR_LAST) - 1)
4909 VR_LAST)(enum value_range_kind)streamer_read_hwi_in_range ((ib), "value_range_kind"
, 0, (int)(VR_LAST) - 1)
;
4910 tree min = stream_read_tree (ib, data_in)streamer_hooks.read_tree (ib, data_in);
4911 tree max = stream_read_tree (ib, data_in)streamer_hooks.read_tree (ib, data_in);
4912 if (prevails)
4913 ipa_set_jfunc_vr (jump_func, type, min, max);
4914 }
4915 else
4916 jump_func->m_vr = NULLnullptr;
4917}
4918
4919/* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
4920 relevant to indirect inlining to OB. */
4921
4922static void
4923ipa_write_indirect_edge_info (struct output_block *ob,
4924 struct cgraph_edge *cs)
4925{
4926 class cgraph_indirect_call_info *ii = cs->indirect_info;
4927 struct bitpack_d bp;
4928
4929 streamer_write_hwi (ob, ii->param_index);
4930 bp = bitpack_create (ob->main_stream);
4931 bp_pack_value (&bp, ii->polymorphic, 1);
4932 bp_pack_value (&bp, ii->agg_contents, 1);
4933 bp_pack_value (&bp, ii->member_ptr, 1);
4934 bp_pack_value (&bp, ii->by_ref, 1);
4935 bp_pack_value (&bp, ii->guaranteed_unmodified, 1);
4936 bp_pack_value (&bp, ii->vptr_changed, 1);
4937 streamer_write_bitpack (&bp);
4938 if (ii->agg_contents || ii->polymorphic)
4939 streamer_write_hwi (ob, ii->offset);
4940 else
4941 gcc_assert (ii->offset == 0)((void)(!(ii->offset == 0) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 4941, __FUNCTION__), 0 : 0))
;
4942
4943 if (ii->polymorphic)
4944 {
4945 streamer_write_hwi (ob, ii->otr_token);
4946 stream_write_tree (ob, ii->otr_type, true)streamer_hooks.write_tree (ob, ii->otr_type, true, true);
4947 ii->context.stream_out (ob);
4948 }
4949}
4950
4951/* Read in parts of cgraph_indirect_call_info corresponding to CS that are
4952 relevant to indirect inlining from IB. */
4953
4954static void
4955ipa_read_indirect_edge_info (class lto_input_block *ib,
4956 class data_in *data_in,
4957 struct cgraph_edge *cs,
4958 class ipa_node_params *info)
4959{
4960 class cgraph_indirect_call_info *ii = cs->indirect_info;
4961 struct bitpack_d bp;
4962
4963 ii->param_index = (int) streamer_read_hwi (ib);
4964 bp = streamer_read_bitpack (ib);
4965 ii->polymorphic = bp_unpack_value (&bp, 1);
4966 ii->agg_contents = bp_unpack_value (&bp, 1);
4967 ii->member_ptr = bp_unpack_value (&bp, 1);
4968 ii->by_ref = bp_unpack_value (&bp, 1);
4969 ii->guaranteed_unmodified = bp_unpack_value (&bp, 1);
4970 ii->vptr_changed = bp_unpack_value (&bp, 1);
4971 if (ii->agg_contents || ii->polymorphic)
4972 ii->offset = (HOST_WIDE_INTlong) streamer_read_hwi (ib);
4973 else
4974 ii->offset = 0;
4975 if (ii->polymorphic)
4976 {
4977 ii->otr_token = (HOST_WIDE_INTlong) streamer_read_hwi (ib);
4978 ii->otr_type = stream_read_tree (ib, data_in)streamer_hooks.read_tree (ib, data_in);
4979 ii->context.stream_in (ib, data_in);
4980 }
4981 if (info && ii->param_index >= 0)
4982 {
4983 if (ii->polymorphic)
4984 ipa_set_param_used_by_polymorphic_call (info,
4985 ii->param_index , true);
4986 ipa_set_param_used_by_indirect_call (info,
4987 ii->param_index, true);
4988 }
4989}
4990
4991/* Stream out NODE info to OB. */
4992
4993static void
4994ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
4995{
4996 int node_ref;
4997 lto_symtab_encoder_t encoder;
4998 ipa_node_params *info = ipa_node_params_sum->get (node);
4999 int j;
5000 struct cgraph_edge *e;
5001 struct bitpack_d bp;
5002
5003 encoder = ob->decl_state->symtab_node_encoder;
5004 node_ref = lto_symtab_encoder_encode (encoder, node);
5005 streamer_write_uhwi (ob, node_ref);
5006
5007 streamer_write_uhwi (ob, ipa_get_param_count (info));
5008 for (j = 0; j < ipa_get_param_count (info); j++)
5009 streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
5010 bp = bitpack_create (ob->main_stream);
5011 gcc_assert (info->analysis_done((void)(!(info->analysis_done || ipa_get_param_count (info
) == 0) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 5012, __FUNCTION__), 0 : 0))
5012 || ipa_get_param_count (info) == 0)((void)(!(info->analysis_done || ipa_get_param_count (info
) == 0) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 5012, __FUNCTION__), 0 : 0))
;
5013 gcc_assert (!info->node_enqueued)((void)(!(!info->node_enqueued) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 5013, __FUNCTION__), 0 : 0))
;
5014 gcc_assert (!info->ipcp_orig_node)((void)(!(!info->ipcp_orig_node) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 5014, __FUNCTION__), 0 : 0))
;
5015 for (j = 0; j < ipa_get_param_count (info); j++)
5016 {
5017 /* TODO: We could just not stream the bit in the undescribed case. */
5018 bool d = (ipa_get_controlled_uses (info, j) != IPA_UNDESCRIBED_USE-1)
5019 ? ipa_get_param_load_dereferenced (info, j) : true;
5020 bp_pack_value (&bp, d, 1);
5021 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
5022 }
5023 streamer_write_bitpack (&bp);
5024 for (j = 0; j < ipa_get_param_count (info); j++)
5025 {
5026 streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
5027 stream_write_tree (ob, ipa_get_type (info, j), true)streamer_hooks.write_tree (ob, ipa_get_type (info, j), true, true
)
;
5028 }
5029 for (e = node->callees; e; e = e->next_callee)
5030 {
5031 ipa_edge_args *args = ipa_edge_args_sum->get (e);
5032
5033 if (!args)
5034 {
5035 streamer_write_uhwi (ob, 0);
5036 continue;
5037 }
5038
5039 streamer_write_uhwi (ob,
5040 ipa_get_cs_argument_count (args) * 2
5041 + (args->polymorphic_call_contexts != NULLnullptr));
5042 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
5043 {
5044 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
5045 if (args->polymorphic_call_contexts != NULLnullptr)
5046 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
5047 }
5048 }
5049 for (e = node->indirect_calls; e; e = e->next_callee)
5050 {
5051 ipa_edge_args *args = ipa_edge_args_sum->get (e);
5052 if (!args)
5053 streamer_write_uhwi (ob, 0);
5054 else
5055 {
5056 streamer_write_uhwi (ob,
5057 ipa_get_cs_argument_count (args) * 2
5058 + (args->polymorphic_call_contexts != NULLnullptr));
5059 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
5060 {
5061 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
5062 if (args->polymorphic_call_contexts != NULLnullptr)
5063 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
5064 }
5065 }
5066 ipa_write_indirect_edge_info (ob, e);
5067 }
5068}
5069
5070/* Stream in edge E from IB. */
5071
5072static void
5073ipa_read_edge_info (class lto_input_block *ib,
5074 class data_in *data_in,
5075 struct cgraph_edge *e, bool prevails)
5076{
5077 int count = streamer_read_uhwi (ib);
5078 bool contexts_computed = count & 1;
5079
5080 count /= 2;
5081 if (!count)
5082 return;
5083 if (prevails
5084 && (e->possibly_call_in_translation_unit_p ()
5085 /* Also stream in jump functions to builtins in hope that they
5086 will get fnspecs. */
5087 || fndecl_built_in_p (e->callee->decl, BUILT_IN_NORMAL)))
5088 {
5089 ipa_edge_args *args = ipa_edge_args_sum->get_create (e);
5090 vec_safe_grow_cleared (args->jump_functions, count, true);
5091 if (contexts_computed)
5092 vec_safe_grow_cleared (args->polymorphic_call_contexts, count, true);
5093 for (int k = 0; k < count; k++)
5094 {
5095 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
5096 data_in, prevails);
5097 if (contexts_computed)
5098 ipa_get_ith_polymorhic_call_context (args, k)->stream_in
5099 (ib, data_in);
5100 }
5101 }
5102 else
5103 {
5104 for (int k = 0; k < count; k++)
5105 {
5106 struct ipa_jump_func dummy;
5107 ipa_read_jump_function (ib, &dummy, e,
5108 data_in, prevails);
5109 if (contexts_computed)
5110 {
5111 class ipa_polymorphic_call_context ctx;
5112 ctx.stream_in (ib, data_in);
5113 }
5114 }
5115 }
5116}
5117
5118/* Stream in NODE info from IB. */
5119
5120static void
5121ipa_read_node_info (class lto_input_block *ib, struct cgraph_node *node,
5122 class data_in *data_in)
5123{
5124 int k;
5125 struct cgraph_edge *e;
5126 struct bitpack_d bp;
5127 bool prevails = node->prevailing_p ();
5128 ipa_node_params *info
5129 = prevails ? ipa_node_params_sum->get_create (node) : NULLnullptr;
5130
5131 int param_count = streamer_read_uhwi (ib);
5132 if (prevails)
5133 {
5134 ipa_alloc_node_params (node, param_count);
5135 for (k = 0; k < param_count; k++)
5136 (*info->descriptors)[k].move_cost = streamer_read_uhwi (ib);
5137 if (ipa_get_param_count (info) != 0)
5138 info->analysis_done = true;
5139 info->node_enqueued = false;
5140 }
5141 else
5142 for (k = 0; k < param_count; k++)
5143 streamer_read_uhwi (ib);
5144
5145 bp = streamer_read_bitpack (ib);
5146 for (k = 0; k < param_count; k++)
5147 {
5148 bool load_dereferenced = bp_unpack_value (&bp, 1);
5149 bool used = bp_unpack_value (&bp, 1);
5150
5151 if (prevails)
5152 {
5153 ipa_set_param_load_dereferenced (info, k, load_dereferenced);
5154 ipa_set_param_used (info, k, used);
5155 }
5156 }
5157 for (k = 0; k < param_count; k++)
5158 {
5159 int nuses = streamer_read_hwi (ib);
5160 tree type = stream_read_tree (ib, data_in)streamer_hooks.read_tree (ib, data_in);
5161
5162 if (prevails)
5163 {
5164 ipa_set_controlled_uses (info, k, nuses);
5165 (*info->descriptors)[k].decl_or_type = type;
5166 }
5167 }
5168 for (e = node->callees; e; e = e->next_callee)
5169 ipa_read_edge_info (ib, data_in, e, prevails);
5170 for (e = node->indirect_calls; e; e = e->next_callee)
5171 {
5172 ipa_read_edge_info (ib, data_in, e, prevails);
5173 ipa_read_indirect_edge_info (ib, data_in, e, info);
5174 }
5175}
5176
5177/* Write jump functions for nodes in SET. */
5178
5179void
5180ipa_prop_write_jump_functions (void)
5181{
5182 struct output_block *ob;
5183 unsigned int count = 0;
5184 lto_symtab_encoder_iterator lsei;
5185 lto_symtab_encoder_t encoder;
5186
5187 if (!ipa_node_params_sum || !ipa_edge_args_sum)
5188 return;
5189
5190 ob = create_output_block (LTO_section_jump_functions);
5191 encoder = ob->decl_state->symtab_node_encoder;
5192 ob->symbol = NULLnullptr;
5193 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5194 lsei_next_function_in_partition (&lsei))
5195 {
5196 cgraph_node *node = lsei_cgraph_node (lsei);
5197 if (node->has_gimple_body_p ()
5198 && ipa_node_params_sum->get (node) != NULLnullptr)
5199 count++;
5200 }
5201
5202 streamer_write_uhwi (ob, count);
5203
5204 /* Process all of the functions. */
5205 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5206 lsei_next_function_in_partition (&lsei))
5207 {
5208 cgraph_node *node = lsei_cgraph_node (lsei);
5209 if (node->has_gimple_body_p ()
5210 && ipa_node_params_sum->get (node) != NULLnullptr)
5211 ipa_write_node_info (ob, node);
5212 }
5213 streamer_write_char_stream (ob->main_stream, 0);
5214 produce_asm (ob, NULLnullptr);
5215 destroy_output_block (ob);
5216}
5217
5218/* Read section in file FILE_DATA of length LEN with data DATA. */
5219
5220static void
5221ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
5222 size_t len)
5223{
5224 const struct lto_function_header *header =
5225 (const struct lto_function_header *) data;
5226 const int cfg_offset = sizeof (struct lto_function_header);
5227 const int main_offset = cfg_offset + header->cfg_size;
5228 const int string_offset = main_offset + header->main_size;
5229 class data_in *data_in;
5230 unsigned int i;
5231 unsigned int count;
5232
5233 lto_input_block ib_main ((const char *) data + main_offset,
5234 header->main_size, file_data->mode_table);
5235
5236 data_in =
5237 lto_data_in_create (file_data, (const char *) data + string_offset,
5238 header->string_size, vNULL);
5239 count = streamer_read_uhwi (&ib_main);
5240
5241 for (i = 0; i < count; i++)
5242 {
5243 unsigned int index;
5244 struct cgraph_node *node;
5245 lto_symtab_encoder_t encoder;
5246
5247 index = streamer_read_uhwi (&ib_main);
5248 encoder = file_data->symtab_node_encoder;
5249 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
5250 index));
5251 gcc_assert (node->definition)((void)(!(node->definition) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 5251, __FUNCTION__), 0 : 0))
;
5252 ipa_read_node_info (&ib_main, node, data_in);
5253 }
5254 lto_free_section_data (file_data, LTO_section_jump_functions, NULLnullptr, data,
5255 len);
5256 lto_data_in_delete (data_in);
5257}
5258
5259/* Read ipcp jump functions. */
5260
5261void
5262ipa_prop_read_jump_functions (void)
5263{
5264 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5265 struct lto_file_decl_data *file_data;
5266 unsigned int j = 0;
5267
5268 ipa_check_create_node_params ();
5269 ipa_check_create_edge_args ();
5270 ipa_register_cgraph_hooks ();
5271
5272 while ((file_data = file_data_vec[j++]))
5273 {
5274 size_t len;
5275 const char *data
5276 = lto_get_summary_section_data (file_data, LTO_section_jump_functions,
5277 &len);
5278 if (data)
5279 ipa_prop_read_section (file_data, data, len);
5280 }
5281}
5282
5283/* Return true if the IPA-CP transformation summary TS is non-NULL and contains
5284 useful info. */
5285static bool
5286useful_ipcp_transformation_info_p (ipcp_transformation *ts)
5287{
5288 if (!ts)
5289 return false;
5290 if (!vec_safe_is_empty (ts->m_agg_values)
5291 || !vec_safe_is_empty (ts->bits)
5292 || !vec_safe_is_empty (ts->m_vr))
5293 return true;
5294 return false;
5295}
5296
5297/* Write into OB IPA-CP transfromation summary TS describing NODE. */
5298
5299void
5300write_ipcp_transformation_info (output_block *ob, cgraph_node *node,
5301 ipcp_transformation *ts)
5302{
5303 lto_symtab_encoder_t encoder = ob->decl_state->symtab_node_encoder;
5304 int node_ref = lto_symtab_encoder_encode (encoder, node);
5305 streamer_write_uhwi (ob, node_ref);
5306
5307 streamer_write_uhwi (ob, vec_safe_length (ts->m_agg_values));
5308 for (const ipa_argagg_value &av : ts->m_agg_values)
5309 {
5310 struct bitpack_d bp;
5311
5312 stream_write_tree (ob, av.value, true)streamer_hooks.write_tree (ob, av.value, true, true);
5313 streamer_write_uhwi (ob, av.unit_offset);
5314 streamer_write_uhwi (ob, av.index);
5315
5316 bp = bitpack_create (ob->main_stream);
5317 bp_pack_value (&bp, av.by_ref, 1);
5318 streamer_write_bitpack (&bp);
5319 }
5320
5321 streamer_write_uhwi (ob, vec_safe_length (ts->m_vr));
5322 for (const ipa_vr &parm_vr : ts->m_vr)
5323 {
5324 struct bitpack_d bp;
5325 bp = bitpack_create (ob->main_stream);
5326 bp_pack_value (&bp, parm_vr.known, 1);
5327 streamer_write_bitpack (&bp);
5328 if (parm_vr.known)
5329 {
5330 streamer_write_enum (ob->main_stream, value_rang_type,streamer_write_hwi_in_range ((ob->main_stream), 0, (int)(VR_LAST
) - 1, (int)(parm_vr.type))
5331 VR_LAST, parm_vr.type)streamer_write_hwi_in_range ((ob->main_stream), 0, (int)(VR_LAST
) - 1, (int)(parm_vr.type))
;
5332 streamer_write_wide_int (ob, parm_vr.min);
5333 streamer_write_wide_int (ob, parm_vr.max);
5334 }
5335 }
5336
5337 streamer_write_uhwi (ob, vec_safe_length (ts->bits));
5338 for (const ipa_bits *bits_jfunc : ts->bits)
5339 {
5340 struct bitpack_d bp = bitpack_create (ob->main_stream);
5341 bp_pack_value (&bp, !!bits_jfunc, 1);
5342 streamer_write_bitpack (&bp);
5343 if (bits_jfunc)
5344 {
5345 streamer_write_widest_int (ob, bits_jfunc->value);
5346 streamer_write_widest_int (ob, bits_jfunc->mask);
5347 }
5348 }
5349}
5350
5351/* Stream in the aggregate value replacement chain for NODE from IB. */
5352
5353static void
5354read_ipcp_transformation_info (lto_input_block *ib, cgraph_node *node,
5355 data_in *data_in)
5356{
5357 unsigned int count, i;
5358 ipcp_transformation_initialize ();
5359 ipcp_transformation *ts = ipcp_transformation_sum->get_create (node);
5360
5361 count = streamer_read_uhwi (ib);
5362 if (count > 0)
5363 {
5364 vec_safe_grow_cleared (ts->m_agg_values, count, true);
5365 for (i = 0; i <count; i++)
5366 {
5367 ipa_argagg_value *av = &(*ts->m_agg_values)[i];;
5368
5369 av->value = stream_read_tree (ib, data_in)streamer_hooks.read_tree (ib, data_in);
5370 av->unit_offset = streamer_read_uhwi (ib);
5371 av->index = streamer_read_uhwi (ib);
5372
5373 bitpack_d bp = streamer_read_bitpack (ib);
5374 av->by_ref = bp_unpack_value (&bp, 1);
5375 }
5376 }
5377
5378 count = streamer_read_uhwi (ib);
5379 if (count > 0)
5380 {
5381 vec_safe_grow_cleared (ts->m_vr, count, true);
5382 for (i = 0; i < count; i++)
5383 {
5384 ipa_vr *parm_vr;
5385 parm_vr = &(*ts->m_vr)[i];
5386 struct bitpack_d bp;
5387 bp = streamer_read_bitpack (ib);
5388 parm_vr->known = bp_unpack_value (&bp, 1);
5389 if (parm_vr->known)
5390 {
5391 parm_vr->type = streamer_read_enum (ib, value_range_kind,(enum value_range_kind)streamer_read_hwi_in_range ((ib), "value_range_kind"
, 0, (int)(VR_LAST) - 1)
5392 VR_LAST)(enum value_range_kind)streamer_read_hwi_in_range ((ib), "value_range_kind"
, 0, (int)(VR_LAST) - 1)
;
5393 parm_vr->min = streamer_read_wide_int (ib);
5394 parm_vr->max = streamer_read_wide_int (ib);
5395 }
5396 }
5397 }
5398 count = streamer_read_uhwi (ib);
5399 if (count > 0)
5400 {
5401 vec_safe_grow_cleared (ts->bits, count, true);
5402 for (i = 0; i < count; i++)
5403 {
5404 struct bitpack_d bp = streamer_read_bitpack (ib);
5405 bool known = bp_unpack_value (&bp, 1);
5406 if (known)
5407 {
5408 const widest_int value = streamer_read_widest_int (ib);
5409 const widest_int mask = streamer_read_widest_int (ib);
5410 ipa_bits *bits
5411 = ipa_get_ipa_bits_for_value (value, mask);
5412 (*ts->bits)[i] = bits;
5413 }
5414 }
5415 }
5416}
5417
5418/* Write all aggregate replacement for nodes in set. */
5419
5420void
5421ipcp_write_transformation_summaries (void)
5422{
5423 struct output_block *ob;
5424 unsigned int count = 0;
5425 lto_symtab_encoder_t encoder;
5426
5427 ob = create_output_block (LTO_section_ipcp_transform);
5428 encoder = ob->decl_state->symtab_node_encoder;
5429 ob->symbol = NULLnullptr;
5430
5431 for (int i = 0; i < lto_symtab_encoder_size (encoder); i++)
5432 {
5433 symtab_node *snode = lto_symtab_encoder_deref (encoder, i);
5434 cgraph_node *cnode = dyn_cast <cgraph_node *> (snode);
5435 if (!cnode)
5436 continue;
5437 ipcp_transformation *ts = ipcp_get_transformation_summary (cnode);
5438 if (useful_ipcp_transformation_info_p (ts)
5439 && lto_symtab_encoder_encode_body_p (encoder, cnode))
5440 count++;
5441 }
5442
5443 streamer_write_uhwi (ob, count);
5444
5445 for (int i = 0; i < lto_symtab_encoder_size (encoder); i++)
5446 {
5447 symtab_node *snode = lto_symtab_encoder_deref (encoder, i);
5448 cgraph_node *cnode = dyn_cast <cgraph_node *> (snode);
5449 if (!cnode)
5450 continue;
5451 ipcp_transformation *ts = ipcp_get_transformation_summary (cnode);
5452 if (useful_ipcp_transformation_info_p (ts)
5453 && lto_symtab_encoder_encode_body_p (encoder, cnode))
5454 write_ipcp_transformation_info (ob, cnode, ts);
5455 }
5456 streamer_write_char_stream (ob->main_stream, 0);
5457 produce_asm (ob, NULLnullptr);
5458 destroy_output_block (ob);
5459}
5460
5461/* Read replacements section in file FILE_DATA of length LEN with data
5462 DATA. */
5463
5464static void
5465read_replacements_section (struct lto_file_decl_data *file_data,
5466 const char *data,
5467 size_t len)
5468{
5469 const struct lto_function_header *header =
5470 (const struct lto_function_header *) data;
5471 const int cfg_offset = sizeof (struct lto_function_header);
5472 const int main_offset = cfg_offset + header->cfg_size;
5473 const int string_offset = main_offset + header->main_size;
5474 class data_in *data_in;
5475 unsigned int i;
5476 unsigned int count;
5477
5478 lto_input_block ib_main ((const char *) data + main_offset,
5479 header->main_size, file_data->mode_table);
5480
5481 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
5482 header->string_size, vNULL);
5483 count = streamer_read_uhwi (&ib_main);
5484
5485 for (i = 0; i < count; i++)
5486 {
5487 unsigned int index;
5488 struct cgraph_node *node;
5489 lto_symtab_encoder_t encoder;
5490
5491 index = streamer_read_uhwi (&ib_main);
5492 encoder = file_data->symtab_node_encoder;
5493 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
5494 index));
5495 read_ipcp_transformation_info (&ib_main, node, data_in);
5496 }
5497 lto_free_section_data (file_data, LTO_section_jump_functions, NULLnullptr, data,
5498 len);
5499 lto_data_in_delete (data_in);
5500}
5501
5502/* Read IPA-CP aggregate replacements. */
5503
5504void
5505ipcp_read_transformation_summaries (void)
5506{
5507 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5508 struct lto_file_decl_data *file_data;
5509 unsigned int j = 0;
5510
5511 while ((file_data = file_data_vec[j++]))
5512 {
5513 size_t len;
5514 const char *data
5515 = lto_get_summary_section_data (file_data, LTO_section_ipcp_transform,
5516 &len);
5517 if (data)
5518 read_replacements_section (file_data, data, len);
5519 }
5520}
5521
5522/* Adjust the aggregate replacements in TS to reflect any parameter removals
5523 which might have already taken place. If after adjustments there are no
5524 aggregate replacements left, the m_agg_values will be set to NULL. In other
5525 cases, it may be shrunk. */
5526
5527static void
5528adjust_agg_replacement_values (cgraph_node *node, ipcp_transformation *ts)
5529{
5530 clone_info *cinfo = clone_info::get (node);
5531 if (!cinfo || !cinfo->param_adjustments)
5532 return;
5533
5534 auto_vec<int, 16> new_indices;
5535 cinfo->param_adjustments->get_updated_indices (&new_indices);
5536 bool removed_item = false;
5537 unsigned dst_index = 0;
5538 unsigned count = ts->m_agg_values->length ();
5539 for (unsigned i = 0; i < count; i++)
5540 {
5541 ipa_argagg_value *v = &(*ts->m_agg_values)[i];
5542 gcc_checking_assert (v->index >= 0)((void)(!(v->index >= 0) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 5542, __FUNCTION__), 0 : 0))
;
5543
5544 int new_idx = -1;
5545 if ((unsigned) v->index < new_indices.length ())
5546 new_idx = new_indices[v->index];
5547
5548 if (new_idx >= 0)
5549 {
5550 v->index = new_idx;
5551 if (removed_item)
5552 (*ts->m_agg_values)[dst_index] = *v;
5553 dst_index++;
5554 }
5555 else
5556 removed_item = true;
5557 }
5558
5559 if (dst_index == 0)
5560 {
5561 ggc_free (ts->m_agg_values);
5562 ts->m_agg_values = NULLnullptr;
5563 }
5564 else if (removed_item)
5565 ts->m_agg_values->truncate (dst_index);
5566
5567 return;
5568}
5569
5570/* Dominator walker driving the ipcp modification phase. */
5571
5572class ipcp_modif_dom_walker : public dom_walker
5573{
5574public:
5575 ipcp_modif_dom_walker (struct ipa_func_body_info *fbi,
5576 vec<ipa_param_descriptor, va_gc> *descs,
5577 ipcp_transformation *ts, bool *sc)
5578 : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
5579 m_ts (ts), m_something_changed (sc) {}
5580
5581 edge before_dom_children (basic_block) final override;
5582 bool cleanup_eh ()
5583 { return gimple_purge_all_dead_eh_edges (m_need_eh_cleanup); }
5584
5585private:
5586 struct ipa_func_body_info *m_fbi;
5587 vec<ipa_param_descriptor, va_gc> *m_descriptors;
5588 ipcp_transformation *m_ts;
5589 bool *m_something_changed;
5590 auto_bitmap m_need_eh_cleanup;
5591};
5592
5593edge
5594ipcp_modif_dom_walker::before_dom_children (basic_block bb)
5595{
5596 gimple_stmt_iterator gsi;
5597 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5598 {
5599 gimple *stmt = gsi_stmt (gsi);
5600 tree rhs, val, t;
5601 HOST_WIDE_INTlong bit_offset;
5602 poly_int64 size;
5603 int index;
5604 bool by_ref, vce;
5605
5606 if (!gimple_assign_load_p (stmt))
5607 continue;
5608 rhs = gimple_assign_rhs1 (stmt);
5609 if (!is_gimple_reg_type (TREE_TYPE (rhs)((contains_struct_check ((rhs), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 5609, __FUNCTION__))->typed.type)
))
5610 continue;
5611
5612 vce = false;
5613 t = rhs;
5614 while (handled_component_p (t))
5615 {
5616 /* V_C_E can do things like convert an array of integers to one
5617 bigger integer and similar things we do not handle below. */
5618 if (TREE_CODE (t)((enum tree_code) (t)->base.code) == VIEW_CONVERT_EXPR)
5619 {
5620 vce = true;
5621 break;
5622 }
5623 t = TREE_OPERAND (t, 0)(*((const_cast<tree*> (tree_operand_check ((t), (0), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 5623, __FUNCTION__)))))
;
5624 }
5625 if (vce)
5626 continue;
5627
5628 if (!ipa_load_from_parm_agg (m_fbi, m_descriptors, stmt, rhs, &index,
5629 &bit_offset, &size, &by_ref))
5630 continue;
5631 unsigned unit_offset = bit_offset / BITS_PER_UNIT(8);
5632 ipa_argagg_value_list avl (m_ts);
5633 tree v = avl.get_value (index, unit_offset, by_ref);
5634
5635 if (!v
5636 || maybe_ne (tree_to_poly_int64 (TYPE_SIZE (TREE_TYPE (v))((tree_class_check ((((contains_struct_check ((v), (TS_TYPED)
, "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 5636, __FUNCTION__))->typed.type)), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.cc"
, 5636, __FUNCTION__))->type_common.size)
), size))
5637 continue;