Bug Summary

File:build/gcc/vec.h
Warning:line 741, column 3
Called C++ object pointer is null

Annotated Source Code

Press '?' to see keyboard shortcuts

clang -cc1 -cc1 -triple x86_64-unknown-linux-gnu -analyze -disable-free -disable-llvm-verifier -discard-value-names -main-file-name ipa-prop.c -analyzer-store=region -analyzer-opt-analyze-nested-blocks -analyzer-checker=core -analyzer-checker=apiModeling -analyzer-checker=unix -analyzer-checker=deadcode -analyzer-checker=cplusplus -analyzer-checker=security.insecureAPI.UncheckedReturn -analyzer-checker=security.insecureAPI.getpw -analyzer-checker=security.insecureAPI.gets -analyzer-checker=security.insecureAPI.mktemp -analyzer-checker=security.insecureAPI.mkstemp -analyzer-checker=security.insecureAPI.vfork -analyzer-checker=nullability.NullPassedToNonnull -analyzer-checker=nullability.NullReturnedFromNonnull -analyzer-output plist -w -setup-static-analyzer -analyzer-config-compatibility-mode=true -mrelocation-model static -mframe-pointer=none -fmath-errno -fno-rounding-math -mconstructor-aliases -munwind-tables -target-cpu x86-64 -tune-cpu generic -debugger-tuning=gdb -fcoverage-compilation-dir=/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/objdir/gcc -resource-dir /usr/lib64/clang/13.0.0 -D IN_GCC -D HAVE_CONFIG_H -I . -I . -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/. 
-I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../include -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libcpp/include -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libcody -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libdecnumber -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libdecnumber/bid -I ../libdecnumber -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libbacktrace -internal-isystem /usr/bin/../lib64/gcc/x86_64-suse-linux/11/../../../../include/c++/11 -internal-isystem /usr/bin/../lib64/gcc/x86_64-suse-linux/11/../../../../include/c++/11/x86_64-suse-linux -internal-isystem /usr/bin/../lib64/gcc/x86_64-suse-linux/11/../../../../include/c++/11/backward -internal-isystem /usr/lib64/clang/13.0.0/include -internal-isystem /usr/local/include -internal-isystem /usr/bin/../lib64/gcc/x86_64-suse-linux/11/../../../../x86_64-suse-linux/include -internal-externc-isystem /include -internal-externc-isystem /usr/include -O2 -Wno-narrowing -Wwrite-strings -Wno-error=format-diag -Wno-long-long -Wno-variadic-macros -Wno-overlength-strings -fdeprecated-macro -fdebug-compilation-dir=/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/objdir/gcc -ferror-limit 19 -fno-rtti -fgnuc-version=4.2.1 -vectorize-loops -vectorize-slp -analyzer-output=plist-html -analyzer-config silence-checkers=core.NullDereference -faddrsig -D__GCC_HAVE_DWARF2_CFI_ASM=1 -o /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/objdir/clang-static-analyzer/2021-11-20-133755-20252-1/report-hbosMg.plist -x c++ /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c

/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c

1/* Interprocedural analyses.
2 Copyright (C) 2005-2021 Free Software Foundation, Inc.
3
4This file is part of GCC.
5
6GCC is free software; you can redistribute it and/or modify it under
7the terms of the GNU General Public License as published by the Free
8Software Foundation; either version 3, or (at your option) any later
9version.
10
11GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12WARRANTY; without even the implied warranty of MERCHANTABILITY or
13FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14for more details.
15
16You should have received a copy of the GNU General Public License
17along with GCC; see the file COPYING3. If not see
18<http://www.gnu.org/licenses/>. */
19
20#include "config.h"
21#include "system.h"
22#include "coretypes.h"
23#include "backend.h"
24#include "rtl.h"
25#include "tree.h"
26#include "gimple.h"
27#include "alloc-pool.h"
28#include "tree-pass.h"
29#include "ssa.h"
30#include "tree-streamer.h"
31#include "cgraph.h"
32#include "diagnostic.h"
33#include "fold-const.h"
34#include "gimple-fold.h"
35#include "tree-eh.h"
36#include "calls.h"
37#include "stor-layout.h"
38#include "print-tree.h"
39#include "gimplify.h"
40#include "gimple-iterator.h"
41#include "gimplify-me.h"
42#include "gimple-walk.h"
43#include "symbol-summary.h"
44#include "ipa-prop.h"
45#include "tree-cfg.h"
46#include "tree-dfa.h"
47#include "tree-inline.h"
48#include "ipa-fnsummary.h"
49#include "gimple-pretty-print.h"
50#include "ipa-utils.h"
51#include "dbgcnt.h"
52#include "domwalk.h"
53#include "builtins.h"
54#include "tree-cfgcleanup.h"
55#include "options.h"
56#include "symtab-clones.h"
57#include "attr-fnspec.h"
58#include "gimple-range.h"
59
60/* Function summary where the parameter infos are actually stored. */
61ipa_node_params_t *ipa_node_params_sum = NULLnullptr;
62
63function_summary <ipcp_transformation *> *ipcp_transformation_sum = NULLnullptr;
64
65/* Edge summary for IPA-CP edge information. */
66ipa_edge_args_sum_t *ipa_edge_args_sum;
67
68/* Traits for a hash table for reusing already existing ipa_bits. */
69
70struct ipa_bit_ggc_hash_traits : public ggc_cache_remove <ipa_bits *>
71{
72 typedef ipa_bits *value_type;
73 typedef ipa_bits *compare_type;
74 static hashval_t
75 hash (const ipa_bits *p)
76 {
77 hashval_t t = (hashval_t) p->value.to_shwi ();
78 return iterative_hash_host_wide_int (p->mask.to_shwi (), t);
79 }
80 static bool
81 equal (const ipa_bits *a, const ipa_bits *b)
82 {
83 return a->value == b->value && a->mask == b->mask;
84 }
85 static const bool empty_zero_p = true;
86 static void
87 mark_empty (ipa_bits *&p)
88 {
89 p = NULLnullptr;
90 }
91 static bool
92 is_empty (const ipa_bits *p)
93 {
94 return p == NULLnullptr;
95 }
96 static bool
97 is_deleted (const ipa_bits *p)
98 {
99 return p == reinterpret_cast<const ipa_bits *> (1);
100 }
101 static void
102 mark_deleted (ipa_bits *&p)
103 {
104 p = reinterpret_cast<ipa_bits *> (1);
105 }
106};
107
108/* Hash table for avoid repeated allocations of equal ipa_bits. */
109static GTY ((cache)) hash_table<ipa_bit_ggc_hash_traits> *ipa_bits_hash_table;
110
111/* Traits for a hash table for reusing value_ranges used for IPA. Note that
112 the equiv bitmap is not hashed and is expected to be NULL. */
113
114struct ipa_vr_ggc_hash_traits : public ggc_cache_remove <value_range *>
115{
116 typedef value_range *value_type;
117 typedef value_range *compare_type;
118 static hashval_t
119 hash (const value_range *p)
120 {
121 inchash::hash hstate (p->kind ());
122 inchash::add_expr (p->min (), hstate);
123 inchash::add_expr (p->max (), hstate);
124 return hstate.end ();
125 }
126 static bool
127 equal (const value_range *a, const value_range *b)
128 {
129 return (a->equal_p (*b)
130 && types_compatible_p (a->type (), b->type ()));
131 }
132 static const bool empty_zero_p = true;
133 static void
134 mark_empty (value_range *&p)
135 {
136 p = NULLnullptr;
137 }
138 static bool
139 is_empty (const value_range *p)
140 {
141 return p == NULLnullptr;
142 }
143 static bool
144 is_deleted (const value_range *p)
145 {
146 return p == reinterpret_cast<const value_range *> (1);
147 }
148 static void
149 mark_deleted (value_range *&p)
150 {
151 p = reinterpret_cast<value_range *> (1);
152 }
153};
154
155/* Hash table for avoid repeated allocations of equal value_ranges. */
156static GTY ((cache)) hash_table<ipa_vr_ggc_hash_traits> *ipa_vr_hash_table;
157
158/* Holders of ipa cgraph hooks: */
static struct cgraph_node_hook_list *function_insertion_hook_holder;
160
161/* Description of a reference to an IPA constant. */
struct ipa_cst_ref_desc
{
  /* Edge that corresponds to the statement which took the reference.  */
  struct cgraph_edge *cs;
  /* Linked list of duplicates created when call graph edges are cloned.  */
  struct ipa_cst_ref_desc *next_duplicate;
  /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
     is out of control.  */
  int refcount;
};
172
173/* Allocation pool for reference descriptions. */
174
175static object_allocator<ipa_cst_ref_desc> ipa_refdesc_pool
176 ("IPA-PROP ref descriptions");
177
178/* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
179 with NODE should prevent us from analyzing it for the purposes of IPA-CP. */
180
181static bool
182ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
183{
184 tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl)((tree_check ((node->decl), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 184, __FUNCTION__, (FUNCTION_DECL)))->function_decl.function_specific_optimization
)
;
185
186 if (!fs_opts)
187 return false;
188 return !opt_for_fn (node->decl, optimize)(opts_for_fn (node->decl)->x_optimize) || !opt_for_fn (node->decl, flag_ipa_cp)(opts_for_fn (node->decl)->x_flag_ipa_cp);
189}
190
191/* Return index of the formal whose tree is PTREE in function which corresponds
192 to INFO. */
193
194static int
195ipa_get_param_decl_index_1 (vec<ipa_param_descriptor, va_gc> *descriptors,
196 tree ptree)
197{
198 int i, count;
199
200 count = vec_safe_length (descriptors);
201 for (i = 0; i < count; i++)
202 if ((*descriptors)[i].decl_or_type == ptree)
203 return i;
204
205 return -1;
206}
207
208/* Return index of the formal whose tree is PTREE in function which corresponds
209 to INFO. */
210
211int
212ipa_get_param_decl_index (class ipa_node_params *info, tree ptree)
213{
214 return ipa_get_param_decl_index_1 (info->descriptors, ptree);
215}
216
217/* Populate the param_decl field in parameter DESCRIPTORS that correspond to
218 NODE. */
219
220static void
221ipa_populate_param_decls (struct cgraph_node *node,
222 vec<ipa_param_descriptor, va_gc> &descriptors)
223{
224 tree fndecl;
225 tree fnargs;
226 tree parm;
227 int param_num;
228
229 fndecl = node->decl;
230 gcc_assert (gimple_has_body_p (fndecl))((void)(!(gimple_has_body_p (fndecl)) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 230, __FUNCTION__), 0 : 0))
;
231 fnargs = DECL_ARGUMENTS (fndecl)((tree_check ((fndecl), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 231, __FUNCTION__, (FUNCTION_DECL)))->function_decl.arguments
)
;
232 param_num = 0;
233 for (parm = fnargs; parm; parm = DECL_CHAIN (parm)(((contains_struct_check (((contains_struct_check ((parm), (TS_DECL_MINIMAL
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 233, __FUNCTION__))), (TS_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 233, __FUNCTION__))->common.chain))
)
234 {
235 descriptors[param_num].decl_or_type = parm;
236 unsigned int cost = estimate_move_cost (TREE_TYPE (parm)((contains_struct_check ((parm), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 236, __FUNCTION__))->typed.type)
, true);
237 descriptors[param_num].move_cost = cost;
238 /* Watch overflow, move_cost is a bitfield. */
239 gcc_checking_assert (cost == descriptors[param_num].move_cost)((void)(!(cost == descriptors[param_num].move_cost) ? fancy_abort
("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 239, __FUNCTION__), 0 : 0))
;
240 param_num++;
241 }
242}
243
244/* Return how many formal parameters FNDECL has. */
245
246int
247count_formal_params (tree fndecl)
248{
249 tree parm;
250 int count = 0;
251 gcc_assert (gimple_has_body_p (fndecl))((void)(!(gimple_has_body_p (fndecl)) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 251, __FUNCTION__), 0 : 0))
;
252
253 for (parm = DECL_ARGUMENTS (fndecl)((tree_check ((fndecl), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 253, __FUNCTION__, (FUNCTION_DECL)))->function_decl.arguments
)
; parm; parm = DECL_CHAIN (parm)(((contains_struct_check (((contains_struct_check ((parm), (TS_DECL_MINIMAL
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 253, __FUNCTION__))), (TS_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 253, __FUNCTION__))->common.chain))
)
254 count++;
255
256 return count;
257}
258
259/* Return the declaration of Ith formal parameter of the function corresponding
260 to INFO. Note there is no setter function as this array is built just once
261 using ipa_initialize_node_params. */
262
263void
264ipa_dump_param (FILE *file, class ipa_node_params *info, int i)
265{
266 fprintf (file, "param #%i", i);
267 if ((*info->descriptors)[i].decl_or_type)
268 {
269 fprintf (file, " ");
270 print_generic_expr (file, (*info->descriptors)[i].decl_or_type);
271 }
272}
273
274/* If necessary, allocate vector of parameter descriptors in info of NODE.
275 Return true if they were allocated, false if not. */
276
277static bool
278ipa_alloc_node_params (struct cgraph_node *node, int param_count)
279{
280 ipa_node_params *info = ipa_node_params_sum->get_create (node);
281
282 if (!info->descriptors && param_count)
283 {
284 vec_safe_grow_cleared (info->descriptors, param_count, true);
285 return true;
286 }
287 else
288 return false;
289}
290
291/* Initialize the ipa_node_params structure associated with NODE by counting
292 the function parameters, creating the descriptors and populating their
293 param_decls. */
294
295void
296ipa_initialize_node_params (struct cgraph_node *node)
297{
298 ipa_node_params *info = ipa_node_params_sum->get_create (node);
299
300 if (!info->descriptors
301 && ipa_alloc_node_params (node, count_formal_params (node->decl)))
302 ipa_populate_param_decls (node, *info->descriptors);
303}
304
305/* Print the jump functions associated with call graph edge CS to file F. */
306
307static void
308ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
309{
310 ipa_edge_args *args = ipa_edge_args_sum->get (cs);
311 int count = ipa_get_cs_argument_count (args);
312
313 for (int i = 0; i < count; i++)
314 {
315 struct ipa_jump_func *jump_func;
316 enum jump_func_type type;
317
318 jump_func = ipa_get_ith_jump_func (args, i);
319 type = jump_func->type;
320
321 fprintf (f, " param %d: ", i);
322 if (type == IPA_JF_UNKNOWN)
323 fprintf (f, "UNKNOWN\n");
324 else if (type == IPA_JF_CONST)
325 {
326 tree val = jump_func->value.constant.value;
327 fprintf (f, "CONST: ");
328 print_generic_expr (f, val);
329 if (TREE_CODE (val)((enum tree_code) (val)->base.code) == ADDR_EXPR
330 && TREE_CODE (TREE_OPERAND (val, 0))((enum tree_code) ((*((const_cast<tree*> (tree_operand_check
((val), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 330, __FUNCTION__))))))->base.code)
== CONST_DECL)
331 {
332 fprintf (f, " -> ");
333 print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0))((contains_struct_check (((*((const_cast<tree*> (tree_operand_check
((val), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 333, __FUNCTION__)))))), (TS_DECL_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 333, __FUNCTION__))->decl_common.initial)
);
334 }
335 fprintf (f, "\n");
336 }
337 else if (type == IPA_JF_PASS_THROUGH)
338 {
339 fprintf (f, "PASS THROUGH: ");
340 fprintf (f, "%d, op %s",
341 jump_func->value.pass_through.formal_id,
342 get_tree_code_name(jump_func->value.pass_through.operation));
343 if (jump_func->value.pass_through.operation != NOP_EXPR)
344 {
345 fprintf (f, " ");
346 print_generic_expr (f, jump_func->value.pass_through.operand);
347 }
348 if (jump_func->value.pass_through.agg_preserved)
349 fprintf (f, ", agg_preserved");
350 fprintf (f, "\n");
351 }
352 else if (type == IPA_JF_ANCESTOR)
353 {
354 fprintf (f, "ANCESTOR: ");
355 fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC"%" "l" "d",
356 jump_func->value.ancestor.formal_id,
357 jump_func->value.ancestor.offset);
358 if (jump_func->value.ancestor.agg_preserved)
359 fprintf (f, ", agg_preserved");
360 fprintf (f, "\n");
361 }
362
363 if (jump_func->agg.items)
364 {
365 struct ipa_agg_jf_item *item;
366 int j;
367
368 fprintf (f, " Aggregate passed by %s:\n",
369 jump_func->agg.by_ref ? "reference" : "value");
370 FOR_EACH_VEC_ELT (*jump_func->agg.items, j, item)for (j = 0; (*jump_func->agg.items).iterate ((j), &(item
)); ++(j))
371 {
372 fprintf (f, " offset: " HOST_WIDE_INT_PRINT_DEC"%" "l" "d" ", ",
373 item->offset);
374 fprintf (f, "type: ");
375 print_generic_expr (f, item->type);
376 fprintf (f, ", ");
377 if (item->jftype == IPA_JF_PASS_THROUGH)
378 fprintf (f, "PASS THROUGH: %d,",
379 item->value.pass_through.formal_id);
380 else if (item->jftype == IPA_JF_LOAD_AGG)
381 {
382 fprintf (f, "LOAD AGG: %d",
383 item->value.pass_through.formal_id);
384 fprintf (f, " [offset: " HOST_WIDE_INT_PRINT_DEC"%" "l" "d" ", by %s],",
385 item->value.load_agg.offset,
386 item->value.load_agg.by_ref ? "reference"
387 : "value");
388 }
389
390 if (item->jftype == IPA_JF_PASS_THROUGH
391 || item->jftype == IPA_JF_LOAD_AGG)
392 {
393 fprintf (f, " op %s",
394 get_tree_code_name (item->value.pass_through.operation));
395 if (item->value.pass_through.operation != NOP_EXPR)
396 {
397 fprintf (f, " ");
398 print_generic_expr (f, item->value.pass_through.operand);
399 }
400 }
401 else if (item->jftype == IPA_JF_CONST)
402 {
403 fprintf (f, "CONST: ");
404 print_generic_expr (f, item->value.constant);
405 }
406 else if (item->jftype == IPA_JF_UNKNOWN)
407 fprintf (f, "UNKNOWN: " HOST_WIDE_INT_PRINT_DEC"%" "l" "d" " bits",
408 tree_to_uhwi (TYPE_SIZE (item->type)((tree_class_check ((item->type), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 408, __FUNCTION__))->type_common.size)
));
409 fprintf (f, "\n");
410 }
411 }
412
413 class ipa_polymorphic_call_context *ctx
414 = ipa_get_ith_polymorhic_call_context (args, i);
415 if (ctx && !ctx->useless_p ())
416 {
417 fprintf (f, " Context: ");
418 ctx->dump (dump_file);
419 }
420
421 if (jump_func->bits)
422 {
423 fprintf (f, " value: ");
424 print_hex (jump_func->bits->value, f);
425 fprintf (f, ", mask: ");
426 print_hex (jump_func->bits->mask, f);
427 fprintf (f, "\n");
428 }
429 else
430 fprintf (f, " Unknown bits\n");
431
432 if (jump_func->m_vr)
433 {
434 fprintf (f, " VR ");
435 fprintf (f, "%s[",
436 (jump_func->m_vr->kind () == VR_ANTI_RANGE) ? "~" : "");
437 print_decs (wi::to_wide (jump_func->m_vr->min ()), f);
438 fprintf (f, ", ");
439 print_decs (wi::to_wide (jump_func->m_vr->max ()), f);
440 fprintf (f, "]\n");
441 }
442 else
443 fprintf (f, " Unknown VR\n");
444 }
445}
446
447
448/* Print the jump functions of all arguments on all call graph edges going from
449 NODE to file F. */
450
451void
452ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
453{
454 struct cgraph_edge *cs;
455
456 fprintf (f, " Jump functions of caller %s:\n", node->dump_name ());
457 for (cs = node->callees; cs; cs = cs->next_callee)
458 {
459
460 fprintf (f, " callsite %s -> %s : \n",
461 node->dump_name (),
462 cs->callee->dump_name ());
463 if (!ipa_edge_args_info_available_for_edge_p (cs))
464 fprintf (f, " no arg info\n");
465 else
466 ipa_print_node_jump_functions_for_edge (f, cs);
467 }
468
469 for (cs = node->indirect_calls; cs; cs = cs->next_callee)
470 {
471 class cgraph_indirect_call_info *ii;
472
473 ii = cs->indirect_info;
474 if (ii->agg_contents)
475 fprintf (f, " indirect %s callsite, calling param %i, "
476 "offset " HOST_WIDE_INT_PRINT_DEC"%" "l" "d" ", %s",
477 ii->member_ptr ? "member ptr" : "aggregate",
478 ii->param_index, ii->offset,
479 ii->by_ref ? "by reference" : "by_value");
480 else
481 fprintf (f, " indirect %s callsite, calling param %i, "
482 "offset " HOST_WIDE_INT_PRINT_DEC"%" "l" "d",
483 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
484 ii->offset);
485
486 if (cs->call_stmt)
487 {
488 fprintf (f, ", for stmt ");
489 print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
490 }
491 else
492 fprintf (f, "\n");
493 if (ii->polymorphic)
494 ii->context.dump (f);
495 if (!ipa_edge_args_info_available_for_edge_p (cs))
496 fprintf (f, " no arg info\n");
497 else
498 ipa_print_node_jump_functions_for_edge (f, cs);
499 }
500}
501
502/* Print ipa_jump_func data structures of all nodes in the call graph to F. */
503
504void
505ipa_print_all_jump_functions (FILE *f)
506{
507 struct cgraph_node *node;
508
509 fprintf (f, "\nJump functions:\n");
510 FOR_EACH_FUNCTION (node)for ((node) = symtab->first_function (); (node); (node) = symtab
->next_function ((node)))
511 {
512 ipa_print_node_jump_functions (f, node);
513 }
514}
515
516/* Set jfunc to be a know-really nothing jump function. */
517
518static void
519ipa_set_jf_unknown (struct ipa_jump_func *jfunc)
520{
521 jfunc->type = IPA_JF_UNKNOWN;
522}
523
524/* Set JFUNC to be a copy of another jmp (to be used by jump function
525 combination code). The two functions will share their rdesc. */
526
527static void
528ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
529 struct ipa_jump_func *src)
530
531{
532 gcc_checking_assert (src->type == IPA_JF_CONST)((void)(!(src->type == IPA_JF_CONST) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 532, __FUNCTION__), 0 : 0))
;
533 dst->type = IPA_JF_CONST;
534 dst->value.constant = src->value.constant;
535}
536
537/* Set JFUNC to be a constant jmp function. */
538
539static void
540ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
541 struct cgraph_edge *cs)
542{
543 jfunc->type = IPA_JF_CONST;
544 jfunc->value.constant.value = unshare_expr_without_location (constant);
545
546 if (TREE_CODE (constant)((enum tree_code) (constant)->base.code) == ADDR_EXPR
547 && (TREE_CODE (TREE_OPERAND (constant, 0))((enum tree_code) ((*((const_cast<tree*> (tree_operand_check
((constant), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 547, __FUNCTION__))))))->base.code)
== FUNCTION_DECL
548 || (TREE_CODE (TREE_OPERAND (constant, 0))((enum tree_code) ((*((const_cast<tree*> (tree_operand_check
((constant), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 548, __FUNCTION__))))))->base.code)
== VAR_DECL
549 && TREE_STATIC (TREE_OPERAND (constant, 0))(((*((const_cast<tree*> (tree_operand_check ((constant)
, (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 549, __FUNCTION__))))))->base.static_flag)
)))
550 {
551 struct ipa_cst_ref_desc *rdesc;
552
553 rdesc = ipa_refdesc_pool.allocate ();
554 rdesc->cs = cs;
555 rdesc->next_duplicate = NULLnullptr;
556 rdesc->refcount = 1;
557 jfunc->value.constant.rdesc = rdesc;
558 }
559 else
560 jfunc->value.constant.rdesc = NULLnullptr;
561}
562
563/* Set JFUNC to be a simple pass-through jump function. */
564static void
565ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
566 bool agg_preserved)
567{
568 jfunc->type = IPA_JF_PASS_THROUGH;
569 jfunc->value.pass_through.operand = NULL_TREE(tree) nullptr;
570 jfunc->value.pass_through.formal_id = formal_id;
571 jfunc->value.pass_through.operation = NOP_EXPR;
572 jfunc->value.pass_through.agg_preserved = agg_preserved;
573}
574
575/* Set JFUNC to be an unary pass through jump function. */
576
577static void
578ipa_set_jf_unary_pass_through (struct ipa_jump_func *jfunc, int formal_id,
579 enum tree_code operation)
580{
581 jfunc->type = IPA_JF_PASS_THROUGH;
582 jfunc->value.pass_through.operand = NULL_TREE(tree) nullptr;
583 jfunc->value.pass_through.formal_id = formal_id;
584 jfunc->value.pass_through.operation = operation;
585 jfunc->value.pass_through.agg_preserved = false;
586}
587/* Set JFUNC to be an arithmetic pass through jump function. */
588
589static void
590ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
591 tree operand, enum tree_code operation)
592{
593 jfunc->type = IPA_JF_PASS_THROUGH;
594 jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
595 jfunc->value.pass_through.formal_id = formal_id;
596 jfunc->value.pass_through.operation = operation;
597 jfunc->value.pass_through.agg_preserved = false;
598}
599
600/* Set JFUNC to be an ancestor jump function. */
601
602static void
603ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INTlong offset,
604 int formal_id, bool agg_preserved)
605{
606 jfunc->type = IPA_JF_ANCESTOR;
607 jfunc->value.ancestor.formal_id = formal_id;
608 jfunc->value.ancestor.offset = offset;
609 jfunc->value.ancestor.agg_preserved = agg_preserved;
610}
611
612/* Get IPA BB information about the given BB. FBI is the context of analyzis
613 of this function body. */
614
615static struct ipa_bb_info *
616ipa_get_bb_info (struct ipa_func_body_info *fbi, basic_block bb)
617{
618 gcc_checking_assert (fbi)((void)(!(fbi) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 618, __FUNCTION__), 0 : 0))
;
619 return &fbi->bb_infos[bb->index];
620}
621
622/* Structure to be passed in between detect_type_change and
623 check_stmt_for_type_change. */
624
625struct prop_type_change_info
626{
627 /* Offset into the object where there is the virtual method pointer we are
628 looking for. */
629 HOST_WIDE_INTlong offset;
630 /* The declaration or SSA_NAME pointer of the base that we are checking for
631 type change. */
632 tree object;
633 /* Set to true if dynamic type change has been detected. */
634 bool type_maybe_changed;
635};
636
637/* Return true if STMT can modify a virtual method table pointer.
638
639 This function makes special assumptions about both constructors and
640 destructors which are all the functions that are allowed to alter the VMT
641 pointers. It assumes that destructors begin with assignment into all VMT
642 pointers and that constructors essentially look in the following way:
643
644 1) The very first thing they do is that they call constructors of ancestor
645 sub-objects that have them.
646
647 2) Then VMT pointers of this and all its ancestors is set to new values
648 corresponding to the type corresponding to the constructor.
649
650 3) Only afterwards, other stuff such as constructor of member sub-objects
651 and the code written by the user is run. Only this may include calling
652 virtual functions, directly or indirectly.
653
654 There is no way to call a constructor of an ancestor sub-object in any
655 other way.
656
657 This means that we do not have to care whether constructors get the correct
658 type information because they will always change it (in fact, if we define
659 the type to be given by the VMT pointer, it is undefined).
660
661 The most important fact to derive from the above is that if, for some
662 statement in the section 3, we try to detect whether the dynamic type has
663 changed, we can safely ignore all calls as we examine the function body
664 backwards until we reach statements in section 2 because these calls cannot
665 be ancestor constructors or destructors (if the input is not bogus) and so
666 do not change the dynamic type (this holds true only for automatically
667 allocated objects but at the moment we devirtualize only these). We then
668 must detect that statements in section 2 change the dynamic type and can try
669 to derive the new type. That is enough and we can stop, we will never see
670 the calls into constructors of sub-objects in this code. Therefore we can
671 safely ignore all call statements that we traverse.
672 */
673
674static bool
675stmt_may_be_vtbl_ptr_store (gimple *stmt)
676{
677 if (is_gimple_call (stmt))
678 return false;
679 if (gimple_clobber_p (stmt))
680 return false;
681 else if (is_gimple_assign (stmt))
682 {
683 tree lhs = gimple_assign_lhs (stmt);
684
685 if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs))(((enum tree_code) (((contains_struct_check ((lhs), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 685, __FUNCTION__))->typed.type))->base.code) == ARRAY_TYPE
|| (((enum tree_code) (((contains_struct_check ((lhs), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 685, __FUNCTION__))->typed.type))->base.code) == RECORD_TYPE
|| ((enum tree_code) (((contains_struct_check ((lhs), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 685, __FUNCTION__))->typed.type))->base.code) == UNION_TYPE
|| ((enum tree_code) (((contains_struct_check ((lhs), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 685, __FUNCTION__))->typed.type))->base.code) == QUAL_UNION_TYPE
))
)
686 {
687 if (flag_strict_aliasingglobal_options.x_flag_strict_aliasing
688 && !POINTER_TYPE_P (TREE_TYPE (lhs))(((enum tree_code) (((contains_struct_check ((lhs), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 688, __FUNCTION__))->typed.type))->base.code) == POINTER_TYPE
|| ((enum tree_code) (((contains_struct_check ((lhs), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 688, __FUNCTION__))->typed.type))->base.code) == REFERENCE_TYPE
)
)
689 return false;
690
691 if (TREE_CODE (lhs)((enum tree_code) (lhs)->base.code) == COMPONENT_REF
692 && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1))((contains_struct_check (((*((const_cast<tree*> (tree_operand_check
((lhs), (1), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 692, __FUNCTION__)))))), (TS_DECL_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 692, __FUNCTION__))->decl_common.virtual_flag)
)
693 return false;
694 /* In the future we might want to use get_ref_base_and_extent to find
695 if there is a field corresponding to the offset and if so, proceed
696 almost like if it was a component ref. */
697 }
698 }
699 return true;
700}
701
702/* Callback of walk_aliased_vdefs and a helper function for detect_type_change
703 to check whether a particular statement may modify the virtual table
 704 pointer.  It stores its result into DATA, which points to a
705 prop_type_change_info structure. */
706
707static bool
708check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED__attribute__ ((__unused__)), tree vdef, void *data)
709{
710 gimple *stmt = SSA_NAME_DEF_STMT (vdef)(tree_check ((vdef), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 710, __FUNCTION__, (SSA_NAME)))->ssa_name.def_stmt
;
711 struct prop_type_change_info *tci = (struct prop_type_change_info *) data;
712
713 if (stmt_may_be_vtbl_ptr_store (stmt))
714 {
715 tci->type_maybe_changed = true;
716 return true;
717 }
718 else
719 return false;
720}
721
722/* See if ARG is PARAM_DECL describing an instance passed by pointer
 723 or reference in FUNCTION. Return false if the dynamic type may change
 724 in between the beginning of the function and the time CALL is invoked.
725
726 Generally functions are not allowed to change type of such instances,
727 but they call destructors. We assume that methods cannot destroy the THIS
728 pointer. Also as a special cases, constructor and destructors may change
729 type of the THIS pointer. */
730
731static bool
732param_type_may_change_p (tree function, tree arg, gimple *call)
733{
734 /* Pure functions cannot do any changes on the dynamic type;
735 that require writting to memory. */
736 if (flags_from_decl_or_type (function) & (ECF_PURE(1 << 1) | ECF_CONST(1 << 0)))
737 return false;
738 /* We need to check if we are within inlined consturctor
739 or destructor (ideally we would have way to check that the
740 inline cdtor is actually working on ARG, but we don't have
741 easy tie on this, so punt on all non-pure cdtors.
742 We may also record the types of cdtors and once we know type
743 of the instance match them.
744
745 Also code unification optimizations may merge calls from
746 different blocks making return values unreliable. So
747 do nothing during late optimization. */
748 if (DECL_STRUCT_FUNCTION (function)((tree_check ((function), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 748, __FUNCTION__, (FUNCTION_DECL)))->function_decl.f)
->after_inlining)
749 return true;
750 if (TREE_CODE (arg)((enum tree_code) (arg)->base.code) == SSA_NAME
751 && SSA_NAME_IS_DEFAULT_DEF (arg)(tree_check ((arg), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 751, __FUNCTION__, (SSA_NAME)))->base.default_def_flag
752 && TREE_CODE (SSA_NAME_VAR (arg))((enum tree_code) (((tree_check ((arg), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 752, __FUNCTION__, (SSA_NAME)))->ssa_name.var == (tree) nullptr
|| ((enum tree_code) ((arg)->ssa_name.var)->base.code)
== IDENTIFIER_NODE ? (tree) nullptr : (arg)->ssa_name.var
))->base.code)
== PARM_DECL)
753 {
754 /* Normal (non-THIS) argument. */
755 if ((SSA_NAME_VAR (arg)((tree_check ((arg), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 755, __FUNCTION__, (SSA_NAME)))->ssa_name.var == (tree) nullptr
|| ((enum tree_code) ((arg)->ssa_name.var)->base.code)
== IDENTIFIER_NODE ? (tree) nullptr : (arg)->ssa_name.var
)
!= DECL_ARGUMENTS (function)((tree_check ((function), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 755, __FUNCTION__, (FUNCTION_DECL)))->function_decl.arguments
)
756 || TREE_CODE (TREE_TYPE (function))((enum tree_code) (((contains_struct_check ((function), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 756, __FUNCTION__))->typed.type))->base.code)
!= METHOD_TYPE)
757 /* THIS pointer of an method - here we want to watch constructors
758 and destructors as those definitely may change the dynamic
759 type. */
760 || (TREE_CODE (TREE_TYPE (function))((enum tree_code) (((contains_struct_check ((function), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 760, __FUNCTION__))->typed.type))->base.code)
== METHOD_TYPE
761 && !DECL_CXX_CONSTRUCTOR_P (function)((tree_check ((function), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 761, __FUNCTION__, (FUNCTION_DECL)))->decl_with_vis.cxx_constructor
)
762 && !DECL_CXX_DESTRUCTOR_P (function)((tree_check ((function), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 762, __FUNCTION__, (FUNCTION_DECL)))->decl_with_vis.cxx_destructor
)
763 && (SSA_NAME_VAR (arg)((tree_check ((arg), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 763, __FUNCTION__, (SSA_NAME)))->ssa_name.var == (tree) nullptr
|| ((enum tree_code) ((arg)->ssa_name.var)->base.code)
== IDENTIFIER_NODE ? (tree) nullptr : (arg)->ssa_name.var
)
== DECL_ARGUMENTS (function)((tree_check ((function), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 763, __FUNCTION__, (FUNCTION_DECL)))->function_decl.arguments
)
)))
764 {
765 /* Walk the inline stack and watch out for ctors/dtors. */
766 for (tree block = gimple_block (call); block && TREE_CODE (block)((enum tree_code) (block)->base.code) == BLOCK;
767 block = BLOCK_SUPERCONTEXT (block)((tree_check ((block), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 767, __FUNCTION__, (BLOCK)))->block.supercontext)
)
768 if (inlined_polymorphic_ctor_dtor_block_p (block, false))
769 return true;
770 return false;
771 }
772 }
773 return true;
774}
775
776/* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
777 callsite CALL) by looking for assignments to its virtual table pointer. If
778 it is, return true. ARG is the object itself (not a pointer
779 to it, unless dereferenced). BASE is the base of the memory access as
780 returned by get_ref_base_and_extent, as is the offset.
781
782 This is helper function for detect_type_change and detect_type_change_ssa
 783 that does the heavy work which is usually unnecessary. */
784
785static bool
786detect_type_change_from_memory_writes (ipa_func_body_info *fbi, tree arg,
787 tree base, tree comp_type, gcall *call,
788 HOST_WIDE_INTlong offset)
789{
790 struct prop_type_change_info tci;
791 ao_ref ao;
792
793 gcc_checking_assert (DECL_P (arg)((void)(!((tree_code_type[(int) (((enum tree_code) (arg)->
base.code))] == tcc_declaration) || ((enum tree_code) (arg)->
base.code) == MEM_REF || handled_component_p (arg)) ? fancy_abort
("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 795, __FUNCTION__), 0 : 0))
794 || TREE_CODE (arg) == MEM_REF((void)(!((tree_code_type[(int) (((enum tree_code) (arg)->
base.code))] == tcc_declaration) || ((enum tree_code) (arg)->
base.code) == MEM_REF || handled_component_p (arg)) ? fancy_abort
("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 795, __FUNCTION__), 0 : 0))
795 || handled_component_p (arg))((void)(!((tree_code_type[(int) (((enum tree_code) (arg)->
base.code))] == tcc_declaration) || ((enum tree_code) (arg)->
base.code) == MEM_REF || handled_component_p (arg)) ? fancy_abort
("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 795, __FUNCTION__), 0 : 0))
;
796
797 comp_type = TYPE_MAIN_VARIANT (comp_type)((tree_class_check ((comp_type), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 797, __FUNCTION__))->type_common.main_variant)
;
798
799 /* Const calls cannot call virtual methods through VMT and so type changes do
800 not matter. */
801 if (!flag_devirtualizeglobal_options.x_flag_devirtualize || !gimple_vuse (call)
802 /* Be sure expected_type is polymorphic. */
803 || !comp_type
804 || TREE_CODE (comp_type)((enum tree_code) (comp_type)->base.code) != RECORD_TYPE
805 || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))((tree_check3 ((((tree_class_check ((comp_type), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 805, __FUNCTION__))->type_common.main_variant)), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 805, __FUNCTION__, (RECORD_TYPE), (UNION_TYPE), (QUAL_UNION_TYPE
)))->type_non_common.maxval)
806 || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type)))((tree_check ((((tree_check3 ((((tree_class_check ((comp_type
), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 806, __FUNCTION__))->type_common.main_variant)), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 806, __FUNCTION__, (RECORD_TYPE), (UNION_TYPE), (QUAL_UNION_TYPE
)))->type_non_common.maxval)), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 806, __FUNCTION__, (TREE_BINFO)))->binfo.vtable)
)
807 return true;
808
809 if (fbi->aa_walk_budget == 0)
810 return false;
811
812 ao_ref_init (&ao, arg);
813 ao.base = base;
814 ao.offset = offset;
815 ao.size = POINTER_SIZE(((global_options.x_ix86_isa_flags & (1UL << 58)) !=
0) ? 32 : ((8) * (((global_options.x_ix86_isa_flags & (1UL
<< 1)) != 0) ? 8 : 4)))
;
816 ao.max_size = ao.size;
817
818 tci.offset = offset;
819 tci.object = get_base_address (arg);
820 tci.type_maybe_changed = false;
821
822 int walked
823 = walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
824 &tci, NULLnullptr, NULLnullptr, fbi->aa_walk_budget);
825 if (walked >= 0)
826 fbi->aa_walk_budget -= walked;
827 else
828 fbi->aa_walk_budget = 0;
829
830 if (walked >= 0 && !tci.type_maybe_changed)
831 return false;
832
833 return true;
834}
835
836/* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
837 If it is, return true. ARG is the object itself (not a pointer
838 to it, unless dereferenced). BASE is the base of the memory access as
839 returned by get_ref_base_and_extent, as is the offset. */
840
841static bool
842detect_type_change (ipa_func_body_info *fbi, tree arg, tree base,
843 tree comp_type, gcall *call,
844 HOST_WIDE_INTlong offset)
845{
846 if (!flag_devirtualizeglobal_options.x_flag_devirtualize)
847 return false;
848
849 if (TREE_CODE (base)((enum tree_code) (base)->base.code) == MEM_REF
850 && !param_type_may_change_p (current_function_decl,
851 TREE_OPERAND (base, 0)(*((const_cast<tree*> (tree_operand_check ((base), (0),
"/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 851, __FUNCTION__)))))
,
852 call))
853 return false;
854 return detect_type_change_from_memory_writes (fbi, arg, base, comp_type,
855 call, offset);
856}
857
858/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
859 SSA name (its dereference will become the base and the offset is assumed to
860 be zero). */
861
862static bool
863detect_type_change_ssa (ipa_func_body_info *fbi, tree arg, tree comp_type,
864 gcall *call)
865{
866 gcc_checking_assert (TREE_CODE (arg) == SSA_NAME)((void)(!(((enum tree_code) (arg)->base.code) == SSA_NAME)
? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 866, __FUNCTION__), 0 : 0))
;
867 if (!flag_devirtualizeglobal_options.x_flag_devirtualize
868 || !POINTER_TYPE_P (TREE_TYPE (arg))(((enum tree_code) (((contains_struct_check ((arg), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 868, __FUNCTION__))->typed.type))->base.code) == POINTER_TYPE
|| ((enum tree_code) (((contains_struct_check ((arg), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 868, __FUNCTION__))->typed.type))->base.code) == REFERENCE_TYPE
)
)
869 return false;
870
871 if (!param_type_may_change_p (current_function_decl, arg, call))
872 return false;
873
874 arg = build2 (MEM_REF, ptr_type_nodeglobal_trees[TI_PTR_TYPE], arg,
875 build_int_cst (ptr_type_nodeglobal_trees[TI_PTR_TYPE], 0));
876
877 return detect_type_change_from_memory_writes (fbi, arg, arg, comp_type,
878 call, 0);
879}
880
881/* Callback of walk_aliased_vdefs. Flags that it has been invoked to the
882 boolean variable pointed to by DATA. */
883
884static bool
885mark_modified (ao_ref *ao ATTRIBUTE_UNUSED__attribute__ ((__unused__)), tree vdef ATTRIBUTE_UNUSED__attribute__ ((__unused__)),
886 void *data)
887{
888 bool *b = (bool *) data;
889 *b = true;
890 return true;
891}
892
893/* Find the nearest valid aa status for parameter specified by INDEX that
894 dominates BB. */
895
896static struct ipa_param_aa_status *
897find_dominating_aa_status (struct ipa_func_body_info *fbi, basic_block bb,
898 int index)
899{
900 while (true)
901 {
902 bb = get_immediate_dominator (CDI_DOMINATORS, bb);
903 if (!bb)
904 return NULLnullptr;
905 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
906 if (!bi->param_aa_statuses.is_empty ()
907 && bi->param_aa_statuses[index].valid)
908 return &bi->param_aa_statuses[index];
909 }
910}
911
912/* Get AA status structure for the given BB and parameter with INDEX. Allocate
 913 structures and/or initialize the result with a dominating description as
914 necessary. */
915
916static struct ipa_param_aa_status *
917parm_bb_aa_status_for_bb (struct ipa_func_body_info *fbi, basic_block bb,
918 int index)
919{
920 gcc_checking_assert (fbi)((void)(!(fbi) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 920, __FUNCTION__), 0 : 0))
;
921 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
922 if (bi->param_aa_statuses.is_empty ())
923 bi->param_aa_statuses.safe_grow_cleared (fbi->param_count, true);
924 struct ipa_param_aa_status *paa = &bi->param_aa_statuses[index];
925 if (!paa->valid)
926 {
927 gcc_checking_assert (!paa->parm_modified((void)(!(!paa->parm_modified && !paa->ref_modified
&& !paa->pt_modified) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 929, __FUNCTION__), 0 : 0))
928 && !paa->ref_modified((void)(!(!paa->parm_modified && !paa->ref_modified
&& !paa->pt_modified) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 929, __FUNCTION__), 0 : 0))
929 && !paa->pt_modified)((void)(!(!paa->parm_modified && !paa->ref_modified
&& !paa->pt_modified) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 929, __FUNCTION__), 0 : 0))
;
930 struct ipa_param_aa_status *dom_paa;
931 dom_paa = find_dominating_aa_status (fbi, bb, index);
932 if (dom_paa)
933 *paa = *dom_paa;
934 else
935 paa->valid = true;
936 }
937
938 return paa;
939}
940
941/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
942 a value known not to be modified in this function before reaching the
943 statement STMT. FBI holds information about the function we have so far
944 gathered but do not survive the summary building stage. */
945
946static bool
947parm_preserved_before_stmt_p (struct ipa_func_body_info *fbi, int index,
948 gimple *stmt, tree parm_load)
949{
950 struct ipa_param_aa_status *paa;
951 bool modified = false;
952 ao_ref refd;
953
954 tree base = get_base_address (parm_load);
955 gcc_assert (TREE_CODE (base) == PARM_DECL)((void)(!(((enum tree_code) (base)->base.code) == PARM_DECL
) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 955, __FUNCTION__), 0 : 0))
;
956 if (TREE_READONLY (base)((non_type_check ((base), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 956, __FUNCTION__))->base.readonly_flag)
)
957 return true;
958
959 gcc_checking_assert (fbi)((void)(!(fbi) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 959, __FUNCTION__), 0 : 0))
;
960 paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
961 if (paa->parm_modified || fbi->aa_walk_budget == 0)
962 return false;
963
964 gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE)((void)(!(gimple_vuse (stmt) != (tree) nullptr) ? fancy_abort
("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 964, __FUNCTION__), 0 : 0))
;
965 ao_ref_init (&refd, parm_load);
966 int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
967 &modified, NULLnullptr, NULLnullptr,
968 fbi->aa_walk_budget);
969 if (walked < 0)
970 {
971 modified = true;
972 fbi->aa_walk_budget = 0;
973 }
974 else
975 fbi->aa_walk_budget -= walked;
976 if (paa && modified)
977 paa->parm_modified = true;
978 return !modified;
979}
980
981/* If STMT is an assignment that loads a value from an parameter declaration,
982 return the index of the parameter in ipa_node_params which has not been
983 modified. Otherwise return -1. */
984
985static int
986load_from_unmodified_param (struct ipa_func_body_info *fbi,
987 vec<ipa_param_descriptor, va_gc> *descriptors,
988 gimple *stmt)
989{
990 int index;
991 tree op1;
992
993 if (!gimple_assign_single_p (stmt))
994 return -1;
995
996 op1 = gimple_assign_rhs1 (stmt);
997 if (TREE_CODE (op1)((enum tree_code) (op1)->base.code) != PARM_DECL)
998 return -1;
999
1000 index = ipa_get_param_decl_index_1 (descriptors, op1);
1001 if (index < 0
1002 || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
1003 return -1;
1004
1005 return index;
1006}
1007
1008/* Return true if memory reference REF (which must be a load through parameter
1009 with INDEX) loads data that are known to be unmodified in this function
1010 before reaching statement STMT. */
1011
1012static bool
1013parm_ref_data_preserved_p (struct ipa_func_body_info *fbi,
1014 int index, gimple *stmt, tree ref)
1015{
1016 struct ipa_param_aa_status *paa;
1017 bool modified = false;
1018 ao_ref refd;
1019
1020 gcc_checking_assert (fbi)((void)(!(fbi) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 1020, __FUNCTION__), 0 : 0))
;
1021 paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
1022 if (paa->ref_modified || fbi->aa_walk_budget == 0)
1023 return false;
1024
1025 gcc_checking_assert (gimple_vuse (stmt))((void)(!(gimple_vuse (stmt)) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 1025, __FUNCTION__), 0 : 0))
;
1026 ao_ref_init (&refd, ref);
1027 int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
1028 &modified, NULLnullptr, NULLnullptr,
1029 fbi->aa_walk_budget);
1030 if (walked < 0)
1031 {
1032 modified = true;
1033 fbi->aa_walk_budget = 0;
1034 }
1035 else
1036 fbi->aa_walk_budget -= walked;
1037 if (modified)
1038 paa->ref_modified = true;
1039 return !modified;
1040}
1041
1042/* Return true if the data pointed to by PARM (which is a parameter with INDEX)
1043 is known to be unmodified in this function before reaching call statement
1044 CALL into which it is passed. FBI describes the function body. */
1045
1046static bool
1047parm_ref_data_pass_through_p (struct ipa_func_body_info *fbi, int index,
1048 gimple *call, tree parm)
1049{
1050 bool modified = false;
1051 ao_ref refd;
1052
1053 /* It's unnecessary to calculate anything about memory contnets for a const
1054 function because it is not goin to use it. But do not cache the result
1055 either. Also, no such calculations for non-pointers. */
1056 if (!gimple_vuse (call)
1057 || !POINTER_TYPE_P (TREE_TYPE (parm))(((enum tree_code) (((contains_struct_check ((parm), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 1057, __FUNCTION__))->typed.type))->base.code) == POINTER_TYPE
|| ((enum tree_code) (((contains_struct_check ((parm), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 1057, __FUNCTION__))->typed.type))->base.code) == REFERENCE_TYPE
)
)
1058 return false;
1059
1060 struct ipa_param_aa_status *paa = parm_bb_aa_status_for_bb (fbi,
1061 gimple_bb (call),
1062 index);
1063 if (paa->pt_modified || fbi->aa_walk_budget == 0)
1064 return false;
1065
1066 ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE(tree) nullptr);
1067 int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
1068 &modified, NULLnullptr, NULLnullptr,
1069 fbi->aa_walk_budget);
1070 if (walked < 0)
1071 {
1072 fbi->aa_walk_budget = 0;
1073 modified = true;
1074 }
1075 else
1076 fbi->aa_walk_budget -= walked;
1077 if (modified)
1078 paa->pt_modified = true;
1079 return !modified;
1080}
1081
1082/* Return true if we can prove that OP is a memory reference loading
1083 data from an aggregate passed as a parameter.
1084
 1085 The function works in two modes. If GUARANTEED_UNMODIFIED is NULL, it returns
1086 false if it cannot prove that the value has not been modified before the
1087 load in STMT. If GUARANTEED_UNMODIFIED is not NULL, it will return true even
1088 if it cannot prove the value has not been modified, in that case it will
1089 store false to *GUARANTEED_UNMODIFIED, otherwise it will store true there.
1090
1091 INFO and PARMS_AINFO describe parameters of the current function (but the
1092 latter can be NULL), STMT is the load statement. If function returns true,
1093 *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index, offset
1094 within the aggregate and whether it is a load from a value passed by
1095 reference respectively. */
1096
1097bool
1098ipa_load_from_parm_agg (struct ipa_func_body_info *fbi,
1099 vec<ipa_param_descriptor, va_gc> *descriptors,
1100 gimple *stmt, tree op, int *index_p,
1101 HOST_WIDE_INTlong *offset_p, poly_int64 *size_p,
1102 bool *by_ref_p, bool *guaranteed_unmodified)
1103{
1104 int index;
1105 HOST_WIDE_INTlong size;
1106 bool reverse;
1107 tree base = get_ref_base_and_extent_hwi (op, offset_p, &size, &reverse);
1108
1109 if (!base)
1110 return false;
1111
1112 if (DECL_P (base)(tree_code_type[(int) (((enum tree_code) (base)->base.code
))] == tcc_declaration)
)
1113 {
1114 int index = ipa_get_param_decl_index_1 (descriptors, base);
1115 if (index >= 0
1116 && parm_preserved_before_stmt_p (fbi, index, stmt, op))
1117 {
1118 *index_p = index;
1119 *by_ref_p = false;
1120 if (size_p)
1121 *size_p = size;
1122 if (guaranteed_unmodified)
1123 *guaranteed_unmodified = true;
1124 return true;
1125 }
1126 return false;
1127 }
1128
1129 if (TREE_CODE (base)((enum tree_code) (base)->base.code) != MEM_REF
1130 || TREE_CODE (TREE_OPERAND (base, 0))((enum tree_code) ((*((const_cast<tree*> (tree_operand_check
((base), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 1130, __FUNCTION__))))))->base.code)
!= SSA_NAME
1131 || !integer_zerop (TREE_OPERAND (base, 1)(*((const_cast<tree*> (tree_operand_check ((base), (1),
"/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 1131, __FUNCTION__)))))
))
1132 return false;
1133
1134 if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0))(tree_check (((*((const_cast<tree*> (tree_operand_check
((base), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 1134, __FUNCTION__)))))), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 1134, __FUNCTION__, (SSA_NAME)))->base.default_def_flag
)
1135 {
1136 tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0))((tree_check (((*((const_cast<tree*> (tree_operand_check
((base), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 1136, __FUNCTION__)))))), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 1136, __FUNCTION__, (SSA_NAME)))->ssa_name.var == (tree)
nullptr || ((enum tree_code) (((*((const_cast<tree*> (
tree_operand_check ((base), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 1136, __FUNCTION__))))))->ssa_name.var)->base.code) ==
IDENTIFIER_NODE ? (tree) nullptr : ((*((const_cast<tree*>
(tree_operand_check ((base), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 1136, __FUNCTION__))))))->ssa_name.var)
;
1137 index = ipa_get_param_decl_index_1 (descriptors, parm);
1138 }
1139 else
1140 {
1141 /* This branch catches situations where a pointer parameter is not a
1142 gimple register, for example:
1143
1144 void hip7(S*) (struct S * p)
1145 {
1146 void (*<T2e4>) (struct S *) D.1867;
1147 struct S * p.1;
1148
1149 <bb 2>:
1150 p.1_1 = p;
1151 D.1867_2 = p.1_1->f;
1152 D.1867_2 ();
1153 gdp = &p;
1154 */
1155
1156 gimple *def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0))(tree_check (((*((const_cast<tree*> (tree_operand_check
((base), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 1156, __FUNCTION__)))))), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 1156, __FUNCTION__, (SSA_NAME)))->ssa_name.def_stmt
;
1157 index = load_from_unmodified_param (fbi, descriptors, def);
1158 }
1159
1160 if (index >= 0)
1161 {
1162 bool data_preserved = parm_ref_data_preserved_p (fbi, index, stmt, op);
1163 if (!data_preserved && !guaranteed_unmodified)
1164 return false;
1165
1166 *index_p = index;
1167 *by_ref_p = true;
1168 if (size_p)
1169 *size_p = size;
1170 if (guaranteed_unmodified)
1171 *guaranteed_unmodified = data_preserved;
1172 return true;
1173 }
1174 return false;
1175}
1176
1177/* If STMT is an assignment that loads a value from a parameter declaration,
1178 or from an aggregate passed as the parameter either by value or reference,
1179 return the index of the parameter in ipa_node_params. Otherwise return -1.
1180
1181 FBI holds gathered information about the function. INFO describes
1182 parameters of the function, STMT is the assignment statement. If it is a
1183 memory load from an aggregate, *OFFSET_P is filled with offset within the
1184 aggregate, and *BY_REF_P specifies whether the aggregate is passed by
1185 reference. */
1186
1187static int
1188load_from_unmodified_param_or_agg (struct ipa_func_body_info *fbi,
1189 class ipa_node_params *info,
1190 gimple *stmt,
1191 HOST_WIDE_INTlong *offset_p,
1192 bool *by_ref_p)
1193{
1194 int index = load_from_unmodified_param (fbi, info->descriptors, stmt);
1195 poly_int64 size;
1196
1197 /* Load value from a parameter declaration. */
1198 if (index >= 0)
1199 {
1200 *offset_p = -1;
1201 return index;
1202 }
1203
1204 if (!gimple_assign_load_p (stmt))
1205 return -1;
1206
1207 tree rhs = gimple_assign_rhs1 (stmt);
1208
1209 /* Skip memory reference containing VIEW_CONVERT_EXPR. */
1210 for (tree t = rhs; handled_component_p (t); t = TREE_OPERAND (t, 0)(*((const_cast<tree*> (tree_operand_check ((t), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 1210, __FUNCTION__)))))
)
1211 if (TREE_CODE (t)((enum tree_code) (t)->base.code) == VIEW_CONVERT_EXPR)
1212 return -1;
1213
1214 /* Skip memory reference containing bit-field. */
1215 if (TREE_CODE (rhs)((enum tree_code) (rhs)->base.code) == BIT_FIELD_REF
1216 || contains_bitfld_component_ref_p (rhs))
1217 return -1;
1218
1219 if (!ipa_load_from_parm_agg (fbi, info->descriptors, stmt, rhs, &index,
1220 offset_p, &size, by_ref_p))
1221 return -1;
1222
1223 gcc_assert (!maybe_ne (tree_to_poly_int64 (TYPE_SIZE (TREE_TYPE (rhs))),((void)(!(!maybe_ne (tree_to_poly_int64 (((tree_class_check (
(((contains_struct_check ((rhs), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 1223, __FUNCTION__))->typed.type)), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 1223, __FUNCTION__))->type_common.size)), size)) ? fancy_abort
("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 1224, __FUNCTION__), 0 : 0))
1224 size))((void)(!(!maybe_ne (tree_to_poly_int64 (((tree_class_check (
(((contains_struct_check ((rhs), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 1223, __FUNCTION__))->typed.type)), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 1223, __FUNCTION__))->type_common.size)), size)) ? fancy_abort
("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 1224, __FUNCTION__), 0 : 0))
;
1225 if (!*by_ref_p)
1226 {
1227 tree param_type = ipa_get_type (info, index);
1228
1229 if (!param_type || !AGGREGATE_TYPE_P (param_type)(((enum tree_code) (param_type)->base.code) == ARRAY_TYPE ||
(((enum tree_code) (param_type)->base.code) == RECORD_TYPE
|| ((enum tree_code) (param_type)->base.code) == UNION_TYPE
|| ((enum tree_code) (param_type)->base.code) == QUAL_UNION_TYPE
))
)
1230 return -1;
1231 }
1232 else if (TREE_THIS_VOLATILE (rhs)((rhs)->base.volatile_flag))
1233 return -1;
1234
1235 return index;
1236}
1237
 1238/* Walk pointer adjustments from OP (such as POINTER_PLUS and ADDR_EXPR)
1239 to find original pointer. Initialize RET to the pointer which results from
1240 the walk.
1241 If offset is known return true and initialize OFFSET_RET. */
1242
1243bool
1244unadjusted_ptr_and_unit_offset (tree op, tree *ret, poly_int64 *offset_ret)
1245{
1246 poly_int64 offset = 0;
1247 bool offset_known = true;
1248 int i;
1249
1250 for (i = 0; i < param_ipa_jump_function_lookupsglobal_options.x_param_ipa_jump_function_lookups; i++)
1251 {
1252 if (TREE_CODE (op)((enum tree_code) (op)->base.code) == ADDR_EXPR)
1253 {
1254 poly_int64 extra_offset = 0;
1255 tree base = get_addr_base_and_unit_offset (TREE_OPERAND (op, 0)(*((const_cast<tree*> (tree_operand_check ((op), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 1255, __FUNCTION__)))))
,
1256 &offset);
1257 if (!base)
1258 {
1259 base = get_base_address (TREE_OPERAND (op, 0)(*((const_cast<tree*> (tree_operand_check ((op), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 1259, __FUNCTION__)))))
);
1260 if (TREE_CODE (base)((enum tree_code) (base)->base.code) != MEM_REF)
1261 break;
1262 offset_known = false;
1263 }
1264 else
1265 {
1266 if (TREE_CODE (base)((enum tree_code) (base)->base.code) != MEM_REF)
1267 break;
1268 offset += extra_offset;
1269 }
1270 op = TREE_OPERAND (base, 0)(*((const_cast<tree*> (tree_operand_check ((base), (0),
"/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 1270, __FUNCTION__)))))
;
1271 if (mem_ref_offset (base).to_shwi (&extra_offset))
1272 offset += extra_offset;
1273 else
1274 offset_known = false;
1275 }
1276 else if (TREE_CODE (op)((enum tree_code) (op)->base.code) == SSA_NAME
1277 && !SSA_NAME_IS_DEFAULT_DEF (op)(tree_check ((op), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 1277, __FUNCTION__, (SSA_NAME)))->base.default_def_flag
)
1278 {
1279 gimple *pstmt = SSA_NAME_DEF_STMT (op)(tree_check ((op), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 1279, __FUNCTION__, (SSA_NAME)))->ssa_name.def_stmt
;
1280
1281 if (gimple_assign_single_p (pstmt))
1282 op = gimple_assign_rhs1 (pstmt);
1283 else if (is_gimple_assign (pstmt)
1284 && gimple_assign_rhs_code (pstmt) == POINTER_PLUS_EXPR)
1285 {
1286 poly_int64 extra_offset = 0;
1287 if (ptrdiff_tree_p (gimple_assign_rhs2 (pstmt),
1288 &extra_offset))
1289 offset += extra_offset;
1290 else
1291 offset_known = false;
1292 op = gimple_assign_rhs1 (pstmt);
1293 }
1294 else
1295 break;
1296 }
1297 else
1298 break;
1299 }
1300 *ret = op;
1301 *offset_ret = offset;
1302 return offset_known;
1303}
1304
1305/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
1306 of an assignment statement STMT, try to determine whether we are actually
1307 handling any of the following cases and construct an appropriate jump
1308 function into JFUNC if so:
1309
1310 1) The passed value is loaded from a formal parameter which is not a gimple
1311 register (most probably because it is addressable, the value has to be
1312 scalar) and we can guarantee the value has not changed. This case can
1313 therefore be described by a simple pass-through jump function. For example:
1314
1315 foo (int a)
1316 {
1317 int a.0;
1318
1319 a.0_2 = a;
1320 bar (a.0_2);
1321
1322 2) The passed value can be described by a simple arithmetic pass-through
1323 jump function. E.g.
1324
1325 foo (int a)
1326 {
1327 int D.2064;
1328
1329 D.2064_4 = a.1(D) + 4;
1330 bar (D.2064_4);
1331
1332 This case can also occur in combination of the previous one, e.g.:
1333
1334 foo (int a, int z)
1335 {
1336 int a.0;
1337 int D.2064;
1338
1339 a.0_3 = a;
1340 D.2064_4 = a.0_3 + 4;
1341 foo (D.2064_4);
1342
1343 3) The passed value is an address of an object within another one (which
1344 also passed by reference). Such situations are described by an ancestor
1345 jump function and describe situations such as:
1346
1347 B::foo() (struct B * const this)
1348 {
1349 struct A * D.1845;
1350
1351 D.1845_2 = &this_1(D)->D.1748;
1352 A::bar (D.1845_2);
1353
1354 INFO is the structure describing individual parameters access different
1355 stages of IPA optimizations. PARMS_AINFO contains the information that is
1356 only needed for intraprocedural analysis. */
1357
1358static void
1359compute_complex_assign_jump_func (struct ipa_func_body_info *fbi,
1360 class ipa_node_params *info,
1361 struct ipa_jump_func *jfunc,
1362 gcall *call, gimple *stmt, tree name,
1363 tree param_type)
1364{
1365 HOST_WIDE_INTlong offset, size;
1366 tree op1, tc_ssa, base, ssa;
1367 bool reverse;
1368 int index;
1369
1370 op1 = gimple_assign_rhs1 (stmt);
1371
/* Stage one: try to trace the RHS (or, for a load, the statement itself)
   back to an unmodified formal parameter; on success INDEX is its number,
   otherwise it stays negative.  */
1372 if (TREE_CODE (op1)((enum tree_code) (op1)->base.code) == SSA_NAME)
1373 {
1374 if (SSA_NAME_IS_DEFAULT_DEF (op1)(tree_check ((op1), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 1374, __FUNCTION__, (SSA_NAME)))->base.default_def_flag
)
1375 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1)((tree_check ((op1), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 1375, __FUNCTION__, (SSA_NAME)))->ssa_name.var == (tree)
nullptr || ((enum tree_code) ((op1)->ssa_name.var)->base
.code) == IDENTIFIER_NODE ? (tree) nullptr : (op1)->ssa_name
.var)
);
1376 else
1377 index = load_from_unmodified_param (fbi, info->descriptors,
1378 SSA_NAME_DEF_STMT (op1)(tree_check ((op1), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 1378, __FUNCTION__, (SSA_NAME)))->ssa_name.def_stmt
);
1379 tc_ssa = op1;
1380 }
1381 else
1382 {
1383 index = load_from_unmodified_param (fbi, info->descriptors, stmt);
1384 tc_ssa = gimple_assign_lhs (stmt);
1385 }
1386
1387 if (index >= 0)
1388 {
/* Cases 1 and 2 from the function comment: build an arithmetic,
   simple or unary pass-through jump function, depending on the RHS
   class of STMT.  */
1389 switch (gimple_assign_rhs_class (stmt))
1390 {
1391 case GIMPLE_BINARY_RHS:
1392 {
1393 tree op2 = gimple_assign_rhs2 (stmt);
/* The second operand must be interprocedurally invariant, and for
   non-comparisons the types must match so the operation makes sense
   in the callee too.  */
1394 if (!is_gimple_ip_invariant (op2)
1395 || ((TREE_CODE_CLASS (gimple_assign_rhs_code (stmt))tree_code_type[(int) (gimple_assign_rhs_code (stmt))]
1396 != tcc_comparison)
1397 && !useless_type_conversion_p (TREE_TYPE (name)((contains_struct_check ((name), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 1397, __FUNCTION__))->typed.type)
,
1398 TREE_TYPE (op1)((contains_struct_check ((op1), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 1398, __FUNCTION__))->typed.type)
)))
1399 return;
1400
1401 ipa_set_jf_arith_pass_through (jfunc, index, op2,
1402 gimple_assign_rhs_code (stmt));
1403 break;
1404 }
1405 case GIMPLE_SINGLE_RHS:
1406 {
1407 bool agg_p = parm_ref_data_pass_through_p (fbi, index, call,
1408 tc_ssa);
1409 ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
1410 break;
1411 }
1412 case GIMPLE_UNARY_RHS:
/* Conversions are not encoded as unary pass-throughs; only other
   unary operations are.  Note the deliberate fall through to
   default.  */
1413 if (!CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))((gimple_assign_rhs_code (stmt)) == NOP_EXPR || (gimple_assign_rhs_code
(stmt)) == CONVERT_EXPR)
)
1414 ipa_set_jf_unary_pass_through (jfunc, index,
1415 gimple_assign_rhs_code (stmt));
1416 default:;
1417 }
1418 return;
1419 }
1420
/* Stage two, case 3 from the function comment: the RHS takes the address
   of a component of an object that itself was passed by reference - build
   an ancestor jump function.  */
1421 if (TREE_CODE (op1)((enum tree_code) (op1)->base.code) != ADDR_EXPR)
1422 return;
1423 op1 = TREE_OPERAND (op1, 0)(*((const_cast<tree*> (tree_operand_check ((op1), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 1423, __FUNCTION__)))))
;
1424 base = get_ref_base_and_extent_hwi (op1, &offset, &size, &reverse);
1425 offset_int mem_offset;
1426 if (!base
1427 || TREE_CODE (base)((enum tree_code) (base)->base.code) != MEM_REF
1428 || !mem_ref_offset (base).is_constant (&mem_offset))
1429 return;
/* OFFSET accumulates both the component offset and the MEM_REF offset,
   in bits.  */
1430 offset += mem_offset.to_short_addr () * BITS_PER_UNIT(8);
1431 ssa = TREE_OPERAND (base, 0)(*((const_cast<tree*> (tree_operand_check ((base), (0),
"/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 1431, __FUNCTION__)))))
;
1432 if (TREE_CODE (ssa)((enum tree_code) (ssa)->base.code) != SSA_NAME
1433 || !SSA_NAME_IS_DEFAULT_DEF (ssa)(tree_check ((ssa), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 1433, __FUNCTION__, (SSA_NAME)))->base.default_def_flag
1434 || offset < 0)
1435 return;
1436
1437 /* Dynamic types are changed in constructors and destructors.  */
1438 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa)((tree_check ((ssa), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 1438, __FUNCTION__, (SSA_NAME)))->ssa_name.var == (tree)
nullptr || ((enum tree_code) ((ssa)->ssa_name.var)->base
.code) == IDENTIFIER_NODE ? (tree) nullptr : (ssa)->ssa_name
.var)
);
1439 if (index >= 0 && param_type && POINTER_TYPE_P (param_type)(((enum tree_code) (param_type)->base.code) == POINTER_TYPE
|| ((enum tree_code) (param_type)->base.code) == REFERENCE_TYPE
)
)
1440 ipa_set_ancestor_jf (jfunc, offset, index,
1441 parm_ref_data_pass_through_p (fbi, index, call, ssa));
1442}
1443
1444/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
1445 it looks like:
1446
1447 iftmp.1_3 = &obj_2(D)->D.1762;
1448
1449 The base of the MEM_REF must be a default definition SSA NAME of a
1450 parameter. Return NULL_TREE if it looks otherwise. In case of success, the
1451 whole MEM_REF expression is returned and the offset calculated from any
1452 handled components and the MEM_REF itself is stored into *OFFSET. The whole
1453 RHS stripped off the ADDR_EXPR is stored into *OBJ_P. */
1454
1455static tree
1456get_ancestor_addr_info (gimple *assign, tree *obj_p, HOST_WIDE_INTlong *offset)
1457{
1458 HOST_WIDE_INTlong size;
1459 tree expr, parm, obj;
1460 bool reverse;
1461
/* Only a plain single-RHS assignment whose RHS is an ADDR_EXPR can match
   the pattern described in the function comment.  */
1462 if (!gimple_assign_single_p (assign))
1463 return NULL_TREE(tree) nullptr;
1464 expr = gimple_assign_rhs1 (assign);
1465
1466 if (TREE_CODE (expr)((enum tree_code) (expr)->base.code) != ADDR_EXPR)
1467 return NULL_TREE(tree) nullptr;
1468 expr = TREE_OPERAND (expr, 0)(*((const_cast<tree*> (tree_operand_check ((expr), (0),
"/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 1468, __FUNCTION__)))))
;
/* OBJ keeps the whole reference (the ADDR_EXPR operand) for *OBJ_P, while
   EXPR is reduced to its base; *OFFSET receives the component offset.  */
1469 obj = expr;
1470 expr = get_ref_base_and_extent_hwi (expr, offset, &size, &reverse);
1471
1472 offset_int mem_offset;
1473 if (!expr
1474 || TREE_CODE (expr)((enum tree_code) (expr)->base.code) != MEM_REF
1475 || !mem_ref_offset (expr).is_constant (&mem_offset))
1476 return NULL_TREE(tree) nullptr;
1477 parm = TREE_OPERAND (expr, 0)(*((const_cast<tree*> (tree_operand_check ((expr), (0),
"/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 1477, __FUNCTION__)))))
;
/* The MEM_REF base must be a default-definition SSA name of a PARM_DECL,
   i.e. the incoming value of a formal parameter.  */
1478 if (TREE_CODE (parm)((enum tree_code) (parm)->base.code) != SSA_NAME
1479 || !SSA_NAME_IS_DEFAULT_DEF (parm)(tree_check ((parm), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 1479, __FUNCTION__, (SSA_NAME)))->base.default_def_flag
1480 || TREE_CODE (SSA_NAME_VAR (parm))((enum tree_code) (((tree_check ((parm), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 1480, __FUNCTION__, (SSA_NAME)))->ssa_name.var == (tree)
nullptr || ((enum tree_code) ((parm)->ssa_name.var)->base
.code) == IDENTIFIER_NODE ? (tree) nullptr : (parm)->ssa_name
.var))->base.code)
!= PARM_DECL)
1481 return NULL_TREE(tree) nullptr;
1482
/* Fold the MEM_REF's own offset into *OFFSET; both are in bits here.  */
1483 *offset += mem_offset.to_short_addr () * BITS_PER_UNIT(8);
1484 *obj_p = obj;
1485 return expr;
1486}
1487
1488
1489/* Given that an actual argument is an SSA_NAME that is a result of a phi
1490 statement PHI, try to find out whether NAME is in fact a
1491 multiple-inheritance typecast from a descendant into an ancestor of a formal
1492 parameter and thus can be described by an ancestor jump function and if so,
1493 write the appropriate function into JFUNC.
1494
1495 Essentially we want to match the following pattern:
1496
1497 if (obj_2(D) != 0B)
1498 goto <bb 3>;
1499 else
1500 goto <bb 4>;
1501
1502 <bb 3>:
1503 iftmp.1_3 = &obj_2(D)->D.1762;
1504
1505 <bb 4>:
1506 # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
1507 D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
1508 return D.1879_6; */
1509
1510static void
1511compute_complex_ancestor_jump_func (struct ipa_func_body_info *fbi,
1512 class ipa_node_params *info,
1513 struct ipa_jump_func *jfunc,
1514 gcall *call, gphi *phi)
1515{
1516 HOST_WIDE_INTlong offset;
1517 gimple *assign, *cond;
1518 basic_block phi_bb, assign_bb, cond_bb;
1519 tree tmp, parm, expr, obj;
1520 int index, i;
1521
/* The PHI must have exactly two arguments, one of which is a literal zero
   (the null-pointer branch of the pattern in the function comment); TMP is
   the other, non-zero argument.  */
1522 if (gimple_phi_num_args (phi) != 2)
1523 return;
1524
1525 if (integer_zerop (PHI_ARG_DEF (phi, 1)gimple_phi_arg_def ((phi), (1))))
1526 tmp = PHI_ARG_DEF (phi, 0)gimple_phi_arg_def ((phi), (0));
1527 else if (integer_zerop (PHI_ARG_DEF (phi, 0)gimple_phi_arg_def ((phi), (0))))
1528 tmp = PHI_ARG_DEF (phi, 1)gimple_phi_arg_def ((phi), (1));
1529 else
1530 return;
/* TMP must be a pointer to a RECORD_TYPE computed inside this function
   (not an incoming default definition).  */
1531 if (TREE_CODE (tmp)((enum tree_code) (tmp)->base.code) != SSA_NAME
1532 || SSA_NAME_IS_DEFAULT_DEF (tmp)(tree_check ((tmp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 1532, __FUNCTION__, (SSA_NAME)))->base.default_def_flag
1533 || !POINTER_TYPE_P (TREE_TYPE (tmp))(((enum tree_code) (((contains_struct_check ((tmp), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 1533, __FUNCTION__))->typed.type))->base.code) == POINTER_TYPE
|| ((enum tree_code) (((contains_struct_check ((tmp), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 1533, __FUNCTION__))->typed.type))->base.code) == REFERENCE_TYPE
)
1534 || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp)))((enum tree_code) (((contains_struct_check ((((contains_struct_check
((tmp), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 1534, __FUNCTION__))->typed.type)), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 1534, __FUNCTION__))->typed.type))->base.code)
!= RECORD_TYPE)
1535 return;
1536
/* TMP's defining statement must be the address-taking assignment, and it
   must sit in a block with a single predecessor (the condition block).  */
1537 assign = SSA_NAME_DEF_STMT (tmp)(tree_check ((tmp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 1537, __FUNCTION__, (SSA_NAME)))->ssa_name.def_stmt
;
1538 assign_bb = gimple_bb (assign);
1539 if (!single_pred_p (assign_bb))
1540 return;
1541 expr = get_ancestor_addr_info (assign, &obj, &offset);
1542 if (!expr)
1543 return;
1544 parm = TREE_OPERAND (expr, 0)(*((const_cast<tree*> (tree_operand_check ((expr), (0),
"/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 1544, __FUNCTION__)))))
;
1545 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm)((tree_check ((parm), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 1545, __FUNCTION__, (SSA_NAME)))->ssa_name.var == (tree)
nullptr || ((enum tree_code) ((parm)->ssa_name.var)->base
.code) == IDENTIFIER_NODE ? (tree) nullptr : (parm)->ssa_name
.var)
);
1546 if (index < 0)
1547 return;
1548
/* The guarding condition must be exactly "parm != 0".  */
1549 cond_bb = single_pred (assign_bb);
1550 cond = last_stmt (cond_bb);
1551 if (!cond
1552 || gimple_code (cond) != GIMPLE_COND
1553 || gimple_cond_code (cond) != NE_EXPR
1554 || gimple_cond_lhs (cond) != parm
1555 || !integer_zerop (gimple_cond_rhs (cond)))
1556 return;
1557
/* Both PHI predecessors must come from the matched diamond - either the
   assignment block or the condition block itself.  */
1558 phi_bb = gimple_bb (phi);
1559 for (i = 0; i < 2; i++)
1560 {
1561 basic_block pred = EDGE_PRED (phi_bb, i)(*(phi_bb)->preds)[(i)]->src;
1562 if (pred != assign_bb && pred != cond_bb)
1563 return;
1564 }
1565
1566 ipa_set_ancestor_jf (jfunc, offset, index,
1567 parm_ref_data_pass_through_p (fbi, index, call, parm));
1568}
1569
1570/* Inspect the given TYPE and return true iff it has the same structure (the
1571 same number of fields of the same types) as a C++ member pointer. If
1572 METHOD_PTR and DELTA are non-NULL, store the trees representing the
1573 corresponding fields there. */
1574
1575static bool
1576type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
1577{
1578 tree fld;
1579
1580 if (TREE_CODE (type)((enum tree_code) (type)->base.code) != RECORD_TYPE)
1581 return false;
1582
1583 fld = TYPE_FIELDS (type)((tree_check3 ((type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 1583, __FUNCTION__, (RECORD_TYPE), (UNION_TYPE), (QUAL_UNION_TYPE
)))->type_non_common.values)
;
1584 if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))(((enum tree_code) (((contains_struct_check ((fld), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 1584, __FUNCTION__))->typed.type))->base.code) == POINTER_TYPE
|| ((enum tree_code) (((contains_struct_check ((fld), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 1584, __FUNCTION__))->typed.type))->base.code) == REFERENCE_TYPE
)
1585 || TREE_CODE (TREE_TYPE (TREE_TYPE (fld)))((enum tree_code) (((contains_struct_check ((((contains_struct_check
((fld), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 1585, __FUNCTION__))->typed.type)), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 1585, __FUNCTION__))->typed.type))->base.code)
!= METHOD_TYPE
1586 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)((tree_check ((fld), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 1586, __FUNCTION__, (FIELD_DECL)))->field_decl.offset)
))
1587 return false;
1588
1589 if (method_ptr)
1590 *method_ptr = fld;
1591
1592 fld = DECL_CHAIN (fld)(((contains_struct_check (((contains_struct_check ((fld), (TS_DECL_MINIMAL
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 1592, __FUNCTION__))), (TS_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 1592, __FUNCTION__))->common.chain))
;
1593 if (!fld || INTEGRAL_TYPE_P (fld)(((enum tree_code) (fld)->base.code) == ENUMERAL_TYPE || (
(enum tree_code) (fld)->base.code) == BOOLEAN_TYPE || ((enum
tree_code) (fld)->base.code) == INTEGER_TYPE)
1594 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)((tree_check ((fld), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 1594, __FUNCTION__, (FIELD_DECL)))->field_decl.offset)
))
1595 return false;
1596 if (delta)
1597 *delta = fld;
1598
1599 if (DECL_CHAIN (fld)(((contains_struct_check (((contains_struct_check ((fld), (TS_DECL_MINIMAL
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 1599, __FUNCTION__))), (TS_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 1599, __FUNCTION__))->common.chain))
)
1600 return false;
1601
1602 return true;
1603}
1604
1605/* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
1606 return the rhs of its defining statement, and this statement is stored in
1607 *RHS_STMT. Otherwise return RHS as it is. */
1608
1609static inline tree
1610get_ssa_def_if_simple_copy (tree rhs, gimple **rhs_stmt)
1611{
/* Walk backwards through chains of single-RHS SSA copies.  Note that
   *RHS_STMT is only overwritten while a copy is actually followed; when the
   first definition is not a simple copy, the caller's initial value of
   *RHS_STMT is left untouched.  */
1612 while (TREE_CODE (rhs)((enum tree_code) (rhs)->base.code) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs)(tree_check ((rhs), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 1612, __FUNCTION__, (SSA_NAME)))->base.default_def_flag
)
1613 {
1614 gimple *def_stmt = SSA_NAME_DEF_STMT (rhs)(tree_check ((rhs), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 1614, __FUNCTION__, (SSA_NAME)))->ssa_name.def_stmt
;
1615
1616 if (gimple_assign_single_p (def_stmt))
1617 rhs = gimple_assign_rhs1 (def_stmt);
1618 else
1619 break;
1620 *rhs_stmt = def_stmt;
1621 }
1622 return rhs;
1623}
1624
1625/* Simple linked list, describing contents of an aggregate before call. */
1626
1627struct ipa_known_agg_contents_list
1628{
1629 /* Offset and size of the described part of the aggregate.  Lists built
    through add_to_agg_contents_list are kept sorted by ascending offset;
    build_agg_jump_func_from_list compares SIZE against TYPE_SIZE of TYPE,
    i.e. these are bit quantities.  */
1630 HOST_WIDE_INTlong offset, size;
1631
1632 /* Type of the described part of the aggregate.  */
1633 tree type;
1634
1635 /* Known constant value or jump function data describing contents.  */
1636 struct ipa_load_agg_data value;
1637
1638 /* Pointer to the next structure in the list.  */
1639 struct ipa_known_agg_contents_list *next;
1640};
1641
1642/* Add an aggregate content item into a linked list of
1643 ipa_known_agg_contents_list structure, in which all elements
1644 are sorted ascendingly by offset. */
1645
1646static inline void
1647add_to_agg_contents_list (struct ipa_known_agg_contents_list **plist,
1648 struct ipa_known_agg_contents_list *item)
1649{
1650 struct ipa_known_agg_contents_list *list = *plist;
1651
1652 for (; list; list = list->next)
1653 {
1654 if (list->offset >= item->offset)
1655 break;
1656
1657 plist = &list->next;
1658 }
1659
1660 item->next = list;
1661 *plist = item;
1662}
1663
1664/* Check whether a given aggregate content is clobbered by certain element in
1665 a linked list of ipa_known_agg_contents_list. */
1666
1667static inline bool
1668clobber_by_agg_contents_list_p (struct ipa_known_agg_contents_list *list,
1669 struct ipa_known_agg_contents_list *item)
1670{
1671 for (; list; list = list->next)
1672 {
1673 if (list->offset >= item->offset)
1674 return list->offset < item->offset + item->size;
1675
1676 if (list->offset + list->size > item->offset)
1677 return true;
1678 }
1679
1680 return false;
1681}
1682
1683/* Build aggregate jump function from LIST, assuming there are exactly
1684 VALUE_COUNT entries there and that offset of the passed argument
1685 is ARG_OFFSET and store it into JFUNC. */
1686
1687static void
1688build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
1689 int value_count, HOST_WIDE_INTlong arg_offset,
1690 struct ipa_jump_func *jfunc)
1691{
/* Reserve room for all VALUE_COUNT entries up front so the quick_push
   calls below cannot reallocate.  */
1692 vec_safe_reserve (jfunc->agg.items, value_count, true);
1693 for (; list; list = list->next)
1694 {
1695 struct ipa_agg_jf_item item;
1696 tree operand = list->value.pass_through.operand;
1697
1698 if (list->value.pass_through.formal_id >= 0)
1699 {
1700 /* Content value is derived from some formal parameter.  A non-negative
    OFFSET distinguishes a load-from-aggregate from a plain
    pass-through (see analyze_agg_content_value).  */
1701 if (list->value.offset >= 0)
1702 item.jftype = IPA_JF_LOAD_AGG;
1703 else
1704 item.jftype = IPA_JF_PASS_THROUGH;
1705
1706 item.value.load_agg = list->value;
1707 if (operand)
1708 item.value.pass_through.operand
1709 = unshare_expr_without_location (operand);
1710 }
1711 else if (operand)
1712 {
1713 /* Content value is known constant. */
1714 item.jftype = IPA_JF_CONST;
1715 item.value.constant = unshare_expr_without_location (operand);
1716 }
1717 else
1718 continue;
1719
1720 item.type = list->type;
1721 gcc_assert (tree_to_shwi (TYPE_SIZE (list->type)) == list->size)((void)(!(tree_to_shwi (((tree_class_check ((list->type), (
tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 1721, __FUNCTION__))->type_common.size)) == list->size
) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 1721, __FUNCTION__), 0 : 0))
;
1722
1723 item.offset = list->offset - arg_offset;
1724 gcc_assert ((item.offset % BITS_PER_UNIT) == 0)((void)(!((item.offset % (8)) == 0) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 1724, __FUNCTION__), 0 : 0))
;
1725
/* NOTE(review): the static analyzer reports "called C++ object pointer is
   null" (vec.h:741) here.  Per the function comment LIST holds exactly
   VALUE_COUNT pushable entries, so whenever this statement is reached
   VALUE_COUNT > 0 and vec_safe_reserve above has allocated the vector -
   presumably a false positive; confirm against vec_safe_reserve's
   allocate-on-demand semantics in vec.h.  */
1726 jfunc->agg.items->quick_push (item);
1727 }
1728}
1729
1730/* Given an assignment statement STMT, try to collect information into
1731 AGG_VALUE that will be used to construct jump function for RHS of the
1732 assignment, from which content value of an aggregate part comes.
1733
1734 Besides constant and simple pass-through jump functions, also try to
1735 identify whether it matches the following pattern that can be described by
1736 a load-value-from-aggregate jump function, which is a derivative of simple
1737 pass-through jump function.
1738
1739 foo (int *p)
1740 {
1741 ...
1742
1743 *(q_5 + 4) = *(p_3(D) + 28) op 1;
1744 bar (q_5);
1745 }
1746
 1747 Here IPA_LOAD_AGG_DATA data structure is informative enough to describe
 1748 constant, simple pass-through and load-value-from-aggregate. If value
1749 is constant, it will be kept in field OPERAND, and field FORMAL_ID is
1750 set to -1. For simple pass-through and load-value-from-aggregate, field
1751 FORMAL_ID specifies the related formal parameter index, and field
1752 OFFSET can be used to distinguish them, -1 means simple pass-through,
1753 otherwise means load-value-from-aggregate. */
1754
1755static void
1756analyze_agg_content_value (struct ipa_func_body_info *fbi,
1757 struct ipa_load_agg_data *agg_value,
1758 gimple *stmt)
1759{
1760 tree lhs = gimple_assign_lhs (stmt);
1761 tree rhs1 = gimple_assign_rhs1 (stmt);
1762 enum tree_code code;
1763 int index = -1;
1764
1765 /* Initialize jump function data for the aggregate part.  The defaults
    mean "nothing known": no formal parameter (-1), no aggregate load
    offset (-1).  */
1766 memset (agg_value, 0, sizeof (*agg_value));
1767 agg_value->pass_through.operation = NOP_EXPR;
1768 agg_value->pass_through.formal_id = -1;
1769 agg_value->offset = -1;
1770
/* Bail out on stores we do not handle: aggregate-typed, volatile or
   bit-field destinations.  */
1771 if (AGGREGATE_TYPE_P (TREE_TYPE (lhs))(((enum tree_code) (((contains_struct_check ((lhs), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 1771, __FUNCTION__))->typed.type))->base.code) == ARRAY_TYPE
|| (((enum tree_code) (((contains_struct_check ((lhs), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 1771, __FUNCTION__))->typed.type))->base.code) == RECORD_TYPE
|| ((enum tree_code) (((contains_struct_check ((lhs), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 1771, __FUNCTION__))->typed.type))->base.code) == UNION_TYPE
|| ((enum tree_code) (((contains_struct_check ((lhs), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 1771, __FUNCTION__))->typed.type))->base.code) == QUAL_UNION_TYPE
))
/* TODO: Support aggregate type. */
1772 || TREE_THIS_VOLATILE (lhs)((lhs)->base.volatile_flag)
1773 || TREE_CODE (lhs)((enum tree_code) (lhs)->base.code) == BIT_FIELD_REF
1774 || contains_bitfld_component_ref_p (lhs))
1775 return;
1776
1777 /* Skip SSA copies. */
1778 while (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
1779 {
1780 if (TREE_CODE (rhs1)((enum tree_code) (rhs1)->base.code) != SSA_NAME || SSA_NAME_IS_DEFAULT_DEF (rhs1)(tree_check ((rhs1), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 1780, __FUNCTION__, (SSA_NAME)))->base.default_def_flag
)
1781 break;
1782
1783 stmt = SSA_NAME_DEF_STMT (rhs1)(tree_check ((rhs1), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 1783, __FUNCTION__, (SSA_NAME)))->ssa_name.def_stmt
;
1784 if (!is_gimple_assign (stmt))
1785 break;
1786
1787 rhs1 = gimple_assign_rhs1 (stmt);
1788 }
1789
1790 if (gphi *phi = dyn_cast<gphi *> (stmt))
1791 {
1792 /* Also special case like the following (a is a formal parameter):
1793
1794 _12 = *a_11(D).dim[0].stride;
1795 ...
1796 # iftmp.22_9 = PHI <_12(2), 1(3)>
1797 ...
1798 parm.6.dim[0].stride = iftmp.22_9;
1799 ...
1800 __x_MOD_foo (&parm.6, b_31(D));
1801
1802 The aggregate function describing parm.6.dim[0].stride is encoded as a
1803 PASS-THROUGH jump function with ASSERT_EXPR operation with operand 1
1804 (the constant from the PHI node). */
1805
1806 if (gimple_phi_num_args (phi) != 2)
1807 return;
1808 tree arg0 = gimple_phi_arg_def (phi, 0);
1809 tree arg1 = gimple_phi_arg_def (phi, 1);
1810 tree operand;
1811
/* Exactly one PHI argument must be an interprocedural invariant; the
   other one is traced further.  */
1812 if (is_gimple_ip_invariant (arg1))
1813 {
1814 operand = arg1;
1815 rhs1 = arg0;
1816 }
1817 else if (is_gimple_ip_invariant (arg0))
1818 {
1819 operand = arg0;
1820 rhs1 = arg1;
1821 }
1822 else
1823 return;
1824
1825 rhs1 = get_ssa_def_if_simple_copy (rhs1, &stmt);
1826 if (!is_gimple_assign (stmt))
1827 return;
1828
1829 code = ASSERT_EXPR;
1830 agg_value->pass_through.operand = operand;
1831 }
1832 else if (is_gimple_assign (stmt))
1833 {
1834 code = gimple_assign_rhs_code (stmt);
1835 switch (gimple_assign_rhs_class (stmt))
1836 {
1837 case GIMPLE_SINGLE_RHS:
1838 if (is_gimple_ip_invariant (rhs1))
1839 {
/* Known constant content; FORMAL_ID stays -1.  */
1840 agg_value->pass_through.operand = rhs1;
1841 return;
1842 }
1843 code = NOP_EXPR;
1844 break;
1845
1846 case GIMPLE_UNARY_RHS:
1847 /* NOTE: A GIMPLE_UNARY_RHS operation might not be tcc_unary
1848 (truth_not_expr is example), GIMPLE_BINARY_RHS does not imply
1849 tcc_binary, this subtleness is somewhat misleading.
1850
1851 Since tcc_unary is widely used in IPA-CP code to check an operation
1852 with one operand, here we only allow tcc_unary operation to avoid
1853 possible problem. Then we can use (opclass == tcc_unary) or not to
1854 distinguish unary and binary. */
1855 if (TREE_CODE_CLASS (code)tree_code_type[(int) (code)] != tcc_unary || CONVERT_EXPR_CODE_P (code)((code) == NOP_EXPR || (code) == CONVERT_EXPR))
1856 return;
1857
1858 rhs1 = get_ssa_def_if_simple_copy (rhs1, &stmt);
1859 break;
1860
1861 case GIMPLE_BINARY_RHS:
1862 {
1863 gimple *rhs1_stmt = stmt;
1864 gimple *rhs2_stmt = stmt;
1865 tree rhs2 = gimple_assign_rhs2 (stmt);
1866
1867 rhs1 = get_ssa_def_if_simple_copy (rhs1, &rhs1_stmt);
1868 rhs2 = get_ssa_def_if_simple_copy (rhs2, &rhs2_stmt);
1869
/* One operand must be invariant; if it is the first one, swap the
   operands (adjusting comparison codes where needed, and only for
   commutative operations otherwise).  */
1870 if (is_gimple_ip_invariant (rhs2))
1871 {
1872 agg_value->pass_through.operand = rhs2;
1873 stmt = rhs1_stmt;
1874 }
1875 else if (is_gimple_ip_invariant (rhs1))
1876 {
1877 if (TREE_CODE_CLASS (code)tree_code_type[(int) (code)] == tcc_comparison)
1878 code = swap_tree_comparison (code);
1879 else if (!commutative_tree_code (code))
1880 return;
1881
1882 agg_value->pass_through.operand = rhs1;
1883 stmt = rhs2_stmt;
1884 rhs1 = rhs2;
1885 }
1886 else
1887 return;
1888
1889 if (TREE_CODE_CLASS (code)tree_code_type[(int) (code)] != tcc_comparison
1890 && !useless_type_conversion_p (TREE_TYPE (lhs)((contains_struct_check ((lhs), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 1890, __FUNCTION__))->typed.type)
,
1891 TREE_TYPE (rhs1)((contains_struct_check ((rhs1), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 1891, __FUNCTION__))->typed.type)
))
1892 return;
1893 }
1894 break;
1895
1896 default:
1897 return;
1898 }
1899 }
1900 else
1901 return;
1902
/* Finally map RHS1 to a formal parameter: either via a load from an
   unmodified parameter or aggregate, or directly as a parameter's default
   definition.  */
1903 if (TREE_CODE (rhs1)((enum tree_code) (rhs1)->base.code) != SSA_NAME)
1904 index = load_from_unmodified_param_or_agg (fbi, fbi->info, stmt,
1905 &agg_value->offset,
1906 &agg_value->by_ref);
1907 else if (SSA_NAME_IS_DEFAULT_DEF (rhs1)(tree_check ((rhs1), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 1907, __FUNCTION__, (SSA_NAME)))->base.default_def_flag
)
1908 index = ipa_get_param_decl_index (fbi->info, SSA_NAME_VAR (rhs1)((tree_check ((rhs1), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 1908, __FUNCTION__, (SSA_NAME)))->ssa_name.var == (tree)
nullptr || ((enum tree_code) ((rhs1)->ssa_name.var)->base
.code) == IDENTIFIER_NODE ? (tree) nullptr : (rhs1)->ssa_name
.var)
);
1909
1910 if (index >= 0)
1911 {
1912 if (agg_value->offset >= 0)
1913 agg_value->type = TREE_TYPE (rhs1)((contains_struct_check ((rhs1), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 1913, __FUNCTION__))->typed.type)
;
1914 agg_value->pass_through.formal_id = index;
1915 agg_value->pass_through.operation = code;
1916 }
1917 else
1918 agg_value->pass_through.operand = NULL_TREE(tree) nullptr;
1919}
1920
1921/* If STMT is a memory store to the object whose address is BASE, extract
1922 information (offset, size, and value) into CONTENT, and return true,
1923 otherwise we conservatively assume the whole object is modified with
1924 unknown content, and return false. CHECK_REF means that access to object
1925 is expected to be in form of MEM_REF expression. */
1926
1927static bool
1928extract_mem_content (struct ipa_func_body_info *fbi,
1929 gimple *stmt, tree base, bool check_ref,
1930 struct ipa_known_agg_contents_list *content)
1931{
1932 HOST_WIDE_INTlong lhs_offset, lhs_size;
1933 bool reverse;
1934
1935 if (!is_gimple_assign (stmt))
1936 return false;
1937
1938 tree lhs = gimple_assign_lhs (stmt);
1939 tree lhs_base = get_ref_base_and_extent_hwi (lhs, &lhs_offset, &lhs_size,
1940 &reverse);
1941 if (!lhs_base)
1942 return false;
1943
/* Verify the store really targets BASE: either through a zero-offset
   MEM_REF of BASE (CHECK_REF) or with BASE itself as the reference
   base.  */
1944 if (check_ref)
1945 {
1946 if (TREE_CODE (lhs_base)((enum tree_code) (lhs_base)->base.code) != MEM_REF
1947 || TREE_OPERAND (lhs_base, 0)(*((const_cast<tree*> (tree_operand_check ((lhs_base), (
0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 1947, __FUNCTION__)))))
!= base
1948 || !integer_zerop (TREE_OPERAND (lhs_base, 1)(*((const_cast<tree*> (tree_operand_check ((lhs_base), (
1), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 1948, __FUNCTION__)))))
))
1949 return false;
1950 }
1951 else if (lhs_base != base)
1952 return false;
1953
/* Record where and what is stored, then let analyze_agg_content_value fill
   in the value description.  */
1954 content->offset = lhs_offset;
1955 content->size = lhs_size;
1956 content->type = TREE_TYPE (lhs)((contains_struct_check ((lhs), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 1956, __FUNCTION__))->typed.type)
;
1957 content->next = NULLnullptr;
1958
1959 analyze_agg_content_value (fbi, &content->value, stmt);
1960 return true;
1961}
1962
1963/* Traverse statements from CALL backwards, scanning whether an aggregate given
1964 in ARG is filled in constants or values that are derived from caller's
1965 formal parameter in the way described by some kinds of jump functions. FBI
1966 is the context of the caller function for interprocedural analysis. ARG can
1967 either be an aggregate expression or a pointer to an aggregate. ARG_TYPE is
1968 the type of the aggregate, JFUNC is the jump function for the aggregate. */
1969
1970static void
1971determine_known_aggregate_parts (struct ipa_func_body_info *fbi,
1972 gcall *call, tree arg,
1973 tree arg_type,
1974 struct ipa_jump_func *jfunc)
1975{
1976 struct ipa_known_agg_contents_list *list = NULLnullptr, *all_list = NULLnullptr;
1977 bitmap visited = NULLnullptr;
1978 int item_count = 0, value_count = 0;
1979 HOST_WIDE_INTlong arg_offset, arg_size;
1980 tree arg_base;
1981 bool check_ref, by_ref;
1982 ao_ref r;
1983 int max_agg_items = opt_for_fn (fbi->node->decl, param_ipa_max_agg_items)(opts_for_fn (fbi->node->decl)->x_param_ipa_max_agg_items
)
;
1984
1985 if (max_agg_items == 0)
1986 return;
1987
1988 /* The function operates in three stages. First, we prepare check_ref, r,
1989 arg_base and arg_offset based on what is actually passed as an actual
1990 argument. */
1991
1992 if (POINTER_TYPE_P (arg_type)(((enum tree_code) (arg_type)->base.code) == POINTER_TYPE ||
((enum tree_code) (arg_type)->base.code) == REFERENCE_TYPE
)
)
1993 {
1994 by_ref = true;
1995 if (TREE_CODE (arg)((enum tree_code) (arg)->base.code) == SSA_NAME)
1996 {
1997 tree type_size;
1998 if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type))((tree_class_check ((((contains_struct_check ((arg_type), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 1998, __FUNCTION__))->typed.type)), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 1998, __FUNCTION__))->type_common.size)
)
1999 || !POINTER_TYPE_P (TREE_TYPE (arg))(((enum tree_code) (((contains_struct_check ((arg), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 1999, __FUNCTION__))->typed.type))->base.code) == POINTER_TYPE
|| ((enum tree_code) (((contains_struct_check ((arg), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 1999, __FUNCTION__))->typed.type))->base.code) == REFERENCE_TYPE
)
)
2000 return;
2001 check_ref = true;
2002 arg_base = arg;
2003 arg_offset = 0;
2004 type_size = TYPE_SIZE (TREE_TYPE (arg_type))((tree_class_check ((((contains_struct_check ((arg_type), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 2004, __FUNCTION__))->typed.type)), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 2004, __FUNCTION__))->type_common.size)
;
2005 arg_size = tree_to_uhwi (type_size);
2006 ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE(tree) nullptr);
2007 }
2008 else if (TREE_CODE (arg)((enum tree_code) (arg)->base.code) == ADDR_EXPR)
2009 {
2010 bool reverse;
2011
2012 arg = TREE_OPERAND (arg, 0)(*((const_cast<tree*> (tree_operand_check ((arg), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 2012, __FUNCTION__)))))
;
2013 arg_base = get_ref_base_and_extent_hwi (arg, &arg_offset,
2014 &arg_size, &reverse);
2015 if (!arg_base)
2016 return;
2017 if (DECL_P (arg_base)(tree_code_type[(int) (((enum tree_code) (arg_base)->base.
code))] == tcc_declaration)
)
2018 {
2019 check_ref = false;
2020 ao_ref_init (&r, arg_base);
2021 }
2022 else
2023 return;
2024 }
2025 else
2026 return;
2027 }
2028 else
2029 {
2030 bool reverse;
2031
2032 gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)))((void)(!((((enum tree_code) (((contains_struct_check ((arg),
(TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 2032, __FUNCTION__))->typed.type))->base.code) == ARRAY_TYPE
|| (((enum tree_code) (((contains_struct_check ((arg), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 2032, __FUNCTION__))->typed.type))->base.code) == RECORD_TYPE
|| ((enum tree_code) (((contains_struct_check ((arg), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 2032, __FUNCTION__))->typed.type))->base.code) == UNION_TYPE
|| ((enum tree_code) (((contains_struct_check ((arg), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 2032, __FUNCTION__))->typed.type))->base.code) == QUAL_UNION_TYPE
))) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 2032, __FUNCTION__), 0 : 0))
;
2033
2034 by_ref = false;
2035 check_ref = false;
2036 arg_base = get_ref_base_and_extent_hwi (arg, &arg_offset,
2037 &arg_size, &reverse);
2038 if (!arg_base)
2039 return;
2040
2041 ao_ref_init (&r, arg);
2042 }
2043
2044 /* Second stage traverses virtual SSA web backwards starting from the call
2045 statement, only looks at individual dominating virtual operand (its
2046 definition dominates the call), as long as it is confident that content
2047 of the aggregate is affected by definition of the virtual operand, it
2048 builds a sorted linked list of ipa_agg_jf_list describing that. */
2049
2050 for (tree dom_vuse = gimple_vuse (call);
2051 dom_vuse && fbi->aa_walk_budget > 0;)
2052 {
2053 gimple *stmt = SSA_NAME_DEF_STMT (dom_vuse)(tree_check ((dom_vuse), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 2053, __FUNCTION__, (SSA_NAME)))->ssa_name.def_stmt
;
2054
2055 if (gimple_code (stmt) == GIMPLE_PHI)
2056 {
2057 dom_vuse = get_continuation_for_phi (stmt, &r, true,
2058 fbi->aa_walk_budget,
2059 &visited, false, NULLnullptr, NULLnullptr);
2060 continue;
2061 }
2062
2063 fbi->aa_walk_budget--;
2064 if (stmt_may_clobber_ref_p_1 (stmt, &r))
2065 {
2066 struct ipa_known_agg_contents_list *content
2067 = XALLOCA (struct ipa_known_agg_contents_list)((struct ipa_known_agg_contents_list *) __builtin_alloca(sizeof
(struct ipa_known_agg_contents_list)))
;
2068
2069 if (!extract_mem_content (fbi, stmt, arg_base, check_ref, content))
2070 break;
2071
2072 /* Now we get a dominating virtual operand, and need to check
2073 whether its value is clobbered any other dominating one. */
2074 if ((content->value.pass_through.formal_id >= 0
2075 || content->value.pass_through.operand)
2076 && !clobber_by_agg_contents_list_p (all_list, content))
2077 {
2078 struct ipa_known_agg_contents_list *copy
2079 = XALLOCA (struct ipa_known_agg_contents_list)((struct ipa_known_agg_contents_list *) __builtin_alloca(sizeof
(struct ipa_known_agg_contents_list)))
;
2080
2081 /* Add to the list consisting of only dominating virtual
2082 operands, whose definitions can finally reach the call. */
2083 add_to_agg_contents_list (&list, (*copy = *content, copy));
2084
2085 if (++value_count == max_agg_items)
2086 break;
2087 }
2088
2089 /* Add to the list consisting of all dominating virtual operands. */
2090 add_to_agg_contents_list (&all_list, content);
2091
2092 if (++item_count == 2 * max_agg_items)
2093 break;
2094 }
2095 dom_vuse = gimple_vuse (stmt);
2096 }
2097
2098 if (visited)
2099 BITMAP_FREE (visited)((void) (bitmap_obstack_free ((bitmap) visited), (visited) = (
bitmap) nullptr))
;
2100
2101 /* Third stage just goes over the list and creates an appropriate vector of
2102 ipa_agg_jf_item structures out of it, of course only if there are
2103 any meaningful items to begin with. */
2104
2105 if (value_count)
2106 {
2107 jfunc->agg.by_ref = by_ref;
2108 build_agg_jump_func_from_list (list, value_count, arg_offset, jfunc);
2109 }
2110}
2111
2112
2113/* Return the Ith param type of callee associated with call graph
2114 edge E. */
2115
2116tree
2117ipa_get_callee_param_type (struct cgraph_edge *e, int i)
2118{
2119 int n;
2120 tree type = (e->callee
2121 ? TREE_TYPE (e->callee->decl)((contains_struct_check ((e->callee->decl), (TS_TYPED),
"/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 2121, __FUNCTION__))->typed.type)
2122 : gimple_call_fntype (e->call_stmt));
2123 tree t = TYPE_ARG_TYPES (type)((tree_check2 ((type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 2123, __FUNCTION__, (FUNCTION_TYPE), (METHOD_TYPE)))->type_non_common
.values)
;
2124
2125 for (n = 0; n < i; n++)
2126 {
2127 if (!t)
2128 break;
2129 t = TREE_CHAIN (t)((contains_struct_check ((t), (TS_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 2129, __FUNCTION__))->common.chain)
;
2130 }
2131 if (t)
2132 return TREE_VALUE (t)((tree_check ((t), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 2132, __FUNCTION__, (TREE_LIST)))->list.value)
;
2133 if (!e->callee)
2134 return NULLnullptr;
2135 t = DECL_ARGUMENTS (e->callee->decl)((tree_check ((e->callee->decl), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 2135, __FUNCTION__, (FUNCTION_DECL)))->function_decl.arguments
)
;
2136 for (n = 0; n < i; n++)
2137 {
2138 if (!t)
2139 return NULLnullptr;
2140 t = TREE_CHAIN (t)((contains_struct_check ((t), (TS_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 2140, __FUNCTION__))->common.chain)
;
2141 }
2142 if (t)
2143 return TREE_TYPE (t)((contains_struct_check ((t), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 2143, __FUNCTION__))->typed.type)
;
2144 return NULLnullptr;
2145}
2146
2147/* Return ipa_bits with VALUE and MASK values, which can be either a newly
2148 allocated structure or a previously existing one shared with other jump
2149 functions and/or transformation summaries. */
2150
2151ipa_bits *
2152ipa_get_ipa_bits_for_value (const widest_int &value, const widest_int &mask)
2153{
2154 ipa_bits tmp;
2155 tmp.value = value;
2156 tmp.mask = mask;
2157
2158 ipa_bits **slot = ipa_bits_hash_table->find_slot (&tmp, INSERT);
2159 if (*slot)
2160 return *slot;
2161
2162 ipa_bits *res = ggc_alloc<ipa_bits> ();
2163 res->value = value;
2164 res->mask = mask;
2165 *slot = res;
2166
2167 return res;
2168}
2169
2170/* Assign to JF a pointer to ipa_bits structure with VALUE and MASK. Use hash
2171 table in order to avoid creating multiple same ipa_bits structures. */
2172
2173static void
2174ipa_set_jfunc_bits (ipa_jump_func *jf, const widest_int &value,
2175 const widest_int &mask)
2176{
2177 jf->bits = ipa_get_ipa_bits_for_value (value, mask);
2178}
2179
2180/* Return a pointer to a value_range just like *TMP, but either find it in
2181 ipa_vr_hash_table or allocate it in GC memory. TMP->equiv must be NULL. */
2182
2183static value_range *
2184ipa_get_value_range (value_range *tmp)
2185{
2186 value_range **slot = ipa_vr_hash_table->find_slot (tmp, INSERT);
2187 if (*slot)
2188 return *slot;
2189
2190 value_range *vr = new (ggc_alloc<value_range> ()) value_range;
2191 *vr = *tmp;
2192 *slot = vr;
2193
2194 return vr;
2195}
2196
2197/* Return a pointer to a value range consisting of TYPE, MIN, MAX and an empty
2198 equiv set. Use hash table in order to avoid creating multiple same copies of
2199 value_ranges. */
2200
2201static value_range *
2202ipa_get_value_range (enum value_range_kind kind, tree min, tree max)
2203{
2204 value_range tmp (min, max, kind);
2205 return ipa_get_value_range (&tmp);
2206}
2207
2208/* Assign to JF a pointer to a value_range structure with TYPE, MIN and MAX and
2209 a NULL equiv bitmap. Use hash table in order to avoid creating multiple
2210 same value_range structures. */
2211
2212static void
2213ipa_set_jfunc_vr (ipa_jump_func *jf, enum value_range_kind type,
2214 tree min, tree max)
2215{
2216 jf->m_vr = ipa_get_value_range (type, min, max);
2217}
2218
2219/* Assign to JF a pointer to a value_range just like TMP but either fetch a
2220 copy from ipa_vr_hash_table or allocate a new on in GC memory. */
2221
2222static void
2223ipa_set_jfunc_vr (ipa_jump_func *jf, value_range *tmp)
2224{
2225 jf->m_vr = ipa_get_value_range (tmp);
2226}
2227
2228/* Compute jump function for all arguments of callsite CS and insert the
2229 information in the jump_functions array in the ipa_edge_args corresponding
2230 to this callsite. */
2231
2232static void
2233ipa_compute_jump_functions_for_edge (struct ipa_func_body_info *fbi,
2234 struct cgraph_edge *cs)
2235{
2236 ipa_node_params *info = ipa_node_params_sum->get (cs->caller);
2237 ipa_edge_args *args = ipa_edge_args_sum->get_create (cs);
2238 gcall *call = cs->call_stmt;
2239 int n, arg_num = gimple_call_num_args (call);
2240 bool useful_context = false;
2241 value_range vr;
2242
2243 if (arg_num == 0 || args->jump_functions)
1
Assuming 'arg_num' is not equal to 0
2
Assuming field 'jump_functions' is null
3
Taking false branch
2244 return;
2245 vec_safe_grow_cleared (args->jump_functions, arg_num, true);
4
Calling 'vec_safe_grow_cleared<ipa_jump_func, va_gc>'
6
Returning from 'vec_safe_grow_cleared<ipa_jump_func, va_gc>'
2246 if (flag_devirtualizeglobal_options.x_flag_devirtualize)
7
Assuming field 'x_flag_devirtualize' is not equal to 0
8
Taking true branch
2247 vec_safe_grow_cleared (args->polymorphic_call_contexts, arg_num, true);
9
Passing value via 1st parameter 'v'
10
Calling 'vec_safe_grow_cleared<ipa_polymorphic_call_context, va_gc>'
2248
2249 if (gimple_call_internal_p (call))
2250 return;
2251 if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
2252 return;
2253
2254 for (n = 0; n < arg_num; n++)
2255 {
2256 struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
2257 tree arg = gimple_call_arg (call, n);
2258 tree param_type = ipa_get_callee_param_type (cs, n);
2259 if (flag_devirtualizeglobal_options.x_flag_devirtualize && POINTER_TYPE_P (TREE_TYPE (arg))(((enum tree_code) (((contains_struct_check ((arg), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 2259, __FUNCTION__))->typed.type))->base.code) == POINTER_TYPE
|| ((enum tree_code) (((contains_struct_check ((arg), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 2259, __FUNCTION__))->typed.type))->base.code) == REFERENCE_TYPE
)
)
2260 {
2261 tree instance;
2262 class ipa_polymorphic_call_context context (cs->caller->decl,
2263 arg, cs->call_stmt,
2264 &instance);
2265 context.get_dynamic_type (instance, arg, NULLnullptr, cs->call_stmt,
2266 &fbi->aa_walk_budget);
2267 *ipa_get_ith_polymorhic_call_context (args, n) = context;
2268 if (!context.useless_p ())
2269 useful_context = true;
2270 }
2271
2272 if (POINTER_TYPE_P (TREE_TYPE (arg))(((enum tree_code) (((contains_struct_check ((arg), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 2272, __FUNCTION__))->typed.type))->base.code) == POINTER_TYPE
|| ((enum tree_code) (((contains_struct_check ((arg), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 2272, __FUNCTION__))->typed.type))->base.code) == REFERENCE_TYPE
)
)
2273 {
2274 bool addr_nonzero = false;
2275 bool strict_overflow = false;
2276
2277 if (TREE_CODE (arg)((enum tree_code) (arg)->base.code) == SSA_NAME
2278 && param_type
2279 && get_range_query (cfun(cfun + 0))->range_of_expr (vr, arg)
2280 && vr.nonzero_p ())
2281 addr_nonzero = true;
2282 else if (tree_single_nonzero_warnv_p (arg, &strict_overflow))
2283 addr_nonzero = true;
2284
2285 if (addr_nonzero)
2286 {
2287 tree z = build_int_cst (TREE_TYPE (arg)((contains_struct_check ((arg), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 2287, __FUNCTION__))->typed.type)
, 0);
2288 ipa_set_jfunc_vr (jfunc, VR_ANTI_RANGE, z, z);
2289 }
2290 else
2291 gcc_assert (!jfunc->m_vr)((void)(!(!jfunc->m_vr) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 2291, __FUNCTION__), 0 : 0))
;
2292 }
2293 else
2294 {
2295 if (TREE_CODE (arg)((enum tree_code) (arg)->base.code) == SSA_NAME
2296 && param_type
2297 && get_range_query (cfun(cfun + 0))->range_of_expr (vr, arg)
2298 && !vr.undefined_p ())
2299 {
2300 value_range resvr;
2301 range_fold_unary_expr (&resvr, NOP_EXPR, param_type,
2302 &vr, TREE_TYPE (arg)((contains_struct_check ((arg), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 2302, __FUNCTION__))->typed.type)
);
2303 if (!resvr.undefined_p () && !resvr.varying_p ())
2304 ipa_set_jfunc_vr (jfunc, &resvr);
2305 else
2306 gcc_assert (!jfunc->m_vr)((void)(!(!jfunc->m_vr) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 2306, __FUNCTION__), 0 : 0))
;
2307 }
2308 else
2309 gcc_assert (!jfunc->m_vr)((void)(!(!jfunc->m_vr) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 2309, __FUNCTION__), 0 : 0))
;
2310 }
2311
2312 if (INTEGRAL_TYPE_P (TREE_TYPE (arg))(((enum tree_code) (((contains_struct_check ((arg), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 2312, __FUNCTION__))->typed.type))->base.code) == ENUMERAL_TYPE
|| ((enum tree_code) (((contains_struct_check ((arg), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 2312, __FUNCTION__))->typed.type))->base.code) == BOOLEAN_TYPE
|| ((enum tree_code) (((contains_struct_check ((arg), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 2312, __FUNCTION__))->typed.type))->base.code) == INTEGER_TYPE
)
2313 && (TREE_CODE (arg)((enum tree_code) (arg)->base.code) == SSA_NAME || TREE_CODE (arg)((enum tree_code) (arg)->base.code) == INTEGER_CST))
2314 {
2315 if (TREE_CODE (arg)((enum tree_code) (arg)->base.code) == SSA_NAME)
2316 ipa_set_jfunc_bits (jfunc, 0,
2317 widest_int::from (get_nonzero_bits (arg),
2318 TYPE_SIGN (TREE_TYPE (arg))((signop) ((tree_class_check ((((contains_struct_check ((arg)
, (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 2318, __FUNCTION__))->typed.type)), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 2318, __FUNCTION__))->base.u.bits.unsigned_flag))
));
2319 else
2320 ipa_set_jfunc_bits (jfunc, wi::to_widest (arg), 0);
2321 }
2322 else if (POINTER_TYPE_P (TREE_TYPE (arg))(((enum tree_code) (((contains_struct_check ((arg), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 2322, __FUNCTION__))->typed.type))->base.code) == POINTER_TYPE
|| ((enum tree_code) (((contains_struct_check ((arg), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 2322, __FUNCTION__))->typed.type))->base.code) == REFERENCE_TYPE
)
)
2323 {
2324 unsigned HOST_WIDE_INTlong bitpos;
2325 unsigned align;
2326
2327 get_pointer_alignment_1 (arg, &align, &bitpos);
2328 widest_int mask = wi::bit_and_not
2329 (wi::mask<widest_int> (TYPE_PRECISION (TREE_TYPE (arg))((tree_class_check ((((contains_struct_check ((arg), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 2329, __FUNCTION__))->typed.type)), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 2329, __FUNCTION__))->type_common.precision)
, false),
2330 align / BITS_PER_UNIT(8) - 1);
2331 widest_int value = bitpos / BITS_PER_UNIT(8);
2332 ipa_set_jfunc_bits (jfunc, value, mask);
2333 }
2334 else
2335 gcc_assert (!jfunc->bits)((void)(!(!jfunc->bits) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 2335, __FUNCTION__), 0 : 0))
;
2336
2337 if (is_gimple_ip_invariant (arg)
2338 || (VAR_P (arg)(((enum tree_code) (arg)->base.code) == VAR_DECL)
2339 && is_global_var (arg)
2340 && TREE_READONLY (arg)((non_type_check ((arg), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 2340, __FUNCTION__))->base.readonly_flag)
))
2341 ipa_set_jf_constant (jfunc, arg, cs);
2342 else if (!is_gimple_reg_type (TREE_TYPE (arg)((contains_struct_check ((arg), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 2342, __FUNCTION__))->typed.type)
)
2343 && TREE_CODE (arg)((enum tree_code) (arg)->base.code) == PARM_DECL)
2344 {
2345 int index = ipa_get_param_decl_index (info, arg);
2346
2347 gcc_assert (index >=0)((void)(!(index >=0) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 2347, __FUNCTION__), 0 : 0))
;
2348 /* Aggregate passed by value, check for pass-through, otherwise we
2349 will attempt to fill in aggregate contents later in this
2350 for cycle. */
2351 if (parm_preserved_before_stmt_p (fbi, index, call, arg))
2352 {
2353 ipa_set_jf_simple_pass_through (jfunc, index, false);
2354 continue;
2355 }
2356 }
2357 else if (TREE_CODE (arg)((enum tree_code) (arg)->base.code) == SSA_NAME)
2358 {
2359 if (SSA_NAME_IS_DEFAULT_DEF (arg)(tree_check ((arg), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 2359, __FUNCTION__, (SSA_NAME)))->base.default_def_flag
)
2360 {
2361 int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg)((tree_check ((arg), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 2361, __FUNCTION__, (SSA_NAME)))->ssa_name.var == (tree)
nullptr || ((enum tree_code) ((arg)->ssa_name.var)->base
.code) == IDENTIFIER_NODE ? (tree) nullptr : (arg)->ssa_name
.var)
);
2362 if (index >= 0)
2363 {
2364 bool agg_p;
2365 agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
2366 ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
2367 }
2368 }
2369 else
2370 {
2371 gimple *stmt = SSA_NAME_DEF_STMT (arg)(tree_check ((arg), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 2371, __FUNCTION__, (SSA_NAME)))->ssa_name.def_stmt
;
2372 if (is_gimple_assign (stmt))
2373 compute_complex_assign_jump_func (fbi, info, jfunc,
2374 call, stmt, arg, param_type);
2375 else if (gimple_code (stmt) == GIMPLE_PHI)
2376 compute_complex_ancestor_jump_func (fbi, info, jfunc,
2377 call,
2378 as_a <gphi *> (stmt));
2379 }
2380 }
2381
2382 /* If ARG is pointer, we cannot use its type to determine the type of aggregate
2383 passed (because type conversions are ignored in gimple). Usually we can
2384 safely get type from function declaration, but in case of K&R prototypes or
2385 variadic functions we can try our luck with type of the pointer passed.
2386 TODO: Since we look for actual initialization of the memory object, we may better
2387 work out the type based on the memory stores we find. */
2388 if (!param_type)
2389 param_type = TREE_TYPE (arg)((contains_struct_check ((arg), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 2389, __FUNCTION__))->typed.type)
;
2390
2391 if ((jfunc->type != IPA_JF_PASS_THROUGH
2392 || !ipa_get_jf_pass_through_agg_preserved (jfunc))
2393 && (jfunc->type != IPA_JF_ANCESTOR
2394 || !ipa_get_jf_ancestor_agg_preserved (jfunc))
2395 && (AGGREGATE_TYPE_P (TREE_TYPE (arg))(((enum tree_code) (((contains_struct_check ((arg), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 2395, __FUNCTION__))->typed.type))->base.code) == ARRAY_TYPE
|| (((enum tree_code) (((contains_struct_check ((arg), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 2395, __FUNCTION__))->typed.type))->base.code) == RECORD_TYPE
|| ((enum tree_code) (((contains_struct_check ((arg), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 2395, __FUNCTION__))->typed.type))->base.code) == UNION_TYPE
|| ((enum tree_code) (((contains_struct_check ((arg), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 2395, __FUNCTION__))->typed.type))->base.code) == QUAL_UNION_TYPE
))
2396 || POINTER_TYPE_P (param_type)(((enum tree_code) (param_type)->base.code) == POINTER_TYPE
|| ((enum tree_code) (param_type)->base.code) == REFERENCE_TYPE
)
))
2397 determine_known_aggregate_parts (fbi, call, arg, param_type, jfunc);
2398 }
2399 if (!useful_context)
2400 vec_free (args->polymorphic_call_contexts);
2401}
2402
2403/* Compute jump functions for all edges - both direct and indirect - outgoing
2404 from BB. */
2405
2406static void
2407ipa_compute_jump_functions_for_bb (struct ipa_func_body_info *fbi, basic_block bb)
2408{
2409 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
2410 int i;
2411 struct cgraph_edge *cs;
2412
2413 FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)for (i = (bi->cg_edges).length () - 1; (bi->cg_edges).iterate
((i), &(cs)); (i)--)
2414 {
2415 struct cgraph_node *callee = cs->callee;
2416
2417 if (callee)
2418 {
2419 callee = callee->ultimate_alias_target ();
2420 /* We do not need to bother analyzing calls to unknown functions
2421 unless they may become known during lto/whopr. */
2422 if (!callee->definition && !flag_ltoglobal_options.x_flag_lto
2423 && !gimple_call_fnspec (cs->call_stmt).known_p ())
2424 continue;
2425 }
2426 ipa_compute_jump_functions_for_edge (fbi, cs);
2427 }
2428}
2429
2430/* If STMT looks like a statement loading a value from a member pointer formal
2431 parameter, return that parameter and store the offset of the field to
2432 *OFFSET_P, if it is non-NULL. Otherwise return NULL (but *OFFSET_P still
2433 might be clobbered). If USE_DELTA, then we look for a use of the delta
2434 field rather than the pfn. */
2435
2436static tree
2437ipa_get_stmt_member_ptr_load_param (gimple *stmt, bool use_delta,
2438 HOST_WIDE_INTlong *offset_p)
2439{
2440 tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;
2441
2442 if (!gimple_assign_single_p (stmt))
2443 return NULL_TREE(tree) nullptr;
2444
2445 rhs = gimple_assign_rhs1 (stmt);
2446 if (TREE_CODE (rhs)((enum tree_code) (rhs)->base.code) == COMPONENT_REF)
2447 {
2448 ref_field = TREE_OPERAND (rhs, 1)(*((const_cast<tree*> (tree_operand_check ((rhs), (1), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 2448, __FUNCTION__)))))
;
2449 rhs = TREE_OPERAND (rhs, 0)(*((const_cast<tree*> (tree_operand_check ((rhs), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 2449, __FUNCTION__)))))
;
2450 }
2451 else
2452 ref_field = NULL_TREE(tree) nullptr;
2453 if (TREE_CODE (rhs)((enum tree_code) (rhs)->base.code) != MEM_REF)
2454 return NULL_TREE(tree) nullptr;
2455 rec = TREE_OPERAND (rhs, 0)(*((const_cast<tree*> (tree_operand_check ((rhs), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 2455, __FUNCTION__)))))
;
2456 if (TREE_CODE (rec)((enum tree_code) (rec)->base.code) != ADDR_EXPR)
2457 return NULL_TREE(tree) nullptr;
2458 rec = TREE_OPERAND (rec, 0)(*((const_cast<tree*> (tree_operand_check ((rec), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 2458, __FUNCTION__)))))
;
2459 if (TREE_CODE (rec)((enum tree_code) (rec)->base.code) != PARM_DECL
2460 || !type_like_member_ptr_p (TREE_TYPE (rec)((contains_struct_check ((rec), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 2460, __FUNCTION__))->typed.type)
, &ptr_field, &delta_field))
2461 return NULL_TREE(tree) nullptr;
2462 ref_offset = TREE_OPERAND (rhs, 1)(*((const_cast<tree*> (tree_operand_check ((rhs), (1), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 2462, __FUNCTION__)))))
;
2463
2464 if (use_delta)
2465 fld = delta_field;
2466 else
2467 fld = ptr_field;
2468 if (offset_p)
2469 *offset_p = int_bit_position (fld);
2470
2471 if (ref_field)
2472 {
2473 if (integer_nonzerop (ref_offset))
2474 return NULL_TREE(tree) nullptr;
2475 return ref_field == fld ? rec : NULL_TREE(tree) nullptr;
2476 }
2477 else
2478 return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
2479 : NULL_TREE(tree) nullptr;
2480}
2481
2482/* Returns true iff T is an SSA_NAME defined by a statement. */
2483
2484static bool
2485ipa_is_ssa_with_stmt_def (tree t)
2486{
2487 if (TREE_CODE (t)((enum tree_code) (t)->base.code) == SSA_NAME
2488 && !SSA_NAME_IS_DEFAULT_DEF (t)(tree_check ((t), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 2488, __FUNCTION__, (SSA_NAME)))->base.default_def_flag
)
2489 return true;
2490 else
2491 return false;
2492}
2493
2494/* Find the indirect call graph edge corresponding to STMT and mark it as a
2495 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
2496 indirect call graph edge.
2497 If POLYMORPHIC is true record is as a destination of polymorphic call. */
2498
2499static struct cgraph_edge *
2500ipa_note_param_call (struct cgraph_node *node, int param_index,
2501 gcall *stmt, bool polymorphic)
2502{
2503 struct cgraph_edge *cs;
2504
2505 cs = node->get_edge (stmt);
2506 cs->indirect_info->param_index = param_index;
2507 cs->indirect_info->agg_contents = 0;
2508 cs->indirect_info->member_ptr = 0;
2509 cs->indirect_info->guaranteed_unmodified = 0;
2510 ipa_node_params *info = ipa_node_params_sum->get (node);
2511 ipa_set_param_used_by_indirect_call (info, param_index, true);
2512 if (cs->indirect_info->polymorphic || polymorphic)
2513 ipa_set_param_used_by_polymorphic_call (info, param_index, true);
2514 return cs;
2515}
2516
2517/* Analyze the CALL and examine uses of formal parameters of the caller NODE
2518 (described by INFO). PARMS_AINFO is a pointer to a vector containing
2519 intermediate information about each formal parameter. Currently it checks
2520 whether the call calls a pointer that is a formal parameter and if so, the
2521 parameter is marked with the called flag and an indirect call graph edge
2522 describing the call is created. This is very simple for ordinary pointers
2523 represented in SSA but not-so-nice when it comes to member pointers. The
2524 ugly part of this function does nothing more than trying to match the
2525 pattern of such a call. An example of such a pattern is the gimple dump
2526 below, the call is on the last line:
2527
2528 <bb 2>:
2529 f$__delta_5 = f.__delta;
2530 f$__pfn_24 = f.__pfn;
2531
2532 or
2533 <bb 2>:
2534 f$__delta_5 = MEM[(struct *)&f];
2535 f$__pfn_24 = MEM[(struct *)&f + 4B];
2536
2537 and a few lines below:
2538
2539 <bb 5>
2540 D.2496_3 = (int) f$__pfn_24;
2541 D.2497_4 = D.2496_3 & 1;
2542 if (D.2497_4 != 0)
2543 goto <bb 3>;
2544 else
2545 goto <bb 4>;
2546
2547 <bb 6>:
2548 D.2500_7 = (unsigned int) f$__delta_5;
2549 D.2501_8 = &S + D.2500_7;
2550 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
2551 D.2503_10 = *D.2502_9;
2552 D.2504_12 = f$__pfn_24 + -1;
2553 D.2505_13 = (unsigned int) D.2504_12;
2554 D.2506_14 = D.2503_10 + D.2505_13;
2555 D.2507_15 = *D.2506_14;
2556 iftmp.11_16 = (String:: *) D.2507_15;
2557
2558 <bb 7>:
2559 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
2560 D.2500_19 = (unsigned int) f$__delta_5;
2561 D.2508_20 = &S + D.2500_19;
2562 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
2563
2564 Such patterns are results of simple calls to a member pointer:
2565
2566 int doprinting (int (MyString::* f)(int) const)
2567 {
2568 MyString S ("somestring");
2569
2570 return (S.*f)(4);
2571 }
2572
2573 Moreover, the function also looks for called pointers loaded from aggregates
2574 passed by value or reference. */
2575
2576static void
2577ipa_analyze_indirect_call_uses (struct ipa_func_body_info *fbi, gcall *call,
2578 tree target)
2579{
2580 class ipa_node_params *info = fbi->info;
2581 HOST_WIDE_INTlong offset;
2582 bool by_ref;
2583
2584 if (SSA_NAME_IS_DEFAULT_DEF (target)(tree_check ((target), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 2584, __FUNCTION__, (SSA_NAME)))->base.default_def_flag
)
2585 {
2586 tree var = SSA_NAME_VAR (target)((tree_check ((target), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 2586, __FUNCTION__, (SSA_NAME)))->ssa_name.var == (tree)
nullptr || ((enum tree_code) ((target)->ssa_name.var)->
base.code) == IDENTIFIER_NODE ? (tree) nullptr : (target)->
ssa_name.var)
;
2587 int index = ipa_get_param_decl_index (info, var);
2588 if (index >= 0)
2589 ipa_note_param_call (fbi->node, index, call, false);
2590 return;
2591 }
2592
2593 int index;
2594 gimple *def = SSA_NAME_DEF_STMT (target)(tree_check ((target), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 2594, __FUNCTION__, (SSA_NAME)))->ssa_name.def_stmt
;
2595 bool guaranteed_unmodified;
2596 if (gimple_assign_single_p (def)
2597 && ipa_load_from_parm_agg (fbi, info->descriptors, def,
2598 gimple_assign_rhs1 (def), &index, &offset,
2599 NULLnullptr, &by_ref, &guaranteed_unmodified))
2600 {
2601 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index,
2602 call, false);
2603 cs->indirect_info->offset = offset;
2604 cs->indirect_info->agg_contents = 1;
2605 cs->indirect_info->by_ref = by_ref;
2606 cs->indirect_info->guaranteed_unmodified = guaranteed_unmodified;
2607 return;
2608 }
2609
2610 /* Now we need to try to match the complex pattern of calling a member
2611 pointer. */
2612 if (gimple_code (def) != GIMPLE_PHI
2613 || gimple_phi_num_args (def) != 2
2614 || !POINTER_TYPE_P (TREE_TYPE (target))(((enum tree_code) (((contains_struct_check ((target), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 2614, __FUNCTION__))->typed.type))->base.code) == POINTER_TYPE
|| ((enum tree_code) (((contains_struct_check ((target), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 2614, __FUNCTION__))->typed.type))->base.code) == REFERENCE_TYPE
)
2615 || TREE_CODE (TREE_TYPE (TREE_TYPE (target)))((enum tree_code) (((contains_struct_check ((((contains_struct_check
((target), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 2615, __FUNCTION__))->typed.type)), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 2615, __FUNCTION__))->typed.type))->base.code)
!= METHOD_TYPE)
2616 return;
2617
2618 /* First, we need to check whether one of these is a load from a member
2619 pointer that is a parameter to this function. */
2620 tree n1 = PHI_ARG_DEF (def, 0)gimple_phi_arg_def ((def), (0));
2621 tree n2 = PHI_ARG_DEF (def, 1)gimple_phi_arg_def ((def), (1));
2622 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
2623 return;
2624 gimple *d1 = SSA_NAME_DEF_STMT (n1)(tree_check ((n1), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 2624, __FUNCTION__, (SSA_NAME)))->ssa_name.def_stmt
;
2625 gimple *d2 = SSA_NAME_DEF_STMT (n2)(tree_check ((n2), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 2625, __FUNCTION__, (SSA_NAME)))->ssa_name.def_stmt
;
2626
2627 tree rec;
2628 basic_block bb, virt_bb;
2629 basic_block join = gimple_bb (def);
2630 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
2631 {
2632 if (ipa_get_stmt_member_ptr_load_param (d2, false, NULLnullptr))
2633 return;
2634
2635 bb = EDGE_PRED (join, 0)(*(join)->preds)[(0)]->src;
2636 virt_bb = gimple_bb (d2);
2637 }
2638 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
2639 {
2640 bb = EDGE_PRED (join, 1)(*(join)->preds)[(1)]->src;
2641 virt_bb = gimple_bb (d1);
2642 }
2643 else
2644 return;
2645
2646 /* Second, we need to check that the basic blocks are laid out in the way
2647 corresponding to the pattern. */
2648
2649 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
2650 || single_pred (virt_bb) != bb
2651 || single_succ (virt_bb) != join)
2652 return;
2653
2654 /* Third, let's see that the branching is done depending on the least
2655 significant bit of the pfn. */
2656
2657 gimple *branch = last_stmt (bb);
2658 if (!branch || gimple_code (branch) != GIMPLE_COND)
2659 return;
2660
2661 if ((gimple_cond_code (branch) != NE_EXPR
2662 && gimple_cond_code (branch) != EQ_EXPR)
2663 || !integer_zerop (gimple_cond_rhs (branch)))
2664 return;
2665
2666 tree cond = gimple_cond_lhs (branch);
2667 if (!ipa_is_ssa_with_stmt_def (cond))
2668 return;
2669
2670 def = SSA_NAME_DEF_STMT (cond)(tree_check ((cond), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 2670, __FUNCTION__, (SSA_NAME)))->ssa_name.def_stmt
;
2671 if (!is_gimple_assign (def)
2672 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
2673 || !integer_onep (gimple_assign_rhs2 (def)))
2674 return;
2675
2676 cond = gimple_assign_rhs1 (def);
2677 if (!ipa_is_ssa_with_stmt_def (cond))
2678 return;
2679
2680 def = SSA_NAME_DEF_STMT (cond)(tree_check ((cond), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 2680, __FUNCTION__, (SSA_NAME)))->ssa_name.def_stmt
;
2681
2682 if (is_gimple_assign (def)
2683 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def))((gimple_assign_rhs_code (def)) == NOP_EXPR || (gimple_assign_rhs_code
(def)) == CONVERT_EXPR)
)
2684 {
2685 cond = gimple_assign_rhs1 (def);
2686 if (!ipa_is_ssa_with_stmt_def (cond))
2687 return;
2688 def = SSA_NAME_DEF_STMT (cond)(tree_check ((cond), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 2688, __FUNCTION__, (SSA_NAME)))->ssa_name.def_stmt
;
2689 }
2690
2691 tree rec2;
2692 rec2 = ipa_get_stmt_member_ptr_load_param (def,
2693 (TARGET_PTRMEMFUNC_VBIT_LOCATIONptrmemfunc_vbit_in_pfn
2694 == ptrmemfunc_vbit_in_delta),
2695 NULLnullptr);
2696 if (rec != rec2)
2697 return;
2698
2699 index = ipa_get_param_decl_index (info, rec);
2700 if (index >= 0
2701 && parm_preserved_before_stmt_p (fbi, index, call, rec))
2702 {
2703 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index,
2704 call, false);
2705 cs->indirect_info->offset = offset;
2706 cs->indirect_info->agg_contents = 1;
2707 cs->indirect_info->member_ptr = 1;
2708 cs->indirect_info->guaranteed_unmodified = 1;
2709 }
2710
2711 return;
2712}
2713
2714/* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
2715 object referenced in the expression is a formal parameter of the caller
2716 FBI->node (described by FBI->info), create a call note for the
2717 statement. */
2718
2719static void
2720ipa_analyze_virtual_call_uses (struct ipa_func_body_info *fbi,
2721 gcall *call, tree target)
2722{
2723 tree obj = OBJ_TYPE_REF_OBJECT (target)(*((const_cast<tree*> (tree_operand_check (((tree_check
((target), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 2723, __FUNCTION__, (OBJ_TYPE_REF)))), (1), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 2723, __FUNCTION__)))))
;
2724 int index;
2725 HOST_WIDE_INTlong anc_offset;
2726
2727 if (!flag_devirtualizeglobal_options.x_flag_devirtualize)
2728 return;
2729
2730 if (TREE_CODE (obj)((enum tree_code) (obj)->base.code) != SSA_NAME)
2731 return;
2732
2733 class ipa_node_params *info = fbi->info;
2734 if (SSA_NAME_IS_DEFAULT_DEF (obj)(tree_check ((obj), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 2734, __FUNCTION__, (SSA_NAME)))->base.default_def_flag
)
2735 {
2736 if (TREE_CODE (SSA_NAME_VAR (obj))((enum tree_code) (((tree_check ((obj), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 2736, __FUNCTION__, (SSA_NAME)))->ssa_name.var == (tree)
nullptr || ((enum tree_code) ((obj)->ssa_name.var)->base
.code) == IDENTIFIER_NODE ? (tree) nullptr : (obj)->ssa_name
.var))->base.code)
!= PARM_DECL)
2737 return;
2738
2739 anc_offset = 0;
2740 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj)((tree_check ((obj), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 2740, __FUNCTION__, (SSA_NAME)))->ssa_name.var == (tree)
nullptr || ((enum tree_code) ((obj)->ssa_name.var)->base
.code) == IDENTIFIER_NODE ? (tree) nullptr : (obj)->ssa_name
.var)
);
2741 gcc_assert (index >= 0)((void)(!(index >= 0) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 2741, __FUNCTION__), 0 : 0))
;
2742 if (detect_type_change_ssa (fbi, obj, obj_type_ref_class (target),
2743 call))
2744 return;
2745 }
2746 else
2747 {
2748 gimple *stmt = SSA_NAME_DEF_STMT (obj)(tree_check ((obj), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 2748, __FUNCTION__, (SSA_NAME)))->ssa_name.def_stmt
;
2749 tree expr;
2750
2751 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
2752 if (!expr)
2753 return;
2754 index = ipa_get_param_decl_index (info,
2755 SSA_NAME_VAR (TREE_OPERAND (expr, 0))((tree_check (((*((const_cast<tree*> (tree_operand_check
((expr), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 2755, __FUNCTION__)))))), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 2755, __FUNCTION__, (SSA_NAME)))->ssa_name.var == (tree)
nullptr || ((enum tree_code) (((*((const_cast<tree*> (
tree_operand_check ((expr), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 2755, __FUNCTION__))))))->ssa_name.var)->base.code) ==
IDENTIFIER_NODE ? (tree) nullptr : ((*((const_cast<tree*>
(tree_operand_check ((expr), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 2755, __FUNCTION__))))))->ssa_name.var)
);
2756 gcc_assert (index >= 0)((void)(!(index >= 0) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 2756, __FUNCTION__), 0 : 0))
;
2757 if (detect_type_change (fbi, obj, expr, obj_type_ref_class (target),
2758 call, anc_offset))
2759 return;
2760 }
2761
2762 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index,
2763 call, true);
2764 class cgraph_indirect_call_info *ii = cs->indirect_info;
2765 ii->offset = anc_offset;
2766 ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target)(*((const_cast<tree*> (tree_operand_check (((tree_check
((target), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 2766, __FUNCTION__, (OBJ_TYPE_REF)))), (2), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 2766, __FUNCTION__)))))
);
2767 ii->otr_type = obj_type_ref_class (target);
2768 ii->polymorphic = 1;
2769}
2770
2771/* Analyze a call statement CALL whether and how it utilizes formal parameters
2772 of the caller (described by INFO). PARMS_AINFO is a pointer to a vector
2773 containing intermediate information about each formal parameter. */
2774
2775static void
2776ipa_analyze_call_uses (struct ipa_func_body_info *fbi, gcall *call)
2777{
2778 tree target = gimple_call_fn (call);
2779
2780 if (!target
2781 || (TREE_CODE (target)((enum tree_code) (target)->base.code) != SSA_NAME
2782 && !virtual_method_call_p (target)))
2783 return;
2784
2785 struct cgraph_edge *cs = fbi->node->get_edge (call);
2786 /* If we previously turned the call into a direct call, there is
2787 no need to analyze. */
2788 if (cs && !cs->indirect_unknown_callee)
2789 return;
2790
2791 if (cs->indirect_info->polymorphic && flag_devirtualizeglobal_options.x_flag_devirtualize)
2792 {
2793 tree instance;
2794 tree target = gimple_call_fn (call);
2795 ipa_polymorphic_call_context context (current_function_decl,
2796 target, call, &instance);
2797
2798 gcc_checking_assert (cs->indirect_info->otr_type((void)(!(cs->indirect_info->otr_type == obj_type_ref_class
(target)) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 2799, __FUNCTION__), 0 : 0))
2799 == obj_type_ref_class (target))((void)(!(cs->indirect_info->otr_type == obj_type_ref_class
(target)) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 2799, __FUNCTION__), 0 : 0))
;
2800 gcc_checking_assert (cs->indirect_info->otr_token((void)(!(cs->indirect_info->otr_token == tree_to_shwi (
(*((const_cast<tree*> (tree_operand_check (((tree_check
((target), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 2801, __FUNCTION__, (OBJ_TYPE_REF)))), (2), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 2801, __FUNCTION__))))))) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 2801, __FUNCTION__), 0 : 0))
2801 == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target)))((void)(!(cs->indirect_info->otr_token == tree_to_shwi (
(*((const_cast<tree*> (tree_operand_check (((tree_check
((target), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 2801, __FUNCTION__, (OBJ_TYPE_REF)))), (2), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 2801, __FUNCTION__))))))) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 2801, __FUNCTION__), 0 : 0))
;
2802
2803 cs->indirect_info->vptr_changed
2804 = !context.get_dynamic_type (instance,
2805 OBJ_TYPE_REF_OBJECT (target)(*((const_cast<tree*> (tree_operand_check (((tree_check
((target), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 2805, __FUNCTION__, (OBJ_TYPE_REF)))), (1), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 2805, __FUNCTION__)))))
,
2806 obj_type_ref_class (target), call,
2807 &fbi->aa_walk_budget);
2808 cs->indirect_info->context = context;
2809 }
2810
2811 if (TREE_CODE (target)((enum tree_code) (target)->base.code) == SSA_NAME)
2812 ipa_analyze_indirect_call_uses (fbi, call, target);
2813 else if (virtual_method_call_p (target))
2814 ipa_analyze_virtual_call_uses (fbi, call, target);
2815}
2816
2817
2818/* Analyze the call statement STMT with respect to formal parameters (described
2819 in INFO) of caller given by FBI->NODE. Currently it only checks whether
2820 formal parameters are called. */
2821
2822static void
2823ipa_analyze_stmt_uses (struct ipa_func_body_info *fbi, gimple *stmt)
2824{
2825 if (is_gimple_call (stmt))
2826 ipa_analyze_call_uses (fbi, as_a <gcall *> (stmt));
2827}
2828
2829/* Callback of walk_stmt_load_store_addr_ops for the visit_load.
2830 If OP is a parameter declaration, mark it as used in the info structure
2831 passed in DATA. */
2832
2833static bool
2834visit_ref_for_mod_analysis (gimple *, tree op, tree, void *data)
2835{
2836 class ipa_node_params *info = (class ipa_node_params *) data;
2837
2838 op = get_base_address (op);
2839 if (op
2840 && TREE_CODE (op)((enum tree_code) (op)->base.code) == PARM_DECL)
2841 {
2842 int index = ipa_get_param_decl_index (info, op);
2843 gcc_assert (index >= 0)((void)(!(index >= 0) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 2843, __FUNCTION__), 0 : 0))
;
2844 ipa_set_param_used (info, index, true);
2845 }
2846
2847 return false;
2848}
2849
2850/* Scan the statements in BB and inspect the uses of formal parameters. Store
2851 the findings in various structures of the associated ipa_node_params
2852 structure, such as parameter flags, notes etc. FBI holds various data about
2853 the function being analyzed. */
2854
2855static void
2856ipa_analyze_params_uses_in_bb (struct ipa_func_body_info *fbi, basic_block bb)
2857{
2858 gimple_stmt_iterator gsi;
2859 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2860 {
2861 gimple *stmt = gsi_stmt (gsi);
2862
2863 if (is_gimple_debug (stmt))
2864 continue;
2865
2866 ipa_analyze_stmt_uses (fbi, stmt);
2867 walk_stmt_load_store_addr_ops (stmt, fbi->info,
2868 visit_ref_for_mod_analysis,
2869 visit_ref_for_mod_analysis,
2870 visit_ref_for_mod_analysis);
2871 }
2872 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2873 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
2874 visit_ref_for_mod_analysis,
2875 visit_ref_for_mod_analysis,
2876 visit_ref_for_mod_analysis);
2877}
2878
2879/* Return true EXPR is a load from a dereference of SSA_NAME NAME. */
2880
2881static bool
2882load_from_dereferenced_name (tree expr, tree name)
2883{
2884 tree base = get_base_address (expr);
2885 return (TREE_CODE (base)((enum tree_code) (base)->base.code) == MEM_REF
2886 && TREE_OPERAND (base, 0)(*((const_cast<tree*> (tree_operand_check ((base), (0),
"/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 2886, __FUNCTION__)))))
== name);
2887}
2888
2889/* Calculate controlled uses of parameters of NODE. */
2890
2891static void
2892ipa_analyze_controlled_uses (struct cgraph_node *node)
2893{
2894 ipa_node_params *info = ipa_node_params_sum->get (node);
2895
2896 for (int i = 0; i < ipa_get_param_count (info); i++)
2897 {
2898 tree parm = ipa_get_param (info, i);
2899 int call_uses = 0;
2900 bool load_dereferenced = false;
2901
2902 /* For SSA regs see if parameter is used. For non-SSA we compute
2903 the flag during modification analysis. */
2904 if (is_gimple_reg (parm))
2905 {
2906 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl)((tree_check ((node->decl), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 2906, __FUNCTION__, (FUNCTION_DECL)))->function_decl.f)
,
2907 parm);
2908 if (ddef && !has_zero_uses (ddef))
2909 {
2910 imm_use_iterator imm_iter;
2911 gimple *stmt;
2912
2913 ipa_set_param_used (info, i, true);
2914 FOR_EACH_IMM_USE_STMT (stmt, imm_iter, ddef)for (struct auto_end_imm_use_stmt_traverse auto_end_imm_use_stmt_traverse
((((stmt) = first_imm_use_stmt (&(imm_iter), (ddef))), &
(imm_iter))); !end_imm_use_stmt_p (&(imm_iter)); (void) (
(stmt) = next_imm_use_stmt (&(imm_iter))))
2915 {
2916 if (is_gimple_debug (stmt))
2917 continue;
2918
2919 int all_stmt_uses = 0;
2920 use_operand_p use_p;
2921 FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)for ((use_p) = first_imm_use_on_stmt (&(imm_iter)); !end_imm_use_on_stmt_p
(&(imm_iter)); (void) ((use_p) = next_imm_use_on_stmt (&
(imm_iter))))
2922 all_stmt_uses++;
2923
2924 if (is_gimple_call (stmt))
2925 {
2926 if (gimple_call_internal_p (stmt))
2927 {
2928 call_uses = IPA_UNDESCRIBED_USE-1;
2929 break;
2930 }
2931 int recognized_stmt_uses;
2932 if (gimple_call_fn (stmt) == ddef)
2933 recognized_stmt_uses = 1;
2934 else
2935 recognized_stmt_uses = 0;
2936 unsigned arg_count = gimple_call_num_args (stmt);
2937 for (unsigned i = 0; i < arg_count; i++)
2938 {
2939 tree arg = gimple_call_arg (stmt, i);
2940 if (arg == ddef)
2941 recognized_stmt_uses++;
2942 else if (load_from_dereferenced_name (arg, ddef))
2943 {
2944 load_dereferenced = true;
2945 recognized_stmt_uses++;
2946 }
2947 }
2948
2949 if (recognized_stmt_uses != all_stmt_uses)
2950 {
2951 call_uses = IPA_UNDESCRIBED_USE-1;
2952 break;
2953 }
2954 if (call_uses >= 0)
2955 call_uses += all_stmt_uses;
2956 }
2957 else if (gimple_assign_single_p (stmt))
2958 {
2959 tree rhs = gimple_assign_rhs1 (stmt);
2960 if (all_stmt_uses != 1
2961 || !load_from_dereferenced_name (rhs, ddef))
2962 {
2963 call_uses = IPA_UNDESCRIBED_USE-1;
2964 break;
2965 }
2966 load_dereferenced = true;
2967 }
2968 else
2969 {
2970 call_uses = IPA_UNDESCRIBED_USE-1;
2971 break;
2972 }
2973 }
2974 }
2975 else
2976 call_uses = 0;
2977 }
2978 else
2979 call_uses = IPA_UNDESCRIBED_USE-1;
2980 ipa_set_controlled_uses (info, i, call_uses);
2981 ipa_set_param_load_dereferenced (info, i, load_dereferenced);
2982 }
2983}
2984
2985/* Free stuff in BI. */
2986
2987static void
2988free_ipa_bb_info (struct ipa_bb_info *bi)
2989{
2990 bi->cg_edges.release ();
2991 bi->param_aa_statuses.release ();
2992}
2993
2994/* Dominator walker driving the analysis. */
2995
2996class analysis_dom_walker : public dom_walker
2997{
2998public:
2999 analysis_dom_walker (struct ipa_func_body_info *fbi)
3000 : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}
3001
3002 virtual edge before_dom_children (basic_block);
3003
3004private:
3005 struct ipa_func_body_info *m_fbi;
3006};
3007
3008edge
3009analysis_dom_walker::before_dom_children (basic_block bb)
3010{
3011 ipa_analyze_params_uses_in_bb (m_fbi, bb);
3012 ipa_compute_jump_functions_for_bb (m_fbi, bb);
3013 return NULLnullptr;
3014}
3015
3016/* Release body info FBI. */
3017
3018void
3019ipa_release_body_info (struct ipa_func_body_info *fbi)
3020{
3021 int i;
3022 struct ipa_bb_info *bi;
3023
3024 FOR_EACH_VEC_ELT (fbi->bb_infos, i, bi)for (i = 0; (fbi->bb_infos).iterate ((i), &(bi)); ++(i
))
3025 free_ipa_bb_info (bi);
3026 fbi->bb_infos.release ();
3027}
3028
3029/* Initialize the array describing properties of formal parameters
3030 of NODE, analyze their uses and compute jump functions associated
3031 with actual arguments of calls from within NODE. */
3032
3033void
3034ipa_analyze_node (struct cgraph_node *node)
3035{
3036 struct ipa_func_body_info fbi;
3037 class ipa_node_params *info;
3038
3039 ipa_check_create_node_params ();
3040 ipa_check_create_edge_args ();
3041 info = ipa_node_params_sum->get_create (node);
3042
3043 if (info->analysis_done)
3044 return;
3045 info->analysis_done = 1;
3046
3047 if (ipa_func_spec_opts_forbid_analysis_p (node))
3048 {
3049 for (int i = 0; i < ipa_get_param_count (info); i++)
3050 {
3051 ipa_set_param_used (info, i, true);
3052 ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE-1);
3053 }
3054 return;
3055 }
3056
3057 struct function *func = DECL_STRUCT_FUNCTION (node->decl)((tree_check ((node->decl), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 3057, __FUNCTION__, (FUNCTION_DECL)))->function_decl.f)
;
3058 push_cfun (func);
3059 calculate_dominance_info (CDI_DOMINATORS);
3060 ipa_initialize_node_params (node);
3061 ipa_analyze_controlled_uses (node);
3062
3063 fbi.node = node;
3064 fbi.info = info;
3065 fbi.bb_infos = vNULL;
3066 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun)(((cfun + 0))->cfg->x_last_basic_block), true);
3067 fbi.param_count = ipa_get_param_count (info);
3068 fbi.aa_walk_budget = opt_for_fn (node->decl, param_ipa_max_aa_steps)(opts_for_fn (node->decl)->x_param_ipa_max_aa_steps);
3069
3070 for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
3071 {
3072 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
3073 bi->cg_edges.safe_push (cs);
3074 }
3075
3076 for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
3077 {
3078 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
3079 bi->cg_edges.safe_push (cs);
3080 }
3081
3082 analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun)(((cfun + 0))->cfg->x_entry_block_ptr));
3083
3084 ipa_release_body_info (&fbi);
3085 free_dominance_info (CDI_DOMINATORS);
3086 pop_cfun ();
3087}
3088
3089/* Update the jump functions associated with call graph edge E when the call
3090 graph edge CS is being inlined, assuming that E->caller is already (possibly
3091 indirectly) inlined into CS->callee and that E has not been inlined. */
3092
3093static void
3094update_jump_functions_after_inlining (struct cgraph_edge *cs,
3095 struct cgraph_edge *e)
3096{
3097 ipa_edge_args *top = ipa_edge_args_sum->get (cs);
3098 ipa_edge_args *args = ipa_edge_args_sum->get (e);
3099 if (!args)
3100 return;
3101 int count = ipa_get_cs_argument_count (args);
3102 int i;
3103
3104 for (i = 0; i < count; i++)
3105 {
3106 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
3107 class ipa_polymorphic_call_context *dst_ctx
3108 = ipa_get_ith_polymorhic_call_context (args, i);
3109
3110 if (dst->agg.items)
3111 {
3112 struct ipa_agg_jf_item *item;
3113 int j;
3114
3115 FOR_EACH_VEC_ELT (*dst->agg.items, j, item)for (j = 0; (*dst->agg.items).iterate ((j), &(item)); ++
(j))
3116 {
3117 int dst_fid;
3118 struct ipa_jump_func *src;
3119
3120 if (item->jftype != IPA_JF_PASS_THROUGH
3121 && item->jftype != IPA_JF_LOAD_AGG)
3122 continue;
3123
3124 dst_fid = item->value.pass_through.formal_id;
3125 if (!top || dst_fid >= ipa_get_cs_argument_count (top))
3126 {
3127 item->jftype = IPA_JF_UNKNOWN;
3128 continue;
3129 }
3130
3131 item->value.pass_through.formal_id = -1;
3132 src = ipa_get_ith_jump_func (top, dst_fid);
3133 if (src->type == IPA_JF_CONST)
3134 {
3135 if (item->jftype == IPA_JF_PASS_THROUGH
3136 && item->value.pass_through.operation == NOP_EXPR)
3137 {
3138 item->jftype = IPA_JF_CONST;
3139 item->value.constant = src->value.constant.value;
3140 continue;
3141 }
3142 }
3143 else if (src->type == IPA_JF_PASS_THROUGH
3144 && src->value.pass_through.operation == NOP_EXPR)
3145 {
3146 if (item->jftype == IPA_JF_PASS_THROUGH
3147 || !item->value.load_agg.by_ref
3148 || src->value.pass_through.agg_preserved)
3149 item->value.pass_through.formal_id
3150 = src->value.pass_through.formal_id;
3151 }
3152 else if (src->type == IPA_JF_ANCESTOR)
3153 {
3154 if (item->jftype == IPA_JF_PASS_THROUGH)
3155 {
3156 if (!src->value.ancestor.offset)
3157 item->value.pass_through.formal_id
3158 = src->value.ancestor.formal_id;
3159 }
3160 else if (src->value.ancestor.agg_preserved)
3161 {
3162 gcc_checking_assert (item->value.load_agg.by_ref)((void)(!(item->value.load_agg.by_ref) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 3162, __FUNCTION__), 0 : 0))
;
3163
3164 item->value.pass_through.formal_id
3165 = src->value.ancestor.formal_id;
3166 item->value.load_agg.offset
3167 += src->value.ancestor.offset;
3168 }
3169 }
3170
3171 if (item->value.pass_through.formal_id < 0)
3172 item->jftype = IPA_JF_UNKNOWN;
3173 }
3174 }
3175
3176 if (!top)
3177 {
3178 ipa_set_jf_unknown (dst);
3179 continue;
3180 }
3181
3182 if (dst->type == IPA_JF_ANCESTOR)
3183 {
3184 struct ipa_jump_func *src;
3185 int dst_fid = dst->value.ancestor.formal_id;
3186 class ipa_polymorphic_call_context *src_ctx
3187 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
3188
3189 /* Variable number of arguments can cause havoc if we try to access
3190 one that does not exist in the inlined edge. So make sure we
3191 don't. */
3192 if (dst_fid >= ipa_get_cs_argument_count (top))
3193 {
3194 ipa_set_jf_unknown (dst);
3195 continue;
3196 }
3197
3198 src = ipa_get_ith_jump_func (top, dst_fid);
3199
3200 if (src_ctx && !src_ctx->useless_p ())
3201 {
3202 class ipa_polymorphic_call_context ctx = *src_ctx;
3203
3204 /* TODO: Make type preserved safe WRT contexts. */
3205 if (!ipa_get_jf_ancestor_type_preserved (dst))
3206 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
3207 ctx.offset_by (dst->value.ancestor.offset);
3208 if (!ctx.useless_p ())
3209 {
3210 if (!dst_ctx)
3211 {
3212 vec_safe_grow_cleared (args->polymorphic_call_contexts,
3213 count, true);
3214 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
3215 }
3216
3217 dst_ctx->combine_with (ctx);
3218 }
3219 }
3220
3221 /* Parameter and argument in ancestor jump function must be pointer
3222 type, which means access to aggregate must be by-reference. */
3223 gcc_assert (!src->agg.items || src->agg.by_ref)((void)(!(!src->agg.items || src->agg.by_ref) ? fancy_abort
("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 3223, __FUNCTION__), 0 : 0))
;
3224
3225 if (src->agg.items && dst->value.ancestor.agg_preserved)
3226 {
3227 struct ipa_agg_jf_item *item;
3228 int j;
3229
3230 /* Currently we do not produce clobber aggregate jump functions,
3231 replace with merging when we do. */
3232 gcc_assert (!dst->agg.items)((void)(!(!dst->agg.items) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 3232, __FUNCTION__), 0 : 0))
;
3233
3234 dst->agg.items = vec_safe_copy (src->agg.items);
3235 dst->agg.by_ref = src->agg.by_ref;
3236 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)for (j = 0; vec_safe_iterate ((dst->agg.items), (j), &
(item)); ++(j))
3237 item->offset -= dst->value.ancestor.offset;
3238 }
3239
3240 if (src->type == IPA_JF_PASS_THROUGH
3241 && src->value.pass_through.operation == NOP_EXPR)
3242 {
3243 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
3244 dst->value.ancestor.agg_preserved &=
3245 src->value.pass_through.agg_preserved;
3246 }
3247 else if (src->type == IPA_JF_ANCESTOR)
3248 {
3249 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
3250 dst->value.ancestor.offset += src->value.ancestor.offset;
3251 dst->value.ancestor.agg_preserved &=
3252 src->value.ancestor.agg_preserved;
3253 }
3254 else
3255 ipa_set_jf_unknown (dst);
3256 }
3257 else if (dst->type == IPA_JF_PASS_THROUGH)
3258 {
3259 struct ipa_jump_func *src;
3260 /* We must check range due to calls with variable number of arguments
3261 and we cannot combine jump functions with operations. */
3262 if (dst->value.pass_through.operation == NOP_EXPR
3263 && (top && dst->value.pass_through.formal_id
3264 < ipa_get_cs_argument_count (top)))
3265 {
3266 int dst_fid = dst->value.pass_through.formal_id;
3267 src = ipa_get_ith_jump_func (top, dst_fid);
3268 bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
3269 class ipa_polymorphic_call_context *src_ctx
3270 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
3271
3272 if (src_ctx && !src_ctx->useless_p ())
3273 {
3274 class ipa_polymorphic_call_context ctx = *src_ctx;
3275
3276 /* TODO: Make type preserved safe WRT contexts. */
3277 if (!ipa_get_jf_pass_through_type_preserved (dst))
3278 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
3279 if (!ctx.useless_p ())
3280 {
3281 if (!dst_ctx)
3282 {
3283 vec_safe_grow_cleared (args->polymorphic_call_contexts,
3284 count, true);
3285 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
3286 }
3287 dst_ctx->combine_with (ctx);
3288 }
3289 }
3290 switch (src->type)
3291 {
3292 case IPA_JF_UNKNOWN:
3293 ipa_set_jf_unknown (dst);
3294 break;
3295 case IPA_JF_CONST:
3296 ipa_set_jf_cst_copy (dst, src);
3297 break;
3298
3299 case IPA_JF_PASS_THROUGH:
3300 {
3301 int formal_id = ipa_get_jf_pass_through_formal_id (src);
3302 enum tree_code operation;
3303 operation = ipa_get_jf_pass_through_operation (src);
3304
3305 if (operation == NOP_EXPR)
3306 {
3307 bool agg_p;
3308 agg_p = dst_agg_p
3309 && ipa_get_jf_pass_through_agg_preserved (src);
3310 ipa_set_jf_simple_pass_through (dst, formal_id, agg_p);
3311 }
3312 else if (TREE_CODE_CLASS (operation)tree_code_type[(int) (operation)] == tcc_unary)
3313 ipa_set_jf_unary_pass_through (dst, formal_id, operation);
3314 else
3315 {
3316 tree operand = ipa_get_jf_pass_through_operand (src);
3317 ipa_set_jf_arith_pass_through (dst, formal_id, operand,
3318 operation);
3319 }
3320 break;
3321 }
3322 case IPA_JF_ANCESTOR:
3323 {
3324 bool agg_p;
3325 agg_p = dst_agg_p
3326 && ipa_get_jf_ancestor_agg_preserved (src);
3327 ipa_set_ancestor_jf (dst,
3328 ipa_get_jf_ancestor_offset (src),
3329 ipa_get_jf_ancestor_formal_id (src),
3330 agg_p);
3331 break;
3332 }
3333 default:
3334 gcc_unreachable ()(fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 3334, __FUNCTION__))
;
3335 }
3336
3337 if (src->agg.items
3338 && (dst_agg_p || !src->agg.by_ref))
3339 {
3340 /* Currently we do not produce clobber aggregate jump
3341 functions, replace with merging when we do. */
3342 gcc_assert (!dst->agg.items)((void)(!(!dst->agg.items) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 3342, __FUNCTION__), 0 : 0))
;
3343
3344 dst->agg.by_ref = src->agg.by_ref;
3345 dst->agg.items = vec_safe_copy (src->agg.items);
3346 }
3347 }
3348 else
3349 ipa_set_jf_unknown (dst);
3350 }
3351 }
3352}
3353
3354/* If TARGET is an addr_expr of a function declaration, make it the
3355 (SPECULATIVE)destination of an indirect edge IE and return the edge.
3356 Otherwise, return NULL. */
3357
3358struct cgraph_edge *
3359ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target,
3360 bool speculative)
3361{
3362 struct cgraph_node *callee;
3363 bool unreachable = false;
3364
3365 if (TREE_CODE (target)((enum tree_code) (target)->base.code) == ADDR_EXPR)
3366 target = TREE_OPERAND (target, 0)(*((const_cast<tree*> (tree_operand_check ((target), (0
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 3366, __FUNCTION__)))))
;
3367 if (TREE_CODE (target)((enum tree_code) (target)->base.code) != FUNCTION_DECL)
3368 {
3369 target = canonicalize_constructor_val (target, NULLnullptr);
3370 if (!target || TREE_CODE (target)((enum tree_code) (target)->base.code) != FUNCTION_DECL)
3371 {
3372 /* Member pointer call that goes through a VMT lookup. */
3373 if (ie->indirect_info->member_ptr
3374 /* Or if target is not an invariant expression and we do not
3375 know if it will evaulate to function at runtime.
3376 This can happen when folding through &VAR, where &VAR
3377 is IP invariant, but VAR itself is not.
3378
3379 TODO: Revisit this when GCC 5 is branched. It seems that
3380 member_ptr check is not needed and that we may try to fold
3381 the expression and see if VAR is readonly. */
3382 || !is_gimple_ip_invariant (target))
3383 {
3384 if (dump_enabled_p ())
3385 {
3386 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
3387 "discovered direct call non-invariant %s\n",
3388 ie->caller->dump_name ());
3389 }
3390 return NULLnullptr;
3391 }
3392
3393
3394 if (dump_enabled_p ())
3395 {
3396 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
3397 "discovered direct call to non-function in %s, "
3398 "making it __builtin_unreachable\n",
3399 ie->caller->dump_name ());
3400 }
3401
3402 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
3403 callee = cgraph_node::get_create (target);
3404 unreachable = true;
3405 }
3406 else
3407 callee = cgraph_node::get (target);
3408 }
3409 else
3410 callee = cgraph_node::get (target);
3411
3412 /* Because may-edges are not explicitely represented and vtable may be external,
3413 we may create the first reference to the object in the unit. */
3414 if (!callee || callee->inlined_to)
3415 {
3416
3417 /* We are better to ensure we can refer to it.
3418 In the case of static functions we are out of luck, since we already
3419 removed its body. In the case of public functions we may or may
3420 not introduce the reference. */
3421 if (!canonicalize_constructor_val (target, NULLnullptr)
3422 || !TREE_PUBLIC (target)((target)->base.public_flag))
3423 {
3424 if (dump_file)
3425 fprintf (dump_file, "ipa-prop: Discovered call to a known target "
3426 "(%s -> %s) but cannot refer to it. Giving up.\n",
3427 ie->caller->dump_name (),
3428 ie->callee->dump_name ());
3429 return NULLnullptr;
3430 }
3431 callee = cgraph_node::get_create (target);
3432 }
3433
3434 /* If the edge is already speculated. */
3435 if (speculative && ie->speculative)
3436 {
3437 if (dump_file)
3438 {
3439 cgraph_edge *e2 = ie->speculative_call_for_target (callee);
3440 if (!e2)
3441 {
3442 if (dump_file)
3443 fprintf (dump_file, "ipa-prop: Discovered call to a "
3444 "speculative target (%s -> %s) but the call is "
3445 "already speculated to different target. "
3446 "Giving up.\n",
3447 ie->caller->dump_name (), callee->dump_name ());
3448 }
3449 else
3450 {
3451 if (dump_file)
3452 fprintf (dump_file,
3453 "ipa-prop: Discovered call to a speculative target "
3454 "(%s -> %s) this agree with previous speculation.\n",
3455 ie->caller->dump_name (), callee->dump_name ());
3456 }
3457 }
3458 return NULLnullptr;
3459 }
3460
3461 if (!dbg_cnt (devirt))
3462 return NULLnullptr;
3463
3464 ipa_check_create_node_params ();
3465
3466 /* We cannot make edges to inline clones. It is bug that someone removed
3467 the cgraph node too early. */
3468 gcc_assert (!callee->inlined_to)((void)(!(!callee->inlined_to) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 3468, __FUNCTION__), 0 : 0))
;
3469
3470 if (dump_file && !unreachable)
3471 {
3472 fprintf (dump_file, "ipa-prop: Discovered %s call to a %s target "
3473 "(%s -> %s), for stmt ",
3474 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
3475 speculative ? "speculative" : "known",
3476 ie->caller->dump_name (),
3477 callee->dump_name ());
3478 if (ie->call_stmt)
3479 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
3480 else
3481 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
3482 }
3483 if (dump_enabled_p ())
3484 {
3485 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
3486 "converting indirect call in %s to direct call to %s\n",
3487 ie->caller->dump_name (), callee->dump_name ());
3488 }
3489 if (!speculative)
3490 {
3491 struct cgraph_edge *orig = ie;
3492 ie = cgraph_edge::make_direct (ie, callee);
3493 /* If we resolved speculative edge the cost is already up to date
3494 for direct call (adjusted by inline_edge_duplication_hook). */
3495 if (ie == orig)
3496 {
3497 ipa_call_summary *es = ipa_call_summaries->get (ie);
3498 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
3499 - eni_size_weights.call_cost);
3500 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
3501 - eni_time_weights.call_cost);
3502 }
3503 }
3504 else
3505 {
3506 if (!callee->can_be_discarded_p ())
3507 {
3508 cgraph_node *alias;
3509 alias = dyn_cast<cgraph_node *> (callee->noninterposable_alias ());
3510 if (alias)
3511 callee = alias;
3512 }
3513 /* make_speculative will update ie's cost to direct call cost. */
3514 ie = ie->make_speculative
3515 (callee, ie->count.apply_scale (8, 10));
3516 }
3517
3518 return ie;
3519}
3520
3521/* Attempt to locate an interprocedural constant at a given REQ_OFFSET in
3522 CONSTRUCTOR and return it. Return NULL if the search fails for some
3523 reason. */
3524
3525static tree
3526find_constructor_constant_at_offset (tree constructor, HOST_WIDE_INTlong req_offset)
3527{
 /* REQ_OFFSET and the element offsets computed below are in bits (note the
 LOG2_BITS_PER_UNIT shift for arrays and int_bit_position for fields).  */
3528 tree type = TREE_TYPE (constructor)((contains_struct_check ((constructor), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 3528, __FUNCTION__))->typed.type)
;
3529 if (TREE_CODE (type)((enum tree_code) (type)->base.code) != ARRAY_TYPE
3530 && TREE_CODE (type)((enum tree_code) (type)->base.code) != RECORD_TYPE)
3531 return NULLnullptr;
3532
3533 unsigned ix;
3534 tree index, val;
3535 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (constructor), ix, index, val)for (ix = 0; (ix >= vec_safe_length (((tree_check ((constructor
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 3535, __FUNCTION__, (CONSTRUCTOR)))->constructor.elts)))
? false : (((void) (val = (*((tree_check ((constructor), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 3535, __FUNCTION__, (CONSTRUCTOR)))->constructor.elts))[
ix].value)), (index = (*((tree_check ((constructor), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 3535, __FUNCTION__, (CONSTRUCTOR)))->constructor.elts))[
ix].index), true); (ix)++)
3536 {
3537 HOST_WIDE_INTlong elt_offset;
3538 if (TREE_CODE (type)((enum tree_code) (type)->base.code) == ARRAY_TYPE)
3539 {
3540 offset_int off;
3541 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (type))((tree_class_check ((((contains_struct_check ((type), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 3541, __FUNCTION__))->typed.type)), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 3541, __FUNCTION__))->type_common.size_unit)
;
3542 gcc_assert (TREE_CODE (unit_size) == INTEGER_CST)((void)(!(((enum tree_code) (unit_size)->base.code) == INTEGER_CST
) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 3542, __FUNCTION__), 0 : 0))
;
3543
 /* An explicit index may be a single value or a RANGE_EXPR; it is
 rebased against the array's low bound before scaling by the
 element size.  */
3544 if (index)
3545 {
3546 if (TREE_CODE (index)((enum tree_code) (index)->base.code) == RANGE_EXPR)
3547 off = wi::to_offset (TREE_OPERAND (index, 0)(*((const_cast<tree*> (tree_operand_check ((index), (0)
, "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 3547, __FUNCTION__)))))
);
3548 else
3549 off = wi::to_offset (index);
3550 if (TYPE_DOMAIN (type)((tree_check ((type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 3550, __FUNCTION__, (ARRAY_TYPE)))->type_non_common.values
)
&& TYPE_MIN_VALUE (TYPE_DOMAIN (type))((tree_check5 ((((tree_check ((type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 3550, __FUNCTION__, (ARRAY_TYPE)))->type_non_common.values
)), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 3550, __FUNCTION__, (INTEGER_TYPE), (ENUMERAL_TYPE), (BOOLEAN_TYPE
), (REAL_TYPE), (FIXED_POINT_TYPE)))->type_non_common.minval
)
)
3551 {
3552 tree low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (type))((tree_check5 ((((tree_check ((type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 3552, __FUNCTION__, (ARRAY_TYPE)))->type_non_common.values
)), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 3552, __FUNCTION__, (INTEGER_TYPE), (ENUMERAL_TYPE), (BOOLEAN_TYPE
), (REAL_TYPE), (FIXED_POINT_TYPE)))->type_non_common.minval
)
;
3553 gcc_assert (TREE_CODE (unit_size) == INTEGER_CST)((void)(!(((enum tree_code) (unit_size)->base.code) == INTEGER_CST
) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 3553, __FUNCTION__), 0 : 0))
;
3554 off = wi::sext (off - wi::to_offset (low_bound),
3555 TYPE_PRECISION (TREE_TYPE (index))((tree_class_check ((((contains_struct_check ((index), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 3555, __FUNCTION__))->typed.type)), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 3555, __FUNCTION__))->type_common.precision)
);
3556 }
3557 off *= wi::to_offset (unit_size);
3558 /* ??? Handle more than just the first index of a
3559 RANGE_EXPR. */
3560 }
3561 else
3562 off = wi::to_offset (unit_size) * ix;
3563
 /* Convert the byte offset to bits; skip elements whose bit offset
 does not fit a signed HOST_WIDE_INT.  */
3564 off = wi::lshift (off, LOG2_BITS_PER_UNIT3);
3565 if (!wi::fits_shwi_p (off) || wi::neg_p (off))
3566 continue;
3567 elt_offset = off.to_shwi ();
3568 }
3569 else if (TREE_CODE (type)((enum tree_code) (type)->base.code) == RECORD_TYPE)
3570 {
3571 gcc_checking_assert (index && TREE_CODE (index) == FIELD_DECL)((void)(!(index && ((enum tree_code) (index)->base
.code) == FIELD_DECL) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 3571, __FUNCTION__), 0 : 0))
;
3572 if (DECL_BIT_FIELD (index)((tree_check ((index), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 3572, __FUNCTION__, (FIELD_DECL)))->decl_common.decl_flag_1
)
)
3573 continue;
3574 elt_offset = int_bit_position (index);
3575 }
3576 else
3577 gcc_unreachable ()(fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 3577, __FUNCTION__))
;
3578
 /* NOTE(review): this early NULL return presumes constructor elements
 are visited in ascending offset order -- confirm.  */
3579 if (elt_offset > req_offset)
3580 return NULLnullptr;
3581
3582 if (TREE_CODE (val)((enum tree_code) (val)->base.code) == CONSTRUCTOR)
3583 return find_constructor_constant_at_offset (val,
3584 req_offset - elt_offset);
3585
3586 if (elt_offset == req_offset
3587 && is_gimple_reg_type (TREE_TYPE (val)((contains_struct_check ((val), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 3587, __FUNCTION__))->typed.type)
)
3588 && is_gimple_ip_invariant (val))
3589 return val;
3590 }
3591 return NULLnullptr;
3592}
3593
3594/* Check whether SCALAR could be used to look up an aggregate interprocedural
3595 invariant from a static constructor and if so, return it. Otherwise return
3596 NULL. */
3597
3598static tree
3599ipa_find_agg_cst_from_init (tree scalar, HOST_WIDE_INTlong offset, bool by_ref)
3600{
 /* For by-reference passing we expect an address (&VAR); strip the
 ADDR_EXPR and inspect the referenced variable itself.  */
3601 if (by_ref)
3602 {
3603 if (TREE_CODE (scalar)((enum tree_code) (scalar)->base.code) != ADDR_EXPR)
3604 return NULLnullptr;
3605 scalar = TREE_OPERAND (scalar, 0)(*((const_cast<tree*> (tree_operand_check ((scalar), (0
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 3605, __FUNCTION__)))))
;
3606 }
3607
 /* Only a read-only global VAR_DECL with a CONSTRUCTOR initializer can
 supply a usable interprocedural constant.  */
3608 if (!VAR_P (scalar)(((enum tree_code) (scalar)->base.code) == VAR_DECL)
3609 || !is_global_var (scalar)
3610 || !TREE_READONLY (scalar)((non_type_check ((scalar), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 3610, __FUNCTION__))->base.readonly_flag)
3611 || !DECL_INITIAL (scalar)((contains_struct_check ((scalar), (TS_DECL_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 3611, __FUNCTION__))->decl_common.initial)
3612 || TREE_CODE (DECL_INITIAL (scalar))((enum tree_code) (((contains_struct_check ((scalar), (TS_DECL_COMMON
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 3612, __FUNCTION__))->decl_common.initial))->base.code
)
!= CONSTRUCTOR)
3613 return NULLnullptr;
3614
3615 return find_constructor_constant_at_offset (DECL_INITIAL (scalar)((contains_struct_check ((scalar), (TS_DECL_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 3615, __FUNCTION__))->decl_common.initial)
, offset);
3616}
3617
3618/* Retrieve value from AGG, a set of known offset/value for an aggregate or
3619 static initializer of SCALAR (which can be NULL) for the given OFFSET or
3620 return NULL if there is none. BY_REF specifies whether the value has to be
3621 passed by reference or by value. If FROM_GLOBAL_CONSTANT is non-NULL, then
3622 the boolean it points to is set to true if the value comes from an
3623 initializer of a constant. */
3624
3625tree
3626ipa_find_agg_cst_for_param (const ipa_agg_value_set *agg, tree scalar,
3627 HOST_WIDE_INTlong offset, bool by_ref,
3628 bool *from_global_constant)
3629{
3630 struct ipa_agg_value *item;
3631 int i;
3632
 /* A value found in a constant global initializer takes precedence over
 the offset/value pairs tracked in AGG.  */
3633 if (scalar)
3634 {
3635 tree res = ipa_find_agg_cst_from_init (scalar, offset, by_ref);
3636 if (res)
3637 {
3638 if (from_global_constant)
3639 *from_global_constant = true;
3640 return res;
3641 }
3642 }
3643
3644 if (!agg
3645 || by_ref != agg->by_ref)
3646 return NULLnullptr;
3647
 /* Linear scan over the known items; only an exact offset match counts.  */
3648 FOR_EACH_VEC_ELT (agg->items, i, item)for (i = 0; (agg->items).iterate ((i), &(item)); ++(i)
)
3649 if (item->offset == offset)
3650 {
3651 /* Currently we do not have clobber values, return NULL for them once
3652 we do. */
3653 gcc_checking_assert (is_gimple_ip_invariant (item->value))((void)(!(is_gimple_ip_invariant (item->value)) ? fancy_abort
("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 3653, __FUNCTION__), 0 : 0))
;
3654 if (from_global_constant)
3655 *from_global_constant = false;
3656 return item->value;
3657 }
3658 return NULLnullptr;
3659}
3660
3661/* Remove a reference to SYMBOL from the list of references of a node given by
3662 reference description RDESC. Return true if the reference has been
3663 successfully found and removed. */
3664
3665static bool
3666remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
3667{
3668 struct ipa_ref *to_del;
3669 struct cgraph_edge *origin;
3670
 /* The rdesc's originating edge may have been cleared already; in that
 case there is nothing we can locate and remove.  */
3671 origin = rdesc->cs;
3672 if (!origin)
3673 return false;
3674 to_del = origin->caller->find_reference (symbol, origin->call_stmt,
3675 origin->lto_stmt_uid);
3676 if (!to_del)
3677 return false;
3678
3679 to_del->remove_reference ();
3680 if (dump_file)
3681 fprintf (dump_file, "ipa-prop: Removed a reference from %s to %s.\n",
3682 origin->caller->dump_name (), symbol->dump_name ());
3683 return true;
3684}
3685
3686/* If JFUNC has a reference description with refcount different from
3687 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
3688 NULL. JFUNC must be a constant jump function. */
3689
3690static struct ipa_cst_ref_desc *
3691jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
3692{
 /* An IPA_UNDESCRIBED_USE refcount means the uses of the constant are not
 tracked, so such an rdesc must not be used for refcounting.  */
3693 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
3694 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE-1)
3695 return rdesc;
3696 else
3697 return NULLnullptr;
3698}
3699
3700/* If the value of constant jump function JFUNC is an address of a function
3701 or a variable declaration, return the associated symbol table node.
3702 Otherwise return NULL. */
3703
3704static symtab_node *
3705symtab_node_for_jfunc (struct ipa_jump_func *jfunc)
3706{
3707 gcc_checking_assert (jfunc->type == IPA_JF_CONST)((void)(!(jfunc->type == IPA_JF_CONST) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 3707, __FUNCTION__), 0 : 0))
;
 /* Accept only &FUNCTION_DECL or &VAR_DECL constants; anything else has no
 symbol table counterpart.  */
3708 tree cst = ipa_get_jf_constant (jfunc);
3709 if (TREE_CODE (cst)((enum tree_code) (cst)->base.code) != ADDR_EXPR
3710 || (TREE_CODE (TREE_OPERAND (cst, 0))((enum tree_code) ((*((const_cast<tree*> (tree_operand_check
((cst), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 3710, __FUNCTION__))))))->base.code)
!= FUNCTION_DECL
3711 && TREE_CODE (TREE_OPERAND (cst, 0))((enum tree_code) ((*((const_cast<tree*> (tree_operand_check
((cst), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 3711, __FUNCTION__))))))->base.code)
!= VAR_DECL))
3712 return NULLnullptr;
3713
3714 return symtab_node::get (TREE_OPERAND (cst, 0)(*((const_cast<tree*> (tree_operand_check ((cst), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 3714, __FUNCTION__)))))
);
3715}
3716
3717
3718/* If JFUNC is a constant jump function with a usable rdesc, decrement its
3719 refcount and if it hits zero, remove reference to SYMBOL from the caller of
3720 the edge specified in the rdesc. Return false if either the symbol or the
3721 reference could not be found, otherwise return true. */
3722
3723static bool
3724try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
3725{
 /* Only constant jump functions carry rdescs; jfunc_rdesc_usable also
 filters out rdescs whose uses are not tracked, so the decrement below is
 safe.  */
3726 struct ipa_cst_ref_desc *rdesc;
3727 if (jfunc->type == IPA_JF_CONST
3728 && (rdesc = jfunc_rdesc_usable (jfunc))
3729 && --rdesc->refcount == 0)
3730 {
3731 symtab_node *symbol = symtab_node_for_jfunc (jfunc);
3732 if (!symbol)
3733 return false;
3734
3735 return remove_described_reference (symbol, rdesc);
3736 }
3737 return true;
3738}
3739
3740/* Try to find a destination for indirect edge IE that corresponds to a simple
3741 call or a call of a member function pointer and where the destination is a
3742 pointer formal parameter described by jump function JFUNC. TARGET_TYPE is
3743 the type of the parameter to which the result of JFUNC is passed. If it can
3744 be determined, return the newly direct edge, otherwise return NULL.
3745 NEW_ROOT and NEW_ROOT_INFO is the node and its info that JFUNC lattices are
3746 relative to. */
3747
3748static struct cgraph_edge *
3749try_make_edge_direct_simple_call (struct cgraph_edge *ie,
3750 struct ipa_jump_func *jfunc, tree target_type,
3751 struct cgraph_node *new_root,
3752 class ipa_node_params *new_root_info)
3753{
 /* The call target is either loaded from memory described by the aggregate
 part of the jump function, or it is the scalar jump function value
 itself.  */
3754 struct cgraph_edge *cs;
3755 tree target;
3756 bool agg_contents = ie->indirect_info->agg_contents;
3757 tree scalar = ipa_value_from_jfunc (new_root_info, jfunc, target_type);
3758 if (agg_contents)
3759 {
3760 bool from_global_constant;
3761 ipa_agg_value_set agg = ipa_agg_value_set_from_jfunc (new_root_info,
3762 new_root,
3763 &jfunc->agg);
3764 target = ipa_find_agg_cst_for_param (&agg, scalar,
3765 ie->indirect_info->offset,
3766 ie->indirect_info->by_ref,
3767 &from_global_constant);
3768 agg.release ();
 /* NOTE(review): a value not coming from a constant initializer is
 presumably only trustworthy when the memory is known unmodified
 between the load and the call -- verify guaranteed_unmodified
 semantics.  */
3769 if (target
3770 && !from_global_constant
3771 && !ie->indirect_info->guaranteed_unmodified)
3772 return NULLnullptr;
3773 }
3774 else
3775 target = scalar;
3776 if (!target)
3777 return NULLnullptr;
3778 cs = ipa_make_edge_direct_to_target (ie, target);
3779
3780 if (cs && !agg_contents)
3781 {
3782 bool ok;
3783 gcc_checking_assert (cs->callee((void)(!(cs->callee && (cs != ie || jfunc->type
!= IPA_JF_CONST || !symtab_node_for_jfunc (jfunc) || cs->
callee == symtab_node_for_jfunc (jfunc))) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 3787, __FUNCTION__), 0 : 0))
3784 && (cs != ie((void)(!(cs->callee && (cs != ie || jfunc->type
!= IPA_JF_CONST || !symtab_node_for_jfunc (jfunc) || cs->
callee == symtab_node_for_jfunc (jfunc))) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 3787, __FUNCTION__), 0 : 0))
3785 || jfunc->type != IPA_JF_CONST((void)(!(cs->callee && (cs != ie || jfunc->type
!= IPA_JF_CONST || !symtab_node_for_jfunc (jfunc) || cs->
callee == symtab_node_for_jfunc (jfunc))) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 3787, __FUNCTION__), 0 : 0))
3786 || !symtab_node_for_jfunc (jfunc)((void)(!(cs->callee && (cs != ie || jfunc->type
!= IPA_JF_CONST || !symtab_node_for_jfunc (jfunc) || cs->
callee == symtab_node_for_jfunc (jfunc))) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 3787, __FUNCTION__), 0 : 0))
3787 || cs->callee == symtab_node_for_jfunc (jfunc)))((void)(!(cs->callee && (cs != ie || jfunc->type
!= IPA_JF_CONST || !symtab_node_for_jfunc (jfunc) || cs->
callee == symtab_node_for_jfunc (jfunc))) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 3787, __FUNCTION__), 0 : 0))
;
 /* The jump function's constant no longer has this indirect use, so drop
 its tracked reference count.  */
3788 ok = try_decrement_rdesc_refcount (jfunc);
3789 gcc_checking_assert (ok)((void)(!(ok) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 3789, __FUNCTION__), 0 : 0))
;
3790 }
3791
3792 return cs;
3793}
3794
3795/* Return the target to be used in cases of impossible devirtualization. IE
3796 and target (the latter can be NULL) are dumped when dumping is enabled. */
3797
tree
3799ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
3800{
3801 if (dump_file)
3802 {
3803 if (target)
3804 fprintf (dump_file,
3805 "Type inconsistent devirtualization: %s->%s\n",
3806 ie->caller->dump_name (),
3807 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target))((const char *) (tree_check ((decl_assembler_name (target)), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 3807, __FUNCTION__, (IDENTIFIER_NODE)))->identifier.id.str
)
);
3808 else
3809 fprintf (dump_file,
3810 "No devirtualization target in %s\n",
3811 ie->caller->dump_name ());
3812 }
 /* Redirect impossible calls to __builtin_unreachable; get_create makes
 sure the builtin has a call graph node before an edge points at it.  */
3813 tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
3814 cgraph_node::get_create (new_target);
3815 return new_target;
3816}
3817
3818/* Try to find a destination for indirect edge IE that corresponds to a virtual
3819 call based on a formal parameter which is described by jump function JFUNC
3820 and if it can be determined, make it direct and return the direct edge.
3821 Otherwise, return NULL. CTX describes the polymorphic context that the
3822 parameter the call is based on brings along with it. NEW_ROOT and
3823 NEW_ROOT_INFO is the node and its info that JFUNC lattices are relative
3824 to. */
3825
3826static struct cgraph_edge *
3827try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
3828 struct ipa_jump_func *jfunc,
3829 class ipa_polymorphic_call_context ctx,
3830 struct cgraph_node *new_root,
3831 class ipa_node_params *new_root_info)
3832{
3833 tree target = NULLnullptr;
3834 bool speculative = false;
3835
3836 if (!opt_for_fn (ie->caller->decl, flag_devirtualize)(opts_for_fn (ie->caller->decl)->x_flag_devirtualize
)
)
3837 return NULLnullptr;
3838
3839 gcc_assert (!ie->indirect_info->by_ref)((void)(!(!ie->indirect_info->by_ref) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 3839, __FUNCTION__), 0 : 0))
;
3840
3841 /* Try to do lookup via known virtual table pointer value. */
3842 if (!ie->indirect_info->vptr_changed
3843 || opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively)(opts_for_fn (ie->caller->decl)->x_flag_devirtualize_speculatively
)
)
3844 {
3845 tree vtable;
3846 unsigned HOST_WIDE_INTlong offset;
3847 tree scalar = (jfunc->type == IPA_JF_CONST) ? ipa_get_jf_constant (jfunc)
3848 : NULLnullptr;
3849 ipa_agg_value_set agg = ipa_agg_value_set_from_jfunc (new_root_info,
3850 new_root,
3851 &jfunc->agg);
3852 tree t = ipa_find_agg_cst_for_param (&agg, scalar,
3853 ie->indirect_info->offset,
3854 true);
3855 agg.release ();
3856 if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
3857 {
3858 bool can_refer;
3859 t = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
3860 vtable, offset, &can_refer);
3861 if (can_refer)
3862 {
3863 if (!t
3864 || fndecl_built_in_p (t, BUILT_IN_UNREACHABLE)
3865 || !possible_polymorphic_call_target_p
3866 (ie, cgraph_node::get (t)))
3867 {
3868 /* Do not speculate builtin_unreachable, it is stupid! */
3869 if (!ie->indirect_info->vptr_changed)
3870 target = ipa_impossible_devirt_target (ie, target);
3871 else
3872 target = NULLnullptr;
3873 }
3874 else
3875 {
 /* If the vptr might have changed since the value was
 recorded, the lookup result is only a speculation.  */
3876 target = t;
3877 speculative = ie->indirect_info->vptr_changed;
3878 }
3879 }
3880 }
3881 }
3882
 /* Second attempt: combine the propagated polymorphic context with the
 call-site context and enumerate the possible targets.  */
3883 ipa_polymorphic_call_context ie_context (ie);
3884 vec <cgraph_node *>targets;
3885 bool final;
3886
3887 ctx.offset_by (ie->indirect_info->offset);
3888 if (ie->indirect_info->vptr_changed)
3889 ctx.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
3890 ie->indirect_info->otr_type);
3891 ctx.combine_with (ie_context, ie->indirect_info->otr_type);
3892 targets = possible_polymorphic_call_targets
3893 (ie->indirect_info->otr_type,
3894 ie->indirect_info->otr_token,
3895 ctx, &final);
3896 if (final && targets.length () <= 1)
3897 {
3898 speculative = false;
3899 if (targets.length () == 1)
3900 target = targets[0]->decl;
3901 else
3902 target = ipa_impossible_devirt_target (ie, NULL_TREE(tree) nullptr);
3903 }
3904 else if (!target && opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively)(opts_for_fn (ie->caller->decl)->x_flag_devirtualize_speculatively
)
3905 && !ie->speculative && ie->maybe_hot_p ())
3906 {
3907 cgraph_node *n;
3908 n = try_speculative_devirtualization (ie->indirect_info->otr_type,
3909 ie->indirect_info->otr_token,
3910 ie->indirect_info->context);
3911 if (n)
3912 {
3913 target = n->decl;
3914 speculative = true;
3915 }
3916 }
3917
 /* Cross-check the chosen target against the polymorphic context before
 committing; an inconsistent speculative guess is simply dropped.  */
3918 if (target)
3919 {
3920 if (!possible_polymorphic_call_target_p
3921 (ie, cgraph_node::get_create (target)))
3922 {
3923 if (speculative)
3924 return NULLnullptr;
3925 target = ipa_impossible_devirt_target (ie, target);
3926 }
3927 return ipa_make_edge_direct_to_target (ie, target, speculative);
3928 }
3929 else
3930 return NULLnullptr;
3931}
3932
3933/* Update the param called notes associated with NODE when CS is being inlined,
3934 assuming NODE is (potentially indirectly) inlined into CS->callee.
3935 Moreover, if the callee is discovered to be constant, create a new cgraph
3936 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
3937 unless NEW_EDGES is NULL. Return true iff a new edge(s) were created. */
3938
3939static bool
3940update_indirect_edges_after_inlining (struct cgraph_edge *cs,
3941 struct cgraph_node *node,
3942 vec<cgraph_edge *> *new_edges)
3943{
 /* TOP describes the actual arguments of edge CS; indirect-call infos of
 NODE refer into it via their param_index.  */
3944 class ipa_edge_args *top;
3945 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
3946 struct cgraph_node *new_root;
3947 class ipa_node_params *new_root_info, *inlined_node_info;
3948 bool res = false;
3949
3950 ipa_check_create_edge_args ();
3951 top = ipa_edge_args_sum->get (cs);
3952 new_root = cs->caller->inlined_to
3953 ? cs->caller->inlined_to : cs->caller;
3954 new_root_info = ipa_node_params_sum->get (new_root);
3955 inlined_node_info = ipa_node_params_sum->get (cs->callee->function_symbol ());
3956
3957 for (ie = node->indirect_calls; ie; ie = next_ie)
3958 {
3959 class cgraph_indirect_call_info *ici = ie->indirect_info;
3960 struct ipa_jump_func *jfunc;
3961 int param_index;
3962
 /* Resolving an edge may rewrite the list, so remember the successor
 up front.  */
3963 next_ie = ie->next_callee;
3964
3965 if (ici->param_index == -1)
3966 continue;
3967
3968 /* We must check range due to calls with variable number of arguments: */
3969 if (!top || ici->param_index >= ipa_get_cs_argument_count (top))
3970 {
3971 ici->param_index = -1;
3972 continue;
3973 }
3974
3975 param_index = ici->param_index;
3976 jfunc = ipa_get_ith_jump_func (top, param_index);
3977
 /* Collect the targets this edge already speculates to, so we can tell
 below whether a resolution merely confirms a prior speculation.  */
3978 auto_vec<cgraph_node *, 4> spec_targets;
3979 if (ie->speculative)
3980 for (cgraph_edge *direct = ie->first_speculative_call_target ();
3981 direct;
3982 direct = direct->next_speculative_call_target ())
3983 spec_targets.safe_push (direct->callee);
3984
3985 if (!opt_for_fn (node->decl, flag_indirect_inlining)(opts_for_fn (node->decl)->x_flag_indirect_inlining))
3986 new_direct_edge = NULLnullptr;
3987 else if (ici->polymorphic)
3988 {
3989 ipa_polymorphic_call_context ctx;
3990 ctx = ipa_context_from_jfunc (new_root_info, cs, param_index, jfunc);
3991 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc, ctx,
3992 new_root,
3993 new_root_info);
3994 }
3995 else
3996 {
3997 tree target_type = ipa_get_type (inlined_node_info, param_index);
3998 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
3999 target_type,
4000 new_root,
4001 new_root_info);
4002 }
4003
4004 /* If speculation was removed, then we need to do nothing. */
4005 if (new_direct_edge && new_direct_edge != ie
4006 && spec_targets.contains (new_direct_edge->callee))
4007 {
4008 new_direct_edge->indirect_inlining_edge = 1;
4009 res = true;
4010 if (!new_direct_edge->speculative)
4011 continue;
4012 }
4013 else if (new_direct_edge)
4014 {
4015 new_direct_edge->indirect_inlining_edge = 1;
4016 if (new_edges)
4017 {
4018 new_edges->safe_push (new_direct_edge);
4019 res = true;
4020 }
4021 /* If speculative edge was introduced we still need to update
4022 call info of the indirect edge. */
4023 if (!new_direct_edge->speculative)
4024 continue;
4025 }
 /* No (non-speculative) direct target was found; translate the edge's
 parameter index into the new root's numbering via the jump function
 so that a later pass may still resolve it.  */
4026 if (jfunc->type == IPA_JF_PASS_THROUGH
4027 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
4028 {
4029 if (ici->agg_contents
4030 && !ipa_get_jf_pass_through_agg_preserved (jfunc)
4031 && !ici->polymorphic)
4032 ici->param_index = -1;
4033 else
4034 {
4035 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
4036 if (ici->polymorphic
4037 && !ipa_get_jf_pass_through_type_preserved (jfunc))
4038 ici->vptr_changed = true;
4039 ipa_set_param_used_by_indirect_call (new_root_info,
4040 ici->param_index, true);
4041 if (ici->polymorphic)
4042 ipa_set_param_used_by_polymorphic_call (new_root_info,
4043 ici->param_index, true);
4044 }
4045 }
4046 else if (jfunc->type == IPA_JF_ANCESTOR)
4047 {
4048 if (ici->agg_contents
4049 && !ipa_get_jf_ancestor_agg_preserved (jfunc)
4050 && !ici->polymorphic)
4051 ici->param_index = -1;
4052 else
4053 {
4054 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
4055 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
4056 if (ici->polymorphic
4057 && !ipa_get_jf_ancestor_type_preserved (jfunc))
4058 ici->vptr_changed = true;
4059 ipa_set_param_used_by_indirect_call (new_root_info,
4060 ici->param_index, true);
4061 if (ici->polymorphic)
4062 ipa_set_param_used_by_polymorphic_call (new_root_info,
4063 ici->param_index, true);
4064 }
4065 }
4066 else
4067 /* Either we can find a destination for this edge now or never. */
4068 ici->param_index = -1;
4069 }
4070
4071 return res;
4072}
4073
4074/* Recursively traverse subtree of NODE (including node) made of inlined
4075 cgraph_edges when CS has been inlined and invoke
4076 update_indirect_edges_after_inlining on all nodes and
4077 update_jump_functions_after_inlining on all non-inlined edges that lead out
4078 of this subtree. Newly discovered indirect edges will be added to
4079 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were
4080 created. */
4081
4082static bool
4083propagate_info_to_inlined_callees (struct cgraph_edge *cs,
4084 struct cgraph_node *node,
4085 vec<cgraph_edge *> *new_edges)
4086{
4087 struct cgraph_edge *e;
4088 bool res;
4089
4090 res = update_indirect_edges_after_inlining (cs, node, new_edges);
4091
4092 for (e = node->callees; e; e = e->next_callee)
4093 if (!e->inline_failed)
4094 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
4095 else
4096 update_jump_functions_after_inlining (cs, e);
4097 for (e = node->indirect_calls; e; e = e->next_callee)
4098 update_jump_functions_after_inlining (cs, e);
4099
4100 return res;
4101}
4102
4103/* Combine two controlled uses counts as done during inlining. */
4104
4105static int
4106combine_controlled_uses_counters (int c, int d)
4107{
4108 if (c == IPA_UNDESCRIBED_USE-1 || d == IPA_UNDESCRIBED_USE-1)
4109 return IPA_UNDESCRIBED_USE-1;
4110 else
4111 return c + d - 1;
4112}
4113
4114/* Propagate number of controlled users from CS->caleee to the new root of the
4115 tree of inlined nodes. */
4116
4117static void
4118propagate_controlled_uses (struct cgraph_edge *cs)
4119{
4120 ipa_edge_args *args = ipa_edge_args_sum->get (cs);
4121 if (!args)
4122 return;
4123 struct cgraph_node *new_root = cs->caller->inlined_to
4124 ? cs->caller->inlined_to : cs->caller;
4125 ipa_node_params *new_root_info = ipa_node_params_sum->get (new_root);
4126 ipa_node_params *old_root_info = ipa_node_params_sum->get (cs->callee);
4127 int count, i;
4128
4129 if (!old_root_info)
4130 return;
4131
4132 count = MIN (ipa_get_cs_argument_count (args),((ipa_get_cs_argument_count (args)) < (ipa_get_param_count
(old_root_info)) ? (ipa_get_cs_argument_count (args)) : (ipa_get_param_count
(old_root_info)))
4133 ipa_get_param_count (old_root_info))((ipa_get_cs_argument_count (args)) < (ipa_get_param_count
(old_root_info)) ? (ipa_get_cs_argument_count (args)) : (ipa_get_param_count
(old_root_info)))
;
4134 for (i = 0; i < count; i++)
4135 {
4136 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
4137 struct ipa_cst_ref_desc *rdesc;
4138
4139 if (jf->type == IPA_JF_PASS_THROUGH)
4140 {
4141 int src_idx, c, d;
4142 src_idx = ipa_get_jf_pass_through_formal_id (jf);
4143 c = ipa_get_controlled_uses (new_root_info, src_idx);
4144 d = ipa_get_controlled_uses (old_root_info, i);
4145
4146 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)((void)(!(ipa_get_jf_pass_through_operation (jf) == NOP_EXPR ||
c == -1) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 4147, __FUNCTION__), 0 : 0))
4147 == NOP_EXPR || c == IPA_UNDESCRIBED_USE)((void)(!(ipa_get_jf_pass_through_operation (jf) == NOP_EXPR ||
c == -1) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 4147, __FUNCTION__), 0 : 0))
;
4148 c = combine_controlled_uses_counters (c, d);
4149 ipa_set_controlled_uses (new_root_info, src_idx, c);
4150 bool lderef = true;
4151 if (c != IPA_UNDESCRIBED_USE-1)
4152 {
4153 lderef = (ipa_get_param_load_dereferenced (new_root_info, src_idx)
4154 || ipa_get_param_load_dereferenced (old_root_info, i));
4155 ipa_set_param_load_dereferenced (new_root_info, src_idx, lderef);
4156 }
4157
4158 if (c == 0 && !lderef && new_root_info->ipcp_orig_node)
4159 {
4160 struct cgraph_node *n;
4161 struct ipa_ref *ref;
4162 tree t = new_root_info->known_csts[src_idx];
4163
4164 if (t && TREE_CODE (t)((enum tree_code) (t)->base.code) == ADDR_EXPR
4165 && TREE_CODE (TREE_OPERAND (t, 0))((enum tree_code) ((*((const_cast<tree*> (tree_operand_check
((t), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 4165, __FUNCTION__))))))->base.code)
== FUNCTION_DECL
4166 && (n = cgraph_node::get (TREE_OPERAND (t, 0)(*((const_cast<tree*> (tree_operand_check ((t), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 4166, __FUNCTION__)))))
))
4167 && (ref = new_root->find_reference (n, NULLnullptr, 0)))
4168 {
4169 if (dump_file)
4170 fprintf (dump_file, "ipa-prop: Removing cloning-created "
4171 "reference from %s to %s.\n",
4172 new_root->dump_name (),
4173 n->dump_name ());
4174 ref->remove_reference ();
4175 }
4176 }
4177 }
4178 else if (jf->type == IPA_JF_CONST
4179 && (rdesc = jfunc_rdesc_usable (jf)))
4180 {
4181 int d = ipa_get_controlled_uses (old_root_info, i);
4182 int c = rdesc->refcount;
4183 rdesc->refcount = combine_controlled_uses_counters (c, d);
4184 if (rdesc->refcount == 0)
4185 {
4186 tree cst = ipa_get_jf_constant (jf);
4187 gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR((void)(!(((enum tree_code) (cst)->base.code) == ADDR_EXPR
&& ((((enum tree_code) ((*((const_cast<tree*> (
tree_operand_check ((cst), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 4188, __FUNCTION__))))))->base.code) == FUNCTION_DECL) ||
(((enum tree_code) ((*((const_cast<tree*> (tree_operand_check
((cst), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 4190, __FUNCTION__))))))->base.code) == VAR_DECL))) ? fancy_abort
("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 4191, __FUNCTION__), 0 : 0))
4188 && ((TREE_CODE (TREE_OPERAND (cst, 0))((void)(!(((enum tree_code) (cst)->base.code) == ADDR_EXPR
&& ((((enum tree_code) ((*((const_cast<tree*> (
tree_operand_check ((cst), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 4188, __FUNCTION__))))))->base.code) == FUNCTION_DECL) ||
(((enum tree_code) ((*((const_cast<tree*> (tree_operand_check
((cst), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 4190, __FUNCTION__))))))->base.code) == VAR_DECL))) ? fancy_abort
("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 4191, __FUNCTION__), 0 : 0))
4189 == FUNCTION_DECL)((void)(!(((enum tree_code) (cst)->base.code) == ADDR_EXPR
&& ((((enum tree_code) ((*((const_cast<tree*> (
tree_operand_check ((cst), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 4188, __FUNCTION__))))))->base.code) == FUNCTION_DECL) ||
(((enum tree_code) ((*((const_cast<tree*> (tree_operand_check
((cst), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 4190, __FUNCTION__))))))->base.code) == VAR_DECL))) ? fancy_abort
("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 4191, __FUNCTION__), 0 : 0))
4190 || (TREE_CODE (TREE_OPERAND (cst, 0))((void)(!(((enum tree_code) (cst)->base.code) == ADDR_EXPR
&& ((((enum tree_code) ((*((const_cast<tree*> (
tree_operand_check ((cst), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 4188, __FUNCTION__))))))->base.code) == FUNCTION_DECL) ||
(((enum tree_code) ((*((const_cast<tree*> (tree_operand_check
((cst), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 4190, __FUNCTION__))))))->base.code) == VAR_DECL))) ? fancy_abort
("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 4191, __FUNCTION__), 0 : 0))
4191 == VAR_DECL)))((void)(!(((enum tree_code) (cst)->base.code) == ADDR_EXPR
&& ((((enum tree_code) ((*((const_cast<tree*> (
tree_operand_check ((cst), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 4188, __FUNCTION__))))))->base.code) == FUNCTION_DECL) ||
(((enum tree_code) ((*((const_cast<tree*> (tree_operand_check
((cst), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 4190, __FUNCTION__))))))->base.code) == VAR_DECL))) ? fancy_abort
("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 4191, __FUNCTION__), 0 : 0))
;
4192
4193 symtab_node *n = symtab_node::get (TREE_OPERAND (cst, 0)(*((const_cast<tree*> (tree_operand_check ((cst), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 4193, __FUNCTION__)))))
);
4194 if (n)
4195 {
4196 struct cgraph_node *clone;
4197 bool removed = remove_described_reference (n, rdesc);
4198 /* The reference might have been removed by IPA-CP. */
4199 if (removed
4200 && ipa_get_param_load_dereferenced (old_root_info, i))
4201 {
4202 new_root->create_reference (n, IPA_REF_LOAD, NULLnullptr);
4203 if (dump_file)
4204 fprintf (dump_file, "ipa-prop: ...replaced it with "
4205 "LOAD one from %s to %s.\n",
4206 new_root->dump_name (), n->dump_name ());
4207 }
4208
4209 clone = cs->caller;
4210 while (clone->inlined_to
4211 && clone->ipcp_clone
4212 && clone != rdesc->cs->caller)
4213 {
4214 struct ipa_ref *ref;
4215 ref = clone->find_reference (n, NULLnullptr, 0);
4216 if (ref)
4217 {
4218 if (dump_file)
4219 fprintf (dump_file, "ipa-prop: Removing "
4220 "cloning-created reference "
4221 "from %s to %s.\n",
4222 clone->dump_name (),
4223 n->dump_name ());
4224 ref->remove_reference ();
4225 }
4226 clone = clone->callers->caller;
4227 }
4228 }
4229 }
4230 }
4231 }
4232
4233 for (i = ipa_get_param_count (old_root_info);
4234 i < ipa_get_cs_argument_count (args);
4235 i++)
4236 {
4237 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
4238
4239 if (jf->type == IPA_JF_CONST)
4240 {
4241 struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
4242 if (rdesc)
4243 rdesc->refcount = IPA_UNDESCRIBED_USE-1;
4244 }
4245 else if (jf->type == IPA_JF_PASS_THROUGH)
4246 ipa_set_controlled_uses (new_root_info,
4247 jf->value.pass_through.formal_id,
4248 IPA_UNDESCRIBED_USE-1);
4249 }
4250}
4251
/* Update jump functions and call note functions on inlining the call site CS.
   CS is expected to lead to a node already cloned by
   cgraph_clone_inline_nodes.  Newly discovered indirect edges will be added to
   *NEW_EDGES, unless NEW_EDGES is NULL.  Return true iff a new edge(s) were
   created.  */

bool
ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
				   vec<cgraph_edge *> *new_edges)
{
  bool changed;
  /* Do nothing if the preparation phase has not been carried out yet
     (i.e. during early inlining).  */
  if (!ipa_node_params_sum)
    return false;
  gcc_assert (ipa_edge_args_sum);

  /* Controlled-uses bookkeeping must run before the callee's summaries are
     discarded below.  */
  propagate_controlled_uses (cs);
  changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
  /* The callee is now inlined; its parameter summary is no longer needed.  */
  ipa_node_params_sum->remove (cs->callee);

  ipa_edge_args *args = ipa_edge_args_sum->get (cs);
  if (args)
    {
      bool ok = true;
      if (args->jump_functions)
	{
	  struct ipa_jump_func *jf;
	  int i;
	  /* Keep the edge summary alive while any constant jump function
	     still carries a reference description; removing the summary
	     would lose that rdesc bookkeeping.  */
	  FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
	    if (jf->type == IPA_JF_CONST
		&& ipa_get_jf_constant_rdesc (jf))
	      {
		ok = false;
		break;
	      }
	}
      if (ok)
	ipa_edge_args_sum->remove (cs);
    }
  if (ipcp_transformation_sum)
    ipcp_transformation_sum->remove (cs->callee);

  return changed;
}
4297
4298/* Ensure that array of edge arguments infos is big enough to accommodate a
4299 structure for all edges and reallocates it if not. Also, allocate
4300 associated hash tables is they do not already exist. */
4301
4302void
4303ipa_check_create_edge_args (void)
4304{
4305 if (!ipa_edge_args_sum)
4306 ipa_edge_args_sum
4307 = (new (ggc_alloc_no_dtor<ipa_edge_args_sum_t> ())
4308 ipa_edge_args_sum_t (symtab, true));
4309 if (!ipa_bits_hash_table)
4310 ipa_bits_hash_table = hash_table<ipa_bit_ggc_hash_traits>::create_ggc (37);
4311 if (!ipa_vr_hash_table)
4312 ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
4313}
4314
4315/* Free all ipa_edge structures. */
4316
4317void
4318ipa_free_all_edge_args (void)
4319{
4320 if (!ipa_edge_args_sum)
4321 return;
4322
4323 ggc_delete (ipa_edge_args_sum);
4324 ipa_edge_args_sum = NULLnullptr;
4325}
4326
4327/* Free all ipa_node_params structures. */
4328
4329void
4330ipa_free_all_node_params (void)
4331{
4332 if (ipa_node_params_sum)
4333 ggc_delete (ipa_node_params_sum);
4334 ipa_node_params_sum = NULLnullptr;
4335}
4336
4337/* Initialize IPA CP transformation summary and also allocate any necessary hash
4338 tables if they do not already exist. */
4339
4340void
4341ipcp_transformation_initialize (void)
4342{
4343 if (!ipa_bits_hash_table)
4344 ipa_bits_hash_table = hash_table<ipa_bit_ggc_hash_traits>::create_ggc (37);
4345 if (!ipa_vr_hash_table)
4346 ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
4347 if (ipcp_transformation_sum == NULLnullptr)
4348 {
4349 ipcp_transformation_sum = ipcp_transformation_t::create_ggc (symtab);
4350 ipcp_transformation_sum->disable_insertion_hook ();
4351 }
4352}
4353
4354/* Release the IPA CP transformation summary. */
4355
4356void
4357ipcp_free_transformation_sum (void)
4358{
4359 if (!ipcp_transformation_sum)
4360 return;
4361
4362 ipcp_transformation_sum->~function_summary<ipcp_transformation *> ();
4363 ggc_free (ipcp_transformation_sum);
4364 ipcp_transformation_sum = NULLnullptr;
4365}
4366
4367/* Set the aggregate replacements of NODE to be AGGVALS. */
4368
4369void
4370ipa_set_node_agg_value_chain (struct cgraph_node *node,
4371 struct ipa_agg_replacement_value *aggvals)
4372{
4373 ipcp_transformation_initialize ();
4374 ipcp_transformation *s = ipcp_transformation_sum->get_create (node);
4375 s->agg_values = aggvals;
4376}
4377
/* Hook that is called by cgraph.c when an edge is removed.  Adjust reference
   count data structures accordingly.  */

void
ipa_edge_args_sum_t::remove (cgraph_edge *cs, ipa_edge_args *args)
{
  if (args->jump_functions)
    {
      struct ipa_jump_func *jf;
      int i;
      FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
	{
	  struct ipa_cst_ref_desc *rdesc;
	  /* Drop the reference count this edge held through the jump
	     function.  */
	  try_decrement_rdesc_refcount (jf);
	  /* A reference description must not keep a back-pointer to a dead
	     edge; clear it if it refers to CS.  */
	  if (jf->type == IPA_JF_CONST
	      && (rdesc = ipa_get_jf_constant_rdesc (jf))
	      && rdesc->cs == cs)
	    rdesc->cs = NULL;
	}
    }
}
4399
/* Method invoked when an edge is duplicated.  Copy ipa_edge_args and adjust
   reference count data structures accordingly.  */

void
ipa_edge_args_sum_t::duplicate (cgraph_edge *src, cgraph_edge *dst,
				ipa_edge_args *old_args,
				ipa_edge_args *new_args)
{
  unsigned int i;

  new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
  if (old_args->polymorphic_call_contexts)
    new_args->polymorphic_call_contexts
      = vec_safe_copy (old_args->polymorphic_call_contexts);

  for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
    {
      struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
      struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);

      /* The shallow copy above shares the agg.items vector; give the new
	 jump function its own copy.  */
      dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);

      if (src_jf->type == IPA_JF_CONST)
	{
	  struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);

	  if (!src_rdesc)
	    dst_jf->value.constant.rdesc = NULL;
	  else if (src->caller == dst->caller)
	    {
	      /* Creation of a speculative edge.  If the source edge is the one
		 grabbing a reference, we must create a new (duplicate)
		 reference description.  Otherwise they refer to the same
		 description corresponding to a reference taken in a function
		 src->caller is inlined to.  In that case we just must
		 increment the refcount.  */
	      if (src_rdesc->cs == src)
		{
		  symtab_node *n = symtab_node_for_jfunc (src_jf);
		  gcc_checking_assert (n);
		  ipa_ref *ref
		    = src->caller->find_reference (n, src->call_stmt,
						   src->lto_stmt_uid);
		  gcc_checking_assert (ref);
		  dst->caller->clone_reference (ref, ref->stmt);

		  ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
		  dst_rdesc->cs = dst;
		  dst_rdesc->refcount = src_rdesc->refcount;
		  dst_rdesc->next_duplicate = NULL;
		  dst_jf->value.constant.rdesc = dst_rdesc;
		}
	      else
		{
		  src_rdesc->refcount++;
		  dst_jf->value.constant.rdesc = src_rdesc;
		}
	    }
	  else if (src_rdesc->cs == src)
	    {
	      /* Edge duplicated into a different caller (cloning/inlining):
		 create a new description and chain it on the duplicate
		 list of the source description.  */
	      struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
	      dst_rdesc->cs = dst;
	      dst_rdesc->refcount = src_rdesc->refcount;
	      dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
	      src_rdesc->next_duplicate = dst_rdesc;
	      dst_jf->value.constant.rdesc = dst_rdesc;
	    }
	  else
	    {
	      struct ipa_cst_ref_desc *dst_rdesc;
	      /* This can happen during inlining, when a JFUNC can refer to a
		 reference taken in a function up in the tree of inline clones.
		 We need to find the duplicate that refers to our tree of
		 inline clones.  */

	      gcc_assert (dst->caller->inlined_to);
	      for (dst_rdesc = src_rdesc->next_duplicate;
		   dst_rdesc;
		   dst_rdesc = dst_rdesc->next_duplicate)
		{
		  struct cgraph_node *top;
		  top = dst_rdesc->cs->caller->inlined_to
		    ? dst_rdesc->cs->caller->inlined_to
		    : dst_rdesc->cs->caller;
		  if (dst->caller->inlined_to == top)
		    break;
		}
	      gcc_assert (dst_rdesc);
	      dst_jf->value.constant.rdesc = dst_rdesc;
	    }
	}
      else if (dst_jf->type == IPA_JF_PASS_THROUGH
	       && src->caller == dst->caller)
	{
	  /* Creation of a speculative edge: the pass-through argument gains
	     one more tracked use of the caller's parameter.  */
	  struct cgraph_node *inline_root = dst->caller->inlined_to
	    ? dst->caller->inlined_to : dst->caller;
	  /* NOTE(review): assumes the inline root always has an
	     ipa_node_params summary on this path -- confirm get () cannot
	     return NULL here.  */
	  ipa_node_params *root_info = ipa_node_params_sum->get (inline_root);
	  int idx = ipa_get_jf_pass_through_formal_id (dst_jf);

	  int c = ipa_get_controlled_uses (root_info, idx);
	  if (c != IPA_UNDESCRIBED_USE)
	    {
	      c++;
	      ipa_set_controlled_uses (root_info, idx, c);
	    }
	}
    }
}
4507
4508/* Analyze newly added function into callgraph. */
4509
4510static void
4511ipa_add_new_function (cgraph_node *node, void *data ATTRIBUTE_UNUSED__attribute__ ((__unused__)))
4512{
4513 if (node->has_gimple_body_p ())
4514 ipa_analyze_node (node);
4515}
4516
/* Hook that is called by summary when a node is duplicated.  */

void
ipa_node_params_t::duplicate(cgraph_node *src, cgraph_node *dst,
			     ipa_node_params *old_info,
			     ipa_node_params *new_info)
{
  ipa_agg_replacement_value *old_av, *new_av;

  new_info->descriptors = vec_safe_copy (old_info->descriptors);
  /* Lattices are not copied; they are only live during IPA-CP propagation
     itself.  */
  new_info->lattices = NULL;
  new_info->ipcp_orig_node = old_info->ipcp_orig_node;
  new_info->known_csts = old_info->known_csts.copy ();
  new_info->known_contexts = old_info->known_contexts.copy ();

  new_info->analysis_done = old_info->analysis_done;
  new_info->node_enqueued = old_info->node_enqueued;
  new_info->versionable = old_info->versionable;

  old_av = ipa_get_agg_replacements_for_node (src);
  if (old_av)
    {
      /* Deep-copy the chain of aggregate replacement values.  Note that
	 prepending each copied element reverses the order of the chain on
	 the destination node.  */
      new_av = NULL;
      while (old_av)
	{
	  struct ipa_agg_replacement_value *v;

	  v = ggc_alloc<ipa_agg_replacement_value> ();
	  memcpy (v, old_av, sizeof (*v));
	  v->next = new_av;
	  new_av = v;
	  old_av = old_av->next;
	}
      ipa_set_node_agg_value_chain (dst, new_av);
    }
}
4553
/* Duplication of ipcp transformation summaries.  */

void
ipcp_transformation_t::duplicate(cgraph_node *, cgraph_node *dst,
				 ipcp_transformation *src_trans,
				 ipcp_transformation *dst_trans)
{
  /* Avoid redundant work of duplicating vectors we will never use.  */
  if (dst->inlined_to)
    return;
  dst_trans->bits = vec_safe_copy (src_trans->bits);
  dst_trans->m_vr = vec_safe_copy (src_trans->m_vr);
  /* Deep-copy the chain of aggregate replacement values, preserving its
     order.  Each copied node initially inherits the source node's next
     pointer via the struct copy; it is then overwritten by the following
     iteration, and the last element keeps the source's terminating NULL.  */
  ipa_agg_replacement_value *agg = src_trans->agg_values,
			    **aggptr = &dst_trans->agg_values;
  while (agg)
    {
      *aggptr = ggc_alloc<ipa_agg_replacement_value> ();
      **aggptr = *agg;
      agg = agg->next;
      aggptr = &(*aggptr)->next;
    }
}
4576
4577/* Register our cgraph hooks if they are not already there. */
4578
4579void
4580ipa_register_cgraph_hooks (void)
4581{
4582 ipa_check_create_node_params ();
4583 ipa_check_create_edge_args ();
4584
4585 function_insertion_hook_holder =
4586 symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULLnullptr);
4587}
4588
4589/* Unregister our cgraph hooks if they are not already there. */
4590
4591static void
4592ipa_unregister_cgraph_hooks (void)
4593{
4594 if (function_insertion_hook_holder)
4595 symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
4596 function_insertion_hook_holder = NULLnullptr;
4597}
4598
4599/* Free all ipa_node_params and all ipa_edge_args structures if they are no
4600 longer needed after ipa-cp. */
4601
4602void
4603ipa_free_all_structures_after_ipa_cp (void)
4604{
4605 if (!optimizeglobal_options.x_optimize && !in_lto_pglobal_options.x_in_lto_p)
4606 {
4607 ipa_free_all_edge_args ();
4608 ipa_free_all_node_params ();
4609 ipcp_sources_pool.release ();
4610 ipcp_cst_values_pool.release ();
4611 ipcp_poly_ctx_values_pool.release ();
4612 ipcp_agg_lattice_pool.release ();
4613 ipa_unregister_cgraph_hooks ();
4614 ipa_refdesc_pool.release ();
4615 }
4616}
4617
/* Free all ipa_node_params and all ipa_edge_args structures if they are no
   longer needed after indirect inlining.  Unlike the post-ipa-cp variant,
   this frees unconditionally.  */

void
ipa_free_all_structures_after_iinln (void)
{
  ipa_free_all_edge_args ();
  ipa_free_all_node_params ();
  ipa_unregister_cgraph_hooks ();
  /* Release the allocation pools backing the IPA-CP lattices and reference
     descriptions.  */
  ipcp_sources_pool.release ();
  ipcp_cst_values_pool.release ();
  ipcp_poly_ctx_values_pool.release ();
  ipcp_agg_lattice_pool.release ();
  ipa_refdesc_pool.release ();
}
4633
4634/* Print ipa_tree_map data structures of all functions in the
4635 callgraph to F. */
4636
4637void
4638ipa_print_node_params (FILE *f, struct cgraph_node *node)
4639{
4640 int i, count;
4641 class ipa_node_params *info;
4642
4643 if (!node->definition)
4644 return;
4645 info = ipa_node_params_sum->get (node);
4646 fprintf (f, " function %s parameter descriptors:\n", node->dump_name ());
4647 if (!info)
4648 {
4649 fprintf (f, " no params return\n");
4650 return;
4651 }
4652 count = ipa_get_param_count (info);
4653 for (i = 0; i < count; i++)
4654 {
4655 int c;
4656
4657 fprintf (f, " ");
4658 ipa_dump_param (f, info, i);
4659 if (ipa_is_param_used (info, i))
4660 fprintf (f, " used");
4661 if (ipa_is_param_used_by_ipa_predicates (info, i))
4662 fprintf (f, " used_by_ipa_predicates");
4663 if (ipa_is_param_used_by_indirect_call (info, i))
4664 fprintf (f, " used_by_indirect_call");
4665 if (ipa_is_param_used_by_polymorphic_call (info, i))
4666 fprintf (f, " used_by_polymorphic_call");
4667 c = ipa_get_controlled_uses (info, i);
4668 if (c == IPA_UNDESCRIBED_USE-1)
4669 fprintf (f, " undescribed_use");
4670 else
4671 fprintf (f, " controlled_uses=%i %s", c,
4672 ipa_get_param_load_dereferenced (info, i)
4673 ? "(load_dereferenced)" : "");
4674 fprintf (f, "\n");
4675 }
4676}
4677
4678/* Print ipa_tree_map data structures of all functions in the
4679 callgraph to F. */
4680
4681void
4682ipa_print_all_params (FILE * f)
4683{
4684 struct cgraph_node *node;
4685
4686 fprintf (f, "\nFunction parameters:\n");
4687 FOR_EACH_FUNCTION (node)for ((node) = symtab->first_function (); (node); (node) = symtab
->next_function ((node)))
4688 ipa_print_node_params (f, node);
4689}
4690
4691/* Dump the AV linked list. */
4692
4693void
4694ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
4695{
4696 bool comma = false;
4697 fprintf (f, " Aggregate replacements:");
4698 for (; av; av = av->next)
4699 {
4700 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC"%" "l" "d" "]=", comma ? "," : "",
4701 av->index, av->offset);
4702 print_generic_expr (f, av->value);
4703 comma = true;
4704 }
4705 fprintf (f, "\n");
4706}
4707
/* Stream out jump function JUMP_FUNC to OB.  */

static void
ipa_write_jump_function (struct output_block *ob,
			 struct ipa_jump_func *jump_func)
{
  struct ipa_agg_jf_item *item;
  struct bitpack_d bp;
  int i, count;
  int flag = 0;

  /* ADDR_EXPRs are very comon IP invariants; save some streamer data
     as well as WPA memory by handling them specially.  */
  if (jump_func->type == IPA_JF_CONST
      && TREE_CODE (jump_func->value.constant.value) == ADDR_EXPR)
    flag = 1;

  /* The low bit of the streamed type word encodes the ADDR_EXPR flag.  */
  streamer_write_uhwi (ob, jump_func->type * 2 + flag);
  switch (jump_func->type)
    {
    case IPA_JF_UNKNOWN:
      break;
    case IPA_JF_CONST:
      gcc_assert (
	  EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
      /* With FLAG set, only the ADDR_EXPR operand is streamed; the reader
	 rebuilds the ADDR_EXPR wrapper.  */
      stream_write_tree (ob,
			 flag
			 ? TREE_OPERAND (jump_func->value.constant.value, 0)
			 : jump_func->value.constant.value, true);
      break;
    case IPA_JF_PASS_THROUGH:
      streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
      if (jump_func->value.pass_through.operation == NOP_EXPR)
	{
	  streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
	  bp = bitpack_create (ob->main_stream);
	  bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
	  streamer_write_bitpack (&bp);
	}
      else if (TREE_CODE_CLASS (jump_func->value.pass_through.operation)
	       == tcc_unary)
	streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
      else
	{
	  /* Binary operations also carry the second operand.  */
	  stream_write_tree (ob, jump_func->value.pass_through.operand, true);
	  streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
	}
      break;
    case IPA_JF_ANCESTOR:
      streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
      streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
      streamer_write_bitpack (&bp);
      break;
    default:
      fatal_error (UNKNOWN_LOCATION, "invalid jump function in LTO stream");
    }

  /* Aggregate part: item count, by_ref flag, then the items themselves.  */
  count = vec_safe_length (jump_func->agg.items);
  streamer_write_uhwi (ob, count);
  if (count)
    {
      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, jump_func->agg.by_ref, 1);
      streamer_write_bitpack (&bp);
    }

  FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
    {
      stream_write_tree (ob, item->type, true);
      streamer_write_uhwi (ob, item->offset);
      streamer_write_uhwi (ob, item->jftype);
      switch (item->jftype)
	{
	case IPA_JF_UNKNOWN:
	  break;
	case IPA_JF_CONST:
	  stream_write_tree (ob, item->value.constant, true);
	  break;
	case IPA_JF_PASS_THROUGH:
	case IPA_JF_LOAD_AGG:
	  streamer_write_uhwi (ob, item->value.pass_through.operation);
	  streamer_write_uhwi (ob, item->value.pass_through.formal_id);
	  if (TREE_CODE_CLASS (item->value.pass_through.operation)
	      != tcc_unary)
	    stream_write_tree (ob, item->value.pass_through.operand, true);
	  if (item->jftype == IPA_JF_LOAD_AGG)
	    {
	      /* Load-from-aggregate items additionally describe the loaded
		 location.  */
	      stream_write_tree (ob, item->value.load_agg.type, true);
	      streamer_write_uhwi (ob, item->value.load_agg.offset);
	      bp = bitpack_create (ob->main_stream);
	      bp_pack_value (&bp, item->value.load_agg.by_ref, 1);
	      streamer_write_bitpack (&bp);
	    }
	  break;
	default:
	  fatal_error (UNKNOWN_LOCATION,
		       "invalid jump function in LTO stream");
	}
    }

  /* Optional bits and value-range information, each preceded by a presence
     bit.  */
  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, !!jump_func->bits, 1);
  streamer_write_bitpack (&bp);
  if (jump_func->bits)
    {
      streamer_write_widest_int (ob, jump_func->bits->value);
      streamer_write_widest_int (ob, jump_func->bits->mask);
    }
  bp_pack_value (&bp, !!jump_func->m_vr, 1);
  streamer_write_bitpack (&bp);
  if (jump_func->m_vr)
    {
      streamer_write_enum (ob->main_stream, value_rang_type,
			   VR_LAST, jump_func->m_vr->kind ());
      stream_write_tree (ob, jump_func->m_vr->min (), true);
      stream_write_tree (ob, jump_func->m_vr->max (), true);
    }
}
4828
4829/* Read in jump function JUMP_FUNC from IB. */
4830
4831static void
4832ipa_read_jump_function (class lto_input_block *ib,
4833 struct ipa_jump_func *jump_func,
4834 struct cgraph_edge *cs,
4835 class data_in *data_in,
4836 bool prevails)
4837{
4838 enum jump_func_type jftype;
4839 enum tree_code operation;
4840 int i, count;
4841 int val = streamer_read_uhwi (ib);
4842 bool flag = val & 1;
4843
4844 jftype = (enum jump_func_type) (val / 2);
4845 switch (jftype)
4846 {
4847 case IPA_JF_UNKNOWN:
4848 ipa_set_jf_unknown (jump_func);
4849 break;
4850 case IPA_JF_CONST:
4851 {
4852 tree t = stream_read_tree (ib, data_in)streamer_hooks.read_tree (ib, data_in);
4853 if (flag && prevails)
4854 t = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (t)((contains_struct_check ((t), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 4854, __FUNCTION__))->typed.type)
), t);
4855 ipa_set_jf_constant (jump_func, t, cs);
4856 }
4857 break;
4858 case IPA_JF_PASS_THROUGH:
4859 operation = (enum tree_code) streamer_read_uhwi (ib);
4860 if (operation == NOP_EXPR)
4861 {
4862 int formal_id = streamer_read_uhwi (ib);
4863 struct bitpack_d bp = streamer_read_bitpack (ib);
4864 bool agg_preserved = bp_unpack_value (&bp, 1);
4865 ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved);
4866 }
4867 else if (TREE_CODE_CLASS (operation)tree_code_type[(int) (operation)] == tcc_unary)
4868 {
4869 int formal_id = streamer_read_uhwi (ib);
4870 ipa_set_jf_unary_pass_through (jump_func, formal_id, operation);
4871 }
4872 else
4873 {
4874 tree operand = stream_read_tree (ib, data_in)streamer_hooks.read_tree (ib, data_in);
4875 int formal_id = streamer_read_uhwi (ib);
4876 ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
4877 operation);
4878 }
4879 break;
4880 case IPA_JF_ANCESTOR:
4881 {
4882 HOST_WIDE_INTlong offset = streamer_read_uhwi (ib);
4883 int formal_id = streamer_read_uhwi (ib);
4884 struct bitpack_d bp = streamer_read_bitpack (ib);
4885 bool agg_preserved = bp_unpack_value (&bp, 1);
4886 ipa_set_ancestor_jf (jump_func, offset, formal_id, agg_preserved);
4887 break;
4888 }
4889 default:
4890 fatal_error (UNKNOWN_LOCATION((location_t) 0), "invalid jump function in LTO stream");
4891 }
4892
4893 count = streamer_read_uhwi (ib);
4894 if (prevails)
4895 {
4896 jump_func->agg.items = NULLnullptr;
4897 vec_safe_reserve (jump_func->agg.items, count, true);
4898 }
4899 if (count)
4900 {
4901 struct bitpack_d bp = streamer_read_bitpack (ib);
4902 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
4903 }
4904 for (i = 0; i < count; i++)
4905 {
4906 struct ipa_agg_jf_item item;
4907 item.type = stream_read_tree (ib, data_in)streamer_hooks.read_tree (ib, data_in);
4908 item.offset = streamer_read_uhwi (ib);
4909 item.jftype = (enum jump_func_type) streamer_read_uhwi (ib);
4910
4911 switch (item.jftype)
4912 {
4913 case IPA_JF_UNKNOWN:
4914 break;
4915 case IPA_JF_CONST:
4916 item.value.constant = stream_read_tree (ib, data_in)streamer_hooks.read_tree (ib, data_in);
4917 break;
4918 case IPA_JF_PASS_THROUGH:
4919 case IPA_JF_LOAD_AGG:
4920 operation = (enum tree_code) streamer_read_uhwi (ib);
4921 item.value.pass_through.operation = operation;
4922 item.value.pass_through.formal_id = streamer_read_uhwi (ib);
4923 if (TREE_CODE_CLASS (operation)tree_code_type[(int) (operation)] == tcc_unary)
4924 item.value.pass_through.operand = NULL_TREE(tree) nullptr;
4925 else
4926 item.value.pass_through.operand = stream_read_tree (ib, data_in)streamer_hooks.read_tree (ib, data_in);
4927 if (item.jftype == IPA_JF_LOAD_AGG)
4928 {
4929 struct bitpack_d bp;
4930 item.value.load_agg.type = stream_read_tree (ib, data_in)streamer_hooks.read_tree (ib, data_in);
4931 item.value.load_agg.offset = streamer_read_uhwi (ib);
4932 bp = streamer_read_bitpack (ib);
4933 item.value.load_agg.by_ref = bp_unpack_value (&bp, 1);
4934 }
4935 break;
4936 default:
4937 fatal_error (UNKNOWN_LOCATION((location_t) 0),
4938 "invalid jump function in LTO stream");
4939 }
4940 if (prevails)
4941 jump_func->agg.items->quick_push (item);
4942 }
4943
4944 struct bitpack_d bp = streamer_read_bitpack (ib);
4945 bool bits_known = bp_unpack_value (&bp, 1);
4946 if (bits_known)
4947 {
4948 widest_int value = streamer_read_widest_int (ib);
4949 widest_int mask = streamer_read_widest_int (ib);
4950 if (prevails)
4951 ipa_set_jfunc_bits (jump_func, value, mask);
4952 }
4953 else
4954 jump_func->bits = NULLnullptr;
4955
4956 struct bitpack_d vr_bp = streamer_read_bitpack (ib);
4957 bool vr_known = bp_unpack_value (&vr_bp, 1);
4958 if (vr_known)
4959 {
4960 enum value_range_kind type = streamer_read_enum (ib, value_range_kind,(enum value_range_kind)streamer_read_hwi_in_range ((ib), "value_range_kind"
, 0, (int)(VR_LAST) - 1)
4961 VR_LAST)(enum value_range_kind)streamer_read_hwi_in_range ((ib), "value_range_kind"
, 0, (int)(VR_LAST) - 1)
;
4962 tree min = stream_read_tree (ib, data_in)streamer_hooks.read_tree (ib, data_in);
4963 tree max = stream_read_tree (ib, data_in)streamer_hooks.read_tree (ib, data_in);
4964 if (prevails)
4965 ipa_set_jfunc_vr (jump_func, type, min, max);
4966 }
4967 else
4968 jump_func->m_vr = NULLnullptr;
4969}
4970
4971/* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
4972 relevant to indirect inlining to OB. */
4973
4974static void
4975ipa_write_indirect_edge_info (struct output_block *ob,
4976 struct cgraph_edge *cs)
4977{
4978 class cgraph_indirect_call_info *ii = cs->indirect_info;
4979 struct bitpack_d bp;
4980
4981 streamer_write_hwi (ob, ii->param_index);
4982 bp = bitpack_create (ob->main_stream);
4983 bp_pack_value (&bp, ii->polymorphic, 1);
4984 bp_pack_value (&bp, ii->agg_contents, 1);
4985 bp_pack_value (&bp, ii->member_ptr, 1);
4986 bp_pack_value (&bp, ii->by_ref, 1);
4987 bp_pack_value (&bp, ii->guaranteed_unmodified, 1);
4988 bp_pack_value (&bp, ii->vptr_changed, 1);
4989 streamer_write_bitpack (&bp);
4990 if (ii->agg_contents || ii->polymorphic)
4991 streamer_write_hwi (ob, ii->offset);
4992 else
4993 gcc_assert (ii->offset == 0)((void)(!(ii->offset == 0) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 4993, __FUNCTION__), 0 : 0))
;
4994
4995 if (ii->polymorphic)
4996 {
4997 streamer_write_hwi (ob, ii->otr_token);
4998 stream_write_tree (ob, ii->otr_type, true)streamer_hooks.write_tree (ob, ii->otr_type, true, true);
4999 ii->context.stream_out (ob);
5000 }
5001}
5002
5003/* Read in parts of cgraph_indirect_call_info corresponding to CS that are
5004 relevant to indirect inlining from IB. */
5005
5006static void
5007ipa_read_indirect_edge_info (class lto_input_block *ib,
5008 class data_in *data_in,
5009 struct cgraph_edge *cs,
5010 class ipa_node_params *info)
5011{
5012 class cgraph_indirect_call_info *ii = cs->indirect_info;
5013 struct bitpack_d bp;
5014
5015 ii->param_index = (int) streamer_read_hwi (ib);
5016 bp = streamer_read_bitpack (ib);
5017 ii->polymorphic = bp_unpack_value (&bp, 1);
5018 ii->agg_contents = bp_unpack_value (&bp, 1);
5019 ii->member_ptr = bp_unpack_value (&bp, 1);
5020 ii->by_ref = bp_unpack_value (&bp, 1);
5021 ii->guaranteed_unmodified = bp_unpack_value (&bp, 1);
5022 ii->vptr_changed = bp_unpack_value (&bp, 1);
5023 if (ii->agg_contents || ii->polymorphic)
5024 ii->offset = (HOST_WIDE_INTlong) streamer_read_hwi (ib);
5025 else
5026 ii->offset = 0;
5027 if (ii->polymorphic)
5028 {
5029 ii->otr_token = (HOST_WIDE_INTlong) streamer_read_hwi (ib);
5030 ii->otr_type = stream_read_tree (ib, data_in)streamer_hooks.read_tree (ib, data_in);
5031 ii->context.stream_in (ib, data_in);
5032 }
5033 if (info && ii->param_index >= 0)
5034 {
5035 if (ii->polymorphic)
5036 ipa_set_param_used_by_polymorphic_call (info,
5037 ii->param_index , true);
5038 ipa_set_param_used_by_indirect_call (info,
5039 ii->param_index, true);
5040 }
5041}
5042
5043/* Stream out NODE info to OB. */
5044
5045static void
5046ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
5047{
5048 int node_ref;
5049 lto_symtab_encoder_t encoder;
5050 ipa_node_params *info = ipa_node_params_sum->get (node);
5051 int j;
5052 struct cgraph_edge *e;
5053 struct bitpack_d bp;
5054
5055 encoder = ob->decl_state->symtab_node_encoder;
5056 node_ref = lto_symtab_encoder_encode (encoder, node);
5057 streamer_write_uhwi (ob, node_ref);
5058
5059 streamer_write_uhwi (ob, ipa_get_param_count (info));
5060 for (j = 0; j < ipa_get_param_count (info); j++)
5061 streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
5062 bp = bitpack_create (ob->main_stream);
5063 gcc_assert (info->analysis_done((void)(!(info->analysis_done || ipa_get_param_count (info
) == 0) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 5064, __FUNCTION__), 0 : 0))
5064 || ipa_get_param_count (info) == 0)((void)(!(info->analysis_done || ipa_get_param_count (info
) == 0) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 5064, __FUNCTION__), 0 : 0))
;
5065 gcc_assert (!info->node_enqueued)((void)(!(!info->node_enqueued) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 5065, __FUNCTION__), 0 : 0))
;
5066 gcc_assert (!info->ipcp_orig_node)((void)(!(!info->ipcp_orig_node) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 5066, __FUNCTION__), 0 : 0))
;
5067 for (j = 0; j < ipa_get_param_count (info); j++)
5068 {
5069 /* TODO: We could just not stream the bit in the undescribed case. */
5070 bool d = (ipa_get_controlled_uses (info, j) != IPA_UNDESCRIBED_USE-1)
5071 ? ipa_get_param_load_dereferenced (info, j) : true;
5072 bp_pack_value (&bp, d, 1);
5073 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
5074 }
5075 streamer_write_bitpack (&bp);
5076 for (j = 0; j < ipa_get_param_count (info); j++)
5077 {
5078 streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
5079 stream_write_tree (ob, ipa_get_type (info, j), true)streamer_hooks.write_tree (ob, ipa_get_type (info, j), true, true
)
;
5080 }
5081 for (e = node->callees; e; e = e->next_callee)
5082 {
5083 ipa_edge_args *args = ipa_edge_args_sum->get (e);
5084
5085 if (!args)
5086 {
5087 streamer_write_uhwi (ob, 0);
5088 continue;
5089 }
5090
5091 streamer_write_uhwi (ob,
5092 ipa_get_cs_argument_count (args) * 2
5093 + (args->polymorphic_call_contexts != NULLnullptr));
5094 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
5095 {
5096 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
5097 if (args->polymorphic_call_contexts != NULLnullptr)
5098 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
5099 }
5100 }
5101 for (e = node->indirect_calls; e; e = e->next_callee)
5102 {
5103 ipa_edge_args *args = ipa_edge_args_sum->get (e);
5104 if (!args)
5105 streamer_write_uhwi (ob, 0);
5106 else
5107 {
5108 streamer_write_uhwi (ob,
5109 ipa_get_cs_argument_count (args) * 2
5110 + (args->polymorphic_call_contexts != NULLnullptr));
5111 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
5112 {
5113 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
5114 if (args->polymorphic_call_contexts != NULLnullptr)
5115 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
5116 }
5117 }
5118 ipa_write_indirect_edge_info (ob, e);
5119 }
5120}
5121
5122/* Stream in edge E from IB. */
5123
5124static void
5125ipa_read_edge_info (class lto_input_block *ib,
5126 class data_in *data_in,
5127 struct cgraph_edge *e, bool prevails)
5128{
5129 int count = streamer_read_uhwi (ib);
5130 bool contexts_computed = count & 1;
5131
5132 count /= 2;
5133 if (!count)
5134 return;
5135 if (prevails
5136 && (e->possibly_call_in_translation_unit_p ()
5137 /* Also stream in jump functions to builtins in hope that they
5138 will get fnspecs. */
5139 || fndecl_built_in_p (e->callee->decl, BUILT_IN_NORMAL)))
5140 {
5141 ipa_edge_args *args = ipa_edge_args_sum->get_create (e);
5142 vec_safe_grow_cleared (args->jump_functions, count, true);
5143 if (contexts_computed)
5144 vec_safe_grow_cleared (args->polymorphic_call_contexts, count, true);
5145 for (int k = 0; k < count; k++)
5146 {
5147 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
5148 data_in, prevails);
5149 if (contexts_computed)
5150 ipa_get_ith_polymorhic_call_context (args, k)->stream_in
5151 (ib, data_in);
5152 }
5153 }
5154 else
5155 {
5156 for (int k = 0; k < count; k++)
5157 {
5158 struct ipa_jump_func dummy;
5159 ipa_read_jump_function (ib, &dummy, e,
5160 data_in, prevails);
5161 if (contexts_computed)
5162 {
5163 class ipa_polymorphic_call_context ctx;
5164 ctx.stream_in (ib, data_in);
5165 }
5166 }
5167 }
5168}
5169
5170/* Stream in NODE info from IB. */
5171
5172static void
5173ipa_read_node_info (class lto_input_block *ib, struct cgraph_node *node,
5174 class data_in *data_in)
5175{
5176 int k;
5177 struct cgraph_edge *e;
5178 struct bitpack_d bp;
5179 bool prevails = node->prevailing_p ();
5180 ipa_node_params *info
5181 = prevails ? ipa_node_params_sum->get_create (node) : NULLnullptr;
5182
5183 int param_count = streamer_read_uhwi (ib);
5184 if (prevails)
5185 {
5186 ipa_alloc_node_params (node, param_count);
5187 for (k = 0; k < param_count; k++)
5188 (*info->descriptors)[k].move_cost = streamer_read_uhwi (ib);
5189 if (ipa_get_param_count (info) != 0)
5190 info->analysis_done = true;
5191 info->node_enqueued = false;
5192 }
5193 else
5194 for (k = 0; k < param_count; k++)
5195 streamer_read_uhwi (ib);
5196
5197 bp = streamer_read_bitpack (ib);
5198 for (k = 0; k < param_count; k++)
5199 {
5200 bool load_dereferenced = bp_unpack_value (&bp, 1);
5201 bool used = bp_unpack_value (&bp, 1);
5202
5203 if (prevails)
5204 {
5205 ipa_set_param_load_dereferenced (info, k, load_dereferenced);
5206 ipa_set_param_used (info, k, used);
5207 }
5208 }
5209 for (k = 0; k < param_count; k++)
5210 {
5211 int nuses = streamer_read_hwi (ib);
5212 tree type = stream_read_tree (ib, data_in)streamer_hooks.read_tree (ib, data_in);
5213
5214 if (prevails)
5215 {
5216 ipa_set_controlled_uses (info, k, nuses);
5217 (*info->descriptors)[k].decl_or_type = type;
5218 }
5219 }
5220 for (e = node->callees; e; e = e->next_callee)
5221 ipa_read_edge_info (ib, data_in, e, prevails);
5222 for (e = node->indirect_calls; e; e = e->next_callee)
5223 {
5224 ipa_read_edge_info (ib, data_in, e, prevails);
5225 ipa_read_indirect_edge_info (ib, data_in, e, info);
5226 }
5227}
5228
5229/* Write jump functions for nodes in SET. */
5230
5231void
5232ipa_prop_write_jump_functions (void)
5233{
5234 struct output_block *ob;
5235 unsigned int count = 0;
5236 lto_symtab_encoder_iterator lsei;
5237 lto_symtab_encoder_t encoder;
5238
5239 if (!ipa_node_params_sum || !ipa_edge_args_sum)
5240 return;
5241
5242 ob = create_output_block (LTO_section_jump_functions);
5243 encoder = ob->decl_state->symtab_node_encoder;
5244 ob->symbol = NULLnullptr;
5245 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5246 lsei_next_function_in_partition (&lsei))
5247 {
5248 cgraph_node *node = lsei_cgraph_node (lsei);
5249 if (node->has_gimple_body_p ()
5250 && ipa_node_params_sum->get (node) != NULLnullptr)
5251 count++;
5252 }
5253
5254 streamer_write_uhwi (ob, count);
5255
5256 /* Process all of the functions. */
5257 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5258 lsei_next_function_in_partition (&lsei))
5259 {
5260 cgraph_node *node = lsei_cgraph_node (lsei);
5261 if (node->has_gimple_body_p ()
5262 && ipa_node_params_sum->get (node) != NULLnullptr)
5263 ipa_write_node_info (ob, node);
5264 }
5265 streamer_write_char_stream (ob->main_stream, 0);
5266 produce_asm (ob, NULLnullptr);
5267 destroy_output_block (ob);
5268}
5269
5270/* Read section in file FILE_DATA of length LEN with data DATA. */
5271
5272static void
5273ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
5274 size_t len)
5275{
5276 const struct lto_function_header *header =
5277 (const struct lto_function_header *) data;
5278 const int cfg_offset = sizeof (struct lto_function_header);
5279 const int main_offset = cfg_offset + header->cfg_size;
5280 const int string_offset = main_offset + header->main_size;
5281 class data_in *data_in;
5282 unsigned int i;
5283 unsigned int count;
5284
5285 lto_input_block ib_main ((const char *) data + main_offset,
5286 header->main_size, file_data->mode_table);
5287
5288 data_in =
5289 lto_data_in_create (file_data, (const char *) data + string_offset,
5290 header->string_size, vNULL);
5291 count = streamer_read_uhwi (&ib_main);
5292
5293 for (i = 0; i < count; i++)
5294 {
5295 unsigned int index;
5296 struct cgraph_node *node;
5297 lto_symtab_encoder_t encoder;
5298
5299 index = streamer_read_uhwi (&ib_main);
5300 encoder = file_data->symtab_node_encoder;
5301 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
5302 index));
5303 gcc_assert (node->definition)((void)(!(node->definition) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 5303, __FUNCTION__), 0 : 0))
;
5304 ipa_read_node_info (&ib_main, node, data_in);
5305 }
5306 lto_free_section_data (file_data, LTO_section_jump_functions, NULLnullptr, data,
5307 len);
5308 lto_data_in_delete (data_in);
5309}
5310
5311/* Read ipcp jump functions. */
5312
5313void
5314ipa_prop_read_jump_functions (void)
5315{
5316 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5317 struct lto_file_decl_data *file_data;
5318 unsigned int j = 0;
5319
5320 ipa_check_create_node_params ();
5321 ipa_check_create_edge_args ();
5322 ipa_register_cgraph_hooks ();
5323
5324 while ((file_data = file_data_vec[j++]))
5325 {
5326 size_t len;
5327 const char *data
5328 = lto_get_summary_section_data (file_data, LTO_section_jump_functions,
5329 &len);
5330 if (data)
5331 ipa_prop_read_section (file_data, data, len);
5332 }
5333}
5334
5335void
5336write_ipcp_transformation_info (output_block *ob, cgraph_node *node)
5337{
5338 int node_ref;
5339 unsigned int count = 0;
5340 lto_symtab_encoder_t encoder;
5341 struct ipa_agg_replacement_value *aggvals, *av;
5342
5343 aggvals = ipa_get_agg_replacements_for_node (node);
5344 encoder = ob->decl_state->symtab_node_encoder;
5345 node_ref = lto_symtab_encoder_encode (encoder, node);
5346 streamer_write_uhwi (ob, node_ref);
5347
5348 for (av = aggvals; av; av = av->next)
5349 count++;
5350 streamer_write_uhwi (ob, count);
5351
5352 for (av = aggvals; av; av = av->next)
5353 {
5354 struct bitpack_d bp;
5355
5356 streamer_write_uhwi (ob, av->offset);
5357 streamer_write_uhwi (ob, av->index);
5358 stream_write_tree (ob, av->value, true)streamer_hooks.write_tree (ob, av->value, true, true);
5359
5360 bp = bitpack_create (ob->main_stream);
5361 bp_pack_value (&bp, av->by_ref, 1);
5362 streamer_write_bitpack (&bp);
5363 }
5364
5365 ipcp_transformation *ts = ipcp_get_transformation_summary (node);
5366 if (ts && vec_safe_length (ts->m_vr) > 0)
5367 {
5368 count = ts->m_vr->length ();
5369 streamer_write_uhwi (ob, count);
5370 for (unsigned i = 0; i < count; ++i)
5371 {
5372 struct bitpack_d bp;
5373 ipa_vr *parm_vr = &(*ts->m_vr)[i];
5374 bp = bitpack_create (ob->main_stream);
5375 bp_pack_value (&bp, parm_vr->known, 1);
5376 streamer_write_bitpack (&bp);
5377 if (parm_vr->known)
5378 {
5379 streamer_write_enum (ob->main_stream, value_rang_type,streamer_write_hwi_in_range ((ob->main_stream), 0, (int)(VR_LAST
) - 1, (int)(parm_vr->type))
5380 VR_LAST, parm_vr->type)streamer_write_hwi_in_range ((ob->main_stream), 0, (int)(VR_LAST
) - 1, (int)(parm_vr->type))
;
5381 streamer_write_wide_int (ob, parm_vr->min);
5382 streamer_write_wide_int (ob, parm_vr->max);
5383 }
5384 }
5385 }
5386 else
5387 streamer_write_uhwi (ob, 0);
5388
5389 if (ts && vec_safe_length (ts->bits) > 0)
5390 {
5391 count = ts->bits->length ();
5392 streamer_write_uhwi (ob, count);
5393
5394 for (unsigned i = 0; i < count; ++i)
5395 {
5396 const ipa_bits *bits_jfunc = (*ts->bits)[i];
5397 struct bitpack_d bp = bitpack_create (ob->main_stream);
5398 bp_pack_value (&bp, !!bits_jfunc, 1);
5399 streamer_write_bitpack (&bp);
5400 if (bits_jfunc)
5401 {
5402 streamer_write_widest_int (ob, bits_jfunc->value);
5403 streamer_write_widest_int (ob, bits_jfunc->mask);
5404 }
5405 }
5406 }
5407 else
5408 streamer_write_uhwi (ob, 0);
5409}
5410
5411/* Stream in the aggregate value replacement chain for NODE from IB. */
5412
5413static void
5414read_ipcp_transformation_info (lto_input_block *ib, cgraph_node *node,
5415 data_in *data_in)
5416{
5417 struct ipa_agg_replacement_value *aggvals = NULLnullptr;
5418 unsigned int count, i;
5419
5420 count = streamer_read_uhwi (ib);
5421 for (i = 0; i <count; i++)
5422 {
5423 struct ipa_agg_replacement_value *av;
5424 struct bitpack_d bp;
5425
5426 av = ggc_alloc<ipa_agg_replacement_value> ();
5427 av->offset = streamer_read_uhwi (ib);
5428 av->index = streamer_read_uhwi (ib);
5429 av->value = stream_read_tree (ib, data_in)streamer_hooks.read_tree (ib, data_in);
5430 bp = streamer_read_bitpack (ib);
5431 av->by_ref = bp_unpack_value (&bp, 1);
5432 av->next = aggvals;
5433 aggvals = av;
5434 }
5435 ipa_set_node_agg_value_chain (node, aggvals);
5436
5437 count = streamer_read_uhwi (ib);
5438 if (count > 0)
5439 {
5440 ipcp_transformation_initialize ();
5441 ipcp_transformation *ts = ipcp_transformation_sum->get_create (node);
5442 vec_safe_grow_cleared (ts->m_vr, count, true);
5443 for (i = 0; i < count; i++)
5444 {
5445 ipa_vr *parm_vr;
5446 parm_vr = &(*ts->m_vr)[i];
5447 struct bitpack_d bp;
5448 bp = streamer_read_bitpack (ib);
5449 parm_vr->known = bp_unpack_value (&bp, 1);
5450 if (parm_vr->known)
5451 {
5452 parm_vr->type = streamer_read_enum (ib, value_range_kind,(enum value_range_kind)streamer_read_hwi_in_range ((ib), "value_range_kind"
, 0, (int)(VR_LAST) - 1)
5453 VR_LAST)(enum value_range_kind)streamer_read_hwi_in_range ((ib), "value_range_kind"
, 0, (int)(VR_LAST) - 1)
;
5454 parm_vr->min = streamer_read_wide_int (ib);
5455 parm_vr->max = streamer_read_wide_int (ib);
5456 }
5457 }
5458 }
5459 count = streamer_read_uhwi (ib);
5460 if (count > 0)
5461 {
5462 ipcp_transformation_initialize ();
5463 ipcp_transformation *ts = ipcp_transformation_sum->get_create (node);
5464 vec_safe_grow_cleared (ts->bits, count, true);
5465
5466 for (i = 0; i < count; i++)
5467 {
5468 struct bitpack_d bp = streamer_read_bitpack (ib);
5469 bool known = bp_unpack_value (&bp, 1);
5470 if (known)
5471 {
5472 const widest_int value = streamer_read_widest_int (ib);
5473 const widest_int mask = streamer_read_widest_int (ib);
5474 ipa_bits *bits
5475 = ipa_get_ipa_bits_for_value (value, mask);
5476 (*ts->bits)[i] = bits;
5477 }
5478 }
5479 }
5480}
5481
5482/* Write all aggregate replacement for nodes in set. */
5483
5484void
5485ipcp_write_transformation_summaries (void)
5486{
5487 struct cgraph_node *node;
5488 struct output_block *ob;
5489 unsigned int count = 0;
5490 lto_symtab_encoder_iterator lsei;
5491 lto_symtab_encoder_t encoder;
5492
5493 ob = create_output_block (LTO_section_ipcp_transform);
5494 encoder = ob->decl_state->symtab_node_encoder;
5495 ob->symbol = NULLnullptr;
5496 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5497 lsei_next_function_in_partition (&lsei))
5498 {
5499 node = lsei_cgraph_node (lsei);
5500 if (node->has_gimple_body_p ())
5501 count++;
5502 }
5503
5504 streamer_write_uhwi (ob, count);
5505
5506 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5507 lsei_next_function_in_partition (&lsei))
5508 {
5509 node = lsei_cgraph_node (lsei);
5510 if (node->has_gimple_body_p ())
5511 write_ipcp_transformation_info (ob, node);
5512 }
5513 streamer_write_char_stream (ob->main_stream, 0);
5514 produce_asm (ob, NULLnullptr);
5515 destroy_output_block (ob);
5516}
5517
5518/* Read replacements section in file FILE_DATA of length LEN with data
5519 DATA. */
5520
5521static void
5522read_replacements_section (struct lto_file_decl_data *file_data,
5523 const char *data,
5524 size_t len)
5525{
5526 const struct lto_function_header *header =
5527 (const struct lto_function_header *) data;
5528 const int cfg_offset = sizeof (struct lto_function_header);
5529 const int main_offset = cfg_offset + header->cfg_size;
5530 const int string_offset = main_offset + header->main_size;
5531 class data_in *data_in;
5532 unsigned int i;
5533 unsigned int count;
5534
5535 lto_input_block ib_main ((const char *) data + main_offset,
5536 header->main_size, file_data->mode_table);
5537
5538 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
5539 header->string_size, vNULL);
5540 count = streamer_read_uhwi (&ib_main);
5541
5542 for (i = 0; i < count; i++)
5543 {
5544 unsigned int index;
5545 struct cgraph_node *node;
5546 lto_symtab_encoder_t encoder;
5547
5548 index = streamer_read_uhwi (&ib_main);
5549 encoder = file_data->symtab_node_encoder;
5550 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
5551 index));
5552 gcc_assert (node->definition)((void)(!(node->definition) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 5552, __FUNCTION__), 0 : 0))
;
5553 read_ipcp_transformation_info (&ib_main, node, data_in);
5554 }
5555 lto_free_section_data (file_data, LTO_section_jump_functions, NULLnullptr, data,
5556 len);
5557 lto_data_in_delete (data_in);
5558}
5559
5560/* Read IPA-CP aggregate replacements. */
5561
5562void
5563ipcp_read_transformation_summaries (void)
5564{
5565 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5566 struct lto_file_decl_data *file_data;
5567 unsigned int j = 0;
5568
5569 while ((file_data = file_data_vec[j++]))
5570 {
5571 size_t len;
5572 const char *data
5573 = lto_get_summary_section_data (file_data, LTO_section_ipcp_transform,
5574 &len);
5575 if (data)
5576 read_replacements_section (file_data, data, len);
5577 }
5578}
5579
5580/* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
5581 NODE. */
5582
5583static void
5584adjust_agg_replacement_values (struct cgraph_node *node,
5585 struct ipa_agg_replacement_value *aggval)
5586{
5587 struct ipa_agg_replacement_value *v;
5588 clone_info *cinfo = clone_info::get (node);
5589
5590 if (!cinfo || !cinfo->param_adjustments)
5591 return;
5592
5593 auto_vec<int, 16> new_indices;
5594 cinfo->param_adjustments->get_updated_indices (&new_indices);
5595 for (v = aggval; v; v = v->next)
5596 {
5597 gcc_checking_assert (v->index >= 0)((void)(!(v->index >= 0) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 5597, __FUNCTION__), 0 : 0))
;
5598
5599 if ((unsigned) v->index < new_indices.length ())
5600 v->index = new_indices[v->index];
5601 else
5602 /* This can happen if we know about a constant passed by reference by
5603 an argument which is never actually used for anything, let alone
5604 loading that constant. */
5605 v->index = -1;
5606 }
5607}
5608
5609/* Dominator walker driving the ipcp modification phase. */
5610
5611class ipcp_modif_dom_walker : public dom_walker
5612{
5613public:
5614 ipcp_modif_dom_walker (struct ipa_func_body_info *fbi,
5615 vec<ipa_param_descriptor, va_gc> *descs,
5616 struct ipa_agg_replacement_value *av,
5617 bool *sc)
5618 : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
5619 m_aggval (av), m_something_changed (sc) {}
5620
5621 virtual edge before_dom_children (basic_block);
5622 bool cleanup_eh ()
5623 { return gimple_purge_all_dead_eh_edges (m_need_eh_cleanup); }
5624
5625private:
5626 struct ipa_func_body_info *m_fbi;
5627 vec<ipa_param_descriptor, va_gc> *m_descriptors;
5628 struct ipa_agg_replacement_value *m_aggval;
5629 bool *m_something_changed;
5630 auto_bitmap m_need_eh_cleanup;
5631};
5632
5633edge
5634ipcp_modif_dom_walker::before_dom_children (basic_block bb)
5635{
5636 gimple_stmt_iterator gsi;
5637 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5638 {
5639 struct ipa_agg_replacement_value *v;
5640 gimple *stmt = gsi_stmt (gsi);
5641 tree rhs, val, t;
5642 HOST_WIDE_INTlong offset;
5643 poly_int64 size;
5644 int index;
5645 bool by_ref, vce;
5646
5647 if (!gimple_assign_load_p (stmt))
5648 continue;
5649 rhs = gimple_assign_rhs1 (stmt);
5650 if (!is_gimple_reg_type (TREE_TYPE (rhs)((contains_struct_check ((rhs), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 5650, __FUNCTION__))->typed.type)
))
5651 continue;
5652
5653 vce = false;
5654 t = rhs;
5655 while (handled_component_p (t))
5656 {
5657 /* V_C_E can do things like convert an array of integers to one
5658 bigger integer and similar things we do not handle below. */
5659 if (TREE_CODE (t)((enum tree_code) (t)->base.code) == VIEW_CONVERT_EXPR)
5660 {
5661 vce = true;
5662 break;
5663 }
5664 t = TREE_OPERAND (t, 0)(*((const_cast<tree*> (tree_operand_check ((t), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 5664, __FUNCTION__)))))
;
5665 }
5666 if (vce)
5667 continue;
5668
5669 if (!ipa_load_from_parm_agg (m_fbi, m_descriptors, stmt, rhs, &index,
5670 &offset, &size, &by_ref))
5671 continue;
5672 for (v = m_aggval; v; v = v->next)
5673 if (v->index == index
5674 && v->offset == offset)
5675 break;
5676 if (!v
5677 || v->by_ref != by_ref
5678 || maybe_ne (tree_to_poly_int64 (TYPE_SIZE (TREE_TYPE (v->value))((tree_class_check ((((contains_struct_check ((v->value), (
TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 5678, __FUNCTION__))->typed.type)), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 5678, __FUNCTION__))->type_common.size)
),
5679 size))
5680 continue;
5681
5682 gcc_checking_assert (is_gimple_ip_invariant (v->value))((void)(!(is_gimple_ip_invariant (v->value)) ? fancy_abort
("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 5682, __FUNCTION__), 0 : 0))
;
5683 if (!useless_type_conversion_p (TREE_TYPE (rhs)((contains_struct_check ((rhs), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 5683, __FUNCTION__))->typed.type)
, TREE_TYPE (v->value)((contains_struct_check ((v->value), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 5683, __FUNCTION__))->typed.type)
))
5684 {
5685 if (fold_convertible_p (TREE_TYPE (rhs)((contains_struct_check ((rhs), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 5685, __FUNCTION__))->typed.type)
, v->value))
5686 val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value)fold_build1_loc (((location_t) 0), NOP_EXPR, ((contains_struct_check
((rhs), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 5686, __FUNCTION__))->typed.type), v->value )
;
5687 else if (TYPE_SIZE (TREE_TYPE (rhs))((tree_class_check ((((contains_struct_check ((rhs), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 5687, __FUNCTION__))->typed.type)), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 5687, __FUNCTION__))->type_common.size)
5688 == TYPE_SIZE (TREE_TYPE (v->value))((tree_class_check ((((contains_struct_check ((v->value), (
TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 5688, __FUNCTION__))->typed.type)), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 5688, __FUNCTION__))->type_common.size)
)
5689 val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value)fold_build1_loc (((location_t) 0), VIEW_CONVERT_EXPR, ((contains_struct_check
((rhs), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 5689, __FUNCTION__))->typed.type), v->value )
;
5690 else
5691 {
5692 if (dump_file)
5693 {
5694 fprintf (dump_file, " const ");
5695 print_generic_expr (dump_file, v->value);
5696 fprintf (dump_file, " can't be converted to type of ");
5697 print_generic_expr (dump_file, rhs);
5698 fprintf (dump_file, "\n");
5699 }
5700 continue;
5701 }
5702 }
5703 else
5704 val = v->value;
5705
5706 if (dump_file && (dump_flags & TDF_DETAILS))
5707 {
5708 fprintf (dump_file, "Modifying stmt:\n ");
5709 print_gimple_stmt (dump_file, stmt, 0);
5710 }
5711 gimple_assign_set_rhs_from_tree (&gsi, val);
5712 update_stmt (stmt);
5713
5714 if (dump_file && (dump_flags & TDF_DETAILS))
5715 {
5716 fprintf (dump_file, "into:\n ");
5717 print_gimple_stmt (dump_file, stmt, 0);
5718 fprintf (dump_file, "\n");
5719 }
5720
5721 *m_something_changed = true;
5722 if (maybe_clean_eh_stmt (stmt))
5723 bitmap_set_bit (m_need_eh_cleanup, bb->index);
5724 }
5725 return NULLnullptr;
5726}
5727
5728/* Return true if we have recorded VALUE and MASK about PARM.
5729 Set VALUE and MASK accordingly. */
5730
5731bool
5732ipcp_get_parm_bits (tree parm, tree *value, widest_int *mask)
5733{
5734 cgraph_node *cnode = cgraph_node::get (current_function_decl);
5735 ipcp_transformation *ts = ipcp_get_transformation_summary (cnode);
5736 if (!ts || vec_safe_length (ts->bits) == 0)
5737 return false;
5738
5739 int i = 0;
5740 for (tree p = DECL_ARGUMENTS (current_function_decl)((tree_check ((current_function_decl), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 5740, __FUNCTION__, (FUNCTION_DECL)))->function_decl.arguments
)
;
5741 p != parm; p = DECL_CHAIN (p)(((contains_struct_check (((contains_struct_check ((p), (TS_DECL_MINIMAL
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 5741, __FUNCTION__))), (TS_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 5741, __FUNCTION__))->common.chain))
)
5742 {
5743 i++;
5744 /* Ignore static chain. */
5745 if (!p)
5746 return false;
5747 }
5748
5749 clone_info *cinfo = clone_info::get (cnode);
5750 if (cinfo && cinfo->param_adjustments)
5751 {
5752 i = cinfo->param_adjustments->get_original_index (i);
5753 if (i < 0)
5754 return false;
5755 }
5756
5757 vec<ipa_bits *, va_gc> &bits = *ts->bits;
5758 if (!bits[i])
5759 return false;
5760 *mask = bits[i]->mask;
5761 *value = wide_int_to_tree (TREE_TYPE (parm)((contains_struct_check ((parm), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 5761, __FUNCTION__))->typed.type)
, bits[i]->value);
5762 return true;
5763}
5764
5765
5766/* Update bits info of formal parameters as described in
5767 ipcp_transformation. */
5768
5769static void
5770ipcp_update_bits (struct cgraph_node *node)
5771{
5772 ipcp_transformation *ts = ipcp_get_transformation_summary (node);
5773
5774 if (!ts || vec_safe_length (ts->bits) == 0)
5775 return;
5776 vec<ipa_bits *, va_gc> &bits = *ts->bits;
5777 unsigned count = bits.length ();
5778 if (!count)
5779 return;
5780
5781 auto_vec<int, 16> new_indices;
5782 bool need_remapping = false;
5783 clone_info *cinfo = clone_info::get (node);
5784 if (cinfo && cinfo->param_adjustments)
5785 {
5786 cinfo->param_adjustments->get_updated_indices (&new_indices);
5787 need_remapping = true;
5788 }
5789 auto_vec <tree, 16> parm_decls;
5790 push_function_arg_decls (&parm_decls, node->decl);
5791
5792 for (unsigned i = 0; i < count; ++i)
5793 {
5794 tree parm;
5795 if (need_remapping)
5796 {
5797 if (i >= new_indices.length ())
5798 continue;
5799 int idx = new_indices[i];
5800 if (idx < 0)
5801 continue;
5802 parm = parm_decls[idx];
5803 }
5804 else
5805 parm = parm_decls[i];
5806 gcc_checking_assert (parm)((void)(!(parm) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 5806, __FUNCTION__), 0 : 0))
;
5807
5808
5809 if (!bits[i]
5810 || !(INTEGRAL_TYPE_P (TREE_TYPE (parm))(((enum tree_code) (((contains_struct_check ((parm), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 5810, __FUNCTION__))->typed.type))->base.code) == ENUMERAL_TYPE
|| ((enum tree_code) (((contains_struct_check ((parm), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 5810, __FUNCTION__))->typed.type))->base.code) == BOOLEAN_TYPE
|| ((enum tree_code) (((contains_struct_check ((parm), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 5810, __FUNCTION__))->typed.type))->base.code) == INTEGER_TYPE
)
5811 || POINTER_TYPE_P (TREE_TYPE (parm))(((enum tree_code) (((contains_struct_check ((parm), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 5811, __FUNCTION__))->typed.type))->base.code) == POINTER_TYPE
|| ((enum tree_code) (((contains_struct_check ((parm), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 5811, __FUNCTION__))->typed.type))->base.code) == REFERENCE_TYPE
)
)
5812 || !is_gimple_reg (parm))
5813 continue;
5814
5815 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl)((tree_check ((node->decl), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 5815, __FUNCTION__, (FUNCTION_DECL)))->function_decl.f)
, parm);
5816 if (!ddef)
5817 continue;
5818
5819 if (dump_file)
5820 {
5821 fprintf (dump_file, "Adjusting mask for param %u to ", i);
5822 print_hex (bits[i]->mask, dump_file);
5823 fprintf (dump_file, "\n");
5824 }
5825
5826 if (INTEGRAL_TYPE_P (TREE_TYPE (ddef))(((enum tree_code) (((contains_struct_check ((ddef), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 5826, __FUNCTION__))->typed.type))->base.code) == ENUMERAL_TYPE
|| ((enum tree_code) (((contains_struct_check ((ddef), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 5826, __FUNCTION__))->typed.type))->base.code) == BOOLEAN_TYPE
|| ((enum tree_code) (((contains_struct_check ((ddef), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 5826, __FUNCTION__))->typed.type))->base.code) == INTEGER_TYPE
)
)
5827 {
5828 unsigned prec = TYPE_PRECISION (TREE_TYPE (ddef))((tree_class_check ((((contains_struct_check ((ddef), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 5828, __FUNCTION__))->typed.type)), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 5828, __FUNCTION__))->type_common.precision)
;
5829 signop sgn = TYPE_SIGN (TREE_TYPE (ddef))((signop) ((tree_class_check ((((contains_struct_check ((ddef
), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 5829, __FUNCTION__))->typed.type)), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 5829, __FUNCTION__))->base.u.bits.unsigned_flag))
;
5830
5831 wide_int nonzero_bits = wide_int::from (bits[i]->mask, prec, UNSIGNED)
5832 | wide_int::from (bits[i]->value, prec, sgn);
5833 set_nonzero_bits (ddef, nonzero_bits);
5834 }
5835 else
5836 {
5837 unsigned tem = bits[i]->mask.to_uhwi ();
5838 unsigned HOST_WIDE_INTlong bitpos = bits[i]->value.to_uhwi ();
5839 unsigned align = tem & -tem;
5840 unsigned misalign = bitpos & (align - 1);
5841
5842 if (align > 1)
5843 {
5844 if (dump_file)
5845 fprintf (dump_file, "Adjusting align: %u, misalign: %u\n", align, misalign);
5846
5847 unsigned old_align, old_misalign;
5848 struct ptr_info_def *pi = get_ptr_info (ddef);
5849 bool old_known = get_ptr_info_alignment (pi, &old_align, &old_misalign);
5850
5851 if (old_known
5852 && old_align > align)
5853 {
5854 if (dump_file)
5855 {
5856 fprintf (dump_file, "But alignment was already %u.\n", old_align);
5857 if ((old_misalign & (align - 1)) != misalign)
5858 fprintf (dump_file, "old_misalign (%u) and misalign (%u) mismatch\n",
5859 old_misalign, misalign);
5860 }
5861 continue;
5862 }
5863
5864 if (old_known
5865 && ((misalign & (old_align - 1)) != old_misalign)
5866 && dump_file)
5867 fprintf (dump_file, "old_misalign (%u) and misalign (%u) mismatch\n",
5868 old_misalign, misalign);
5869
5870 set_ptr_info_alignment (pi, align, misalign);
5871 }
5872 }
5873 }
5874}
5875
5876bool
5877ipa_vr::nonzero_p (tree expr_type) const
5878{
5879 if (type == VR_ANTI_RANGE && wi::eq_p (min, 0) && wi::eq_p (max, 0))
5880 return true;
5881
5882 unsigned prec = TYPE_PRECISION (expr_type)((tree_class_check ((expr_type), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 5882, __FUNCTION__))->type_common.precision)
;
5883 return (type == VR_RANGE
5884 && TYPE_UNSIGNED (expr_type)((tree_class_check ((expr_type), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 5884, __FUNCTION__))->base.u.bits.unsigned_flag)
5885 && wi::eq_p (min, wi::one (prec))
5886 && wi::eq_p (max, wi::max_value (prec, TYPE_SIGN (expr_type)((signop) ((tree_class_check ((expr_type), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 5886, __FUNCTION__))->base.u.bits.unsigned_flag))
)));
5887}
5888
5889/* Update value range of formal parameters as described in
5890 ipcp_transformation. */
5891
5892static void
5893ipcp_update_vr (struct cgraph_node *node)
5894{
5895 ipcp_transformation *ts = ipcp_get_transformation_summary (node);
5896 if (!ts || vec_safe_length (ts->m_vr) == 0)
5897 return;
5898 const vec<ipa_vr, va_gc> &vr = *ts->m_vr;
5899 unsigned count = vr.length ();
5900 if (!count)
5901 return;
5902
5903 auto_vec<int, 16> new_indices;
5904 bool need_remapping = false;
5905 clone_info *cinfo = clone_info::get (node);
5906 if (cinfo && cinfo->param_adjustments)
5907 {
5908 cinfo->param_adjustments->get_updated_indices (&new_indices);
5909 need_remapping = true;
5910 }
5911 auto_vec <tree, 16> parm_decls;
5912 push_function_arg_decls (&parm_decls, node->decl);
5913
5914 for (unsigned i = 0; i < count; ++i)
5915 {
5916 tree parm;
5917 int remapped_idx;
5918 if (need_remapping)
5919 {
5920 if (i >= new_indices.length ())
5921 continue;
5922 remapped_idx = new_indices[i];
5923 if (remapped_idx < 0)
5924 continue;
5925 }
5926 else
5927 remapped_idx = i;
5928
5929 parm = parm_decls[remapped_idx];
5930
5931 gcc_checking_assert (parm)((void)(!(parm) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 5931, __FUNCTION__), 0 : 0))
;
5932 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl)((tree_check ((node->decl), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 5932, __FUNCTION__, (FUNCTION_DECL)))->function_decl.f)
, parm);
5933
5934 if (!ddef || !is_gimple_reg (parm))
5935 continue;
5936
5937 if (vr[i].known
5938 && (vr[i].type == VR_RANGE || vr[i].type == VR_ANTI_RANGE))
5939 {
5940 tree type = TREE_TYPE (ddef)((contains_struct_check ((ddef), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-prop.c"
, 5940, __FUNCTION__))->typed.type)
;
5941 unsigned