Bug Summary

File: build/gcc/cgraph.cc
Warning: line 1671, column 6
Called C++ object pointer is null
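
The snippet below is a minimal illustrative sketch of the defect class named above ("Called C++ object pointer is null", the diagnostic emitted for a member call made through a pointer the analyzer knows is null). It is not the code at cgraph.cc:1671; the type and function names are hypothetical and only show the pattern the analyzer flags: one branch assumes the pointer may be null, and a later member call still executes on that null path.

    // Hypothetical reduced example (not from cgraph.cc).
    struct node { void visit (); };

    void example (node *n)
    {
      if (n != nullptr)
        ;            // analyzer splits the state; on one path n == nullptr
      n->visit ();   // member call also reached on the null path:
                     // "Called C++ object pointer is null"
    }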

Annotated Source Code


clang -cc1 -cc1 -triple x86_64-suse-linux -analyze -disable-free -clear-ast-before-backend -disable-llvm-verifier -discard-value-names -main-file-name cgraph.cc -analyzer-checker=core -analyzer-checker=apiModeling -analyzer-checker=unix -analyzer-checker=deadcode -analyzer-checker=cplusplus -analyzer-checker=security.insecureAPI.UncheckedReturn -analyzer-checker=security.insecureAPI.getpw -analyzer-checker=security.insecureAPI.gets -analyzer-checker=security.insecureAPI.mktemp -analyzer-checker=security.insecureAPI.mkstemp -analyzer-checker=security.insecureAPI.vfork -analyzer-checker=nullability.NullPassedToNonnull -analyzer-checker=nullability.NullReturnedFromNonnull -analyzer-output plist -w -setup-static-analyzer -analyzer-config-compatibility-mode=true -mrelocation-model static -mframe-pointer=none -fmath-errno -ffp-contract=on -fno-rounding-math -mconstructor-aliases -funwind-tables=2 -target-cpu x86-64 -tune-cpu generic -debugger-tuning=gdb -fcoverage-compilation-dir=/buildworker/marxinbox-gcc-clang-static-analyzer/objdir/gcc -resource-dir /usr/lib64/clang/15.0.7 -D IN_GCC -D HAVE_CONFIG_H -I . -I . -I /buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc -I /buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/. -I /buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../include -I /buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libcpp/include -I /buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libcody -I /buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libdecnumber -I /buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libdecnumber/bid -I ../libdecnumber -I /buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libbacktrace -internal-isystem /usr/bin/../lib64/gcc/x86_64-suse-linux/13/../../../../include/c++/13 -internal-isystem /usr/bin/../lib64/gcc/x86_64-suse-linux/13/../../../../include/c++/13/x86_64-suse-linux -internal-isystem /usr/bin/../lib64/gcc/x86_64-suse-linux/13/../../../../include/c++/13/backward -internal-isystem /usr/lib64/clang/15.0.7/include -internal-isystem /usr/local/include -internal-isystem /usr/bin/../lib64/gcc/x86_64-suse-linux/13/../../../../x86_64-suse-linux/include -internal-externc-isystem /include -internal-externc-isystem /usr/include -O2 -Wno-narrowing -Wwrite-strings -Wno-long-long -Wno-variadic-macros -Wno-overlength-strings -fdeprecated-macro -fdebug-compilation-dir=/buildworker/marxinbox-gcc-clang-static-analyzer/objdir/gcc -ferror-limit 19 -fno-rtti -fgnuc-version=4.2.1 -vectorize-loops -vectorize-slp -analyzer-output=plist-html -analyzer-config silence-checkers=core.NullDereference -faddrsig -D__GCC_HAVE_DWARF2_CFI_ASM=1 -o /buildworker/marxinbox-gcc-clang-static-analyzer/objdir/clang-static-analyzer/2023-03-27-141847-20772-1/report-VXRHyk.plist -x c++ /buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc

/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc

1/* Callgraph handling code.
2 Copyright (C) 2003-2023 Free Software Foundation, Inc.
3 Contributed by Jan Hubicka
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it under
8the terms of the GNU General Public License as published by the Free
9Software Foundation; either version 3, or (at your option) any later
10version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING3. If not see
19<http://www.gnu.org/licenses/>. */
20
21/* This file contains basic routines manipulating call graph
22
23 The call-graph is a data structure designed for inter-procedural
24 optimization. It represents a multi-graph where nodes are functions
25 (symbols within symbol table) and edges are call sites. */
26
27#include "config.h"
28#include "system.h"
29#include "coretypes.h"
30#include "backend.h"
31#include "target.h"
32#include "rtl.h"
33#include "tree.h"
34#include "gimple.h"
35#include "predict.h"
36#include "alloc-pool.h"
37#include "gimple-ssa.h"
38#include "cgraph.h"
39#include "lto-streamer.h"
40#include "fold-const.h"
41#include "varasm.h"
42#include "calls.h"
43#include "print-tree.h"
44#include "langhooks.h"
45#include "intl.h"
46#include "tree-eh.h"
47#include "gimple-iterator.h"
48#include "tree-cfg.h"
49#include "tree-ssa.h"
50#include "value-prof.h"
51#include "ipa-utils.h"
52#include "symbol-summary.h"
53#include "tree-vrp.h"
54#include "ipa-prop.h"
55#include "ipa-fnsummary.h"
56#include "cfgloop.h"
57#include "gimple-pretty-print.h"
58#include "tree-dfa.h"
59#include "profile.h"
60#include "context.h"
61#include "gimplify.h"
62#include "stringpool.h"
63#include "attribs.h"
64#include "selftest.h"
65#include "tree-into-ssa.h"
66#include "ipa-inline.h"
67#include "tree-nested.h"
68#include "symtab-thunks.h"
69#include "symtab-clones.h"
70
71/* FIXME: Only for PROP_loops, but cgraph shouldn't have to know about this. */
72#include "tree-pass.h"
73
74/* Queue of cgraph nodes scheduled to be lowered. */
75symtab_node *x_cgraph_nodes_queue;
76#define cgraph_nodes_queue ((cgraph_node *)x_cgraph_nodes_queue)
77
78/* Symbol table global context. */
79symbol_table *symtab;
80
81/* List of hooks triggered on cgraph_edge events. */
82struct cgraph_edge_hook_list {
83 cgraph_edge_hook hook;
84 void *data;
85 struct cgraph_edge_hook_list *next;
86};
87
88/* List of hooks triggered on cgraph_node events. */
89struct cgraph_node_hook_list {
90 cgraph_node_hook hook;
91 void *data;
92 struct cgraph_node_hook_list *next;
93};
94
95/* List of hooks triggered on events involving two cgraph_edges. */
96struct cgraph_2edge_hook_list {
97 cgraph_2edge_hook hook;
98 void *data;
99 struct cgraph_2edge_hook_list *next;
100};
101
102/* List of hooks triggered on events involving two cgraph_nodes. */
103struct cgraph_2node_hook_list {
104 cgraph_2node_hook hook;
105 void *data;
106 struct cgraph_2node_hook_list *next;
107};
108
109/* Hash descriptor for cgraph_function_version_info. */
110
111struct function_version_hasher : ggc_ptr_hash<cgraph_function_version_info>
112{
113 static hashval_t hash (cgraph_function_version_info *);
114 static bool equal (cgraph_function_version_info *,
115 cgraph_function_version_info *);
116};
117
118/* Map a cgraph_node to cgraph_function_version_info using this htab.
119 The cgraph_function_version_info has a THIS_NODE field that is the
120 corresponding cgraph_node. */
121
122static GTY(()) hash_table<function_version_hasher> *cgraph_fnver_htab = NULL;
123
124/* Hash function for cgraph_fnver_htab. */
125hashval_t
126function_version_hasher::hash (cgraph_function_version_info *ptr)
127{
128 int uid = ptr->this_node->get_uid ();
129 return (hashval_t)(uid);
130}
131
132/* eq function for cgraph_fnver_htab. */
133bool
134function_version_hasher::equal (cgraph_function_version_info *n1,
135 cgraph_function_version_info *n2)
136{
137 return n1->this_node->get_uid () == n2->this_node->get_uid ();
138}
139
140/* Mark as GC root all allocated nodes. */
141static GTY(()) struct cgraph_function_version_info *
142 version_info_node = NULL;
143
144/* Return true if NODE's address can be compared. */
145
146bool
147symtab_node::address_can_be_compared_p ()
148{
149 /* Address of virtual tables and functions is never compared. */
150 if (DECL_VIRTUAL_P (decl))
151 return false;
152 /* Address of C++ cdtors is never compared. */
153 if (is_a <cgraph_node *> (this)
154 && (DECL_CXX_CONSTRUCTOR_P (decl)
155 || DECL_CXX_DESTRUCTOR_P (decl)))
156 return false;
157 /* Constant pool symbols addresses are never compared.
158 flag_merge_constants permits us to assume the same on readonly vars. */
159 if (is_a <varpool_node *> (this)
160 && (DECL_IN_CONSTANT_POOL (decl)
161 || (flag_merge_constants >= 2
162 && TREE_READONLY (decl) && !TREE_THIS_VOLATILE (decl))))
163 return false;
164 return true;
165}
166
167/* Get the cgraph_function_version_info node corresponding to node. */
168cgraph_function_version_info *
169cgraph_node::function_version (void)
170{
171 cgraph_function_version_info key;
172 key.this_node = this;
173
174 if (cgraph_fnver_htab == NULL)
175 return NULL;
176
177 return cgraph_fnver_htab->find (&key);
178}
179
180/* Insert a new cgraph_function_version_info node into cgraph_fnver_htab
181 corresponding to cgraph_node NODE. */
182cgraph_function_version_info *
183cgraph_node::insert_new_function_version (void)
184{
185 version_info_node = NULL;
186 version_info_node = ggc_cleared_alloc<cgraph_function_version_info> ();
187 version_info_node->this_node = this;
188
189 if (cgraph_fnver_htab == NULL)
190 cgraph_fnver_htab = hash_table<function_version_hasher>::create_ggc (2);
191
192 *cgraph_fnver_htab->find_slot (version_info_node, INSERT)
193 = version_info_node;
194 return version_info_node;
195}
196
197/* Remove the cgraph_function_version_info node given by DECL_V. */
198static void
199delete_function_version (cgraph_function_version_info *decl_v)
200{
201 if (decl_v == NULL)
202 return;
203
204 if (version_info_node == decl_v)
205 version_info_node = NULL;
206
207 if (decl_v->prev != NULL)
208 decl_v->prev->next = decl_v->next;
209
210 if (decl_v->next != NULL)
211 decl_v->next->prev = decl_v->prev;
212
213 if (cgraph_fnver_htab != NULL)
214 cgraph_fnver_htab->remove_elt (decl_v);
215}
216
217/* Remove the cgraph_function_version_info and cgraph_node for DECL. This
218 DECL is a duplicate declaration. */
219void
220cgraph_node::delete_function_version_by_decl (tree decl)
221{
222 cgraph_node *decl_node = cgraph_node::get (decl);
223
224 if (decl_node == NULL)
225 return;
226
227 delete_function_version (decl_node->function_version ());
228
229 decl_node->remove ();
230}
231
232/* Record that DECL1 and DECL2 are semantically identical function
233 versions. */
234void
235cgraph_node::record_function_versions (tree decl1, tree decl2)
236{
237 cgraph_node *decl1_node = cgraph_node::get_create (decl1);
238 cgraph_node *decl2_node = cgraph_node::get_create (decl2);
239 cgraph_function_version_info *decl1_v = NULL;
240 cgraph_function_version_info *decl2_v = NULL;
241 cgraph_function_version_info *before;
242 cgraph_function_version_info *after;
243
244 gcc_assert (decl1_node != NULL && decl2_node != NULL);
245 decl1_v = decl1_node->function_version ();
246 decl2_v = decl2_node->function_version ();
247
248 if (decl1_v != NULL && decl2_v != NULL)
249 return;
250
251 if (decl1_v == NULL)
252 decl1_v = decl1_node->insert_new_function_version ();
253
254 if (decl2_v == NULL)
255 decl2_v = decl2_node->insert_new_function_version ();
256
257 /* Chain decl2_v and decl1_v. All semantically identical versions
258 will be chained together. */
259
260 before = decl1_v;
261 after = decl2_v;
262
263 while (before->next != NULL)
264 before = before->next;
265
266 while (after->prev != NULL)
267 after = after->prev;
268
269 before->next = after;
270 after->prev = before;
271}
272
273/* Initialize callgraph dump file. */
274
275void
276symbol_table::initialize (void)
277{
278 if (!dump_file)
279 dump_file = dump_begin (TDI_cgraph, NULL);
280
281 if (!ipa_clones_dump_file)
282 ipa_clones_dump_file = dump_begin (TDI_clones, NULL);
283}
284
285/* Allocate new callgraph node and insert it into basic data structures. */
286
287cgraph_node *
288symbol_table::create_empty (void)
289{
290 cgraph_count++;
291 return new (ggc_alloc<cgraph_node> ()) cgraph_node (cgraph_max_uid++);
292}
293
294/* Register HOOK to be called with DATA on each removed edge. */
295cgraph_edge_hook_list *
296symbol_table::add_edge_removal_hook (cgraph_edge_hook hook, void *data)
297{
298 cgraph_edge_hook_list *entry;
299 cgraph_edge_hook_list **ptr = &m_first_edge_removal_hook;
300
301 entry = (cgraph_edge_hook_list *) xmalloc (sizeof (*entry));
302 entry->hook = hook;
303 entry->data = data;
304 entry->next = NULL;
305 while (*ptr)
306 ptr = &(*ptr)->next;
307 *ptr = entry;
308 return entry;
309}
310
311/* Remove ENTRY from the list of hooks called on removing edges. */
312void
313symbol_table::remove_edge_removal_hook (cgraph_edge_hook_list *entry)
314{
315 cgraph_edge_hook_list **ptr = &m_first_edge_removal_hook;
316
317 while (*ptr != entry)
318 ptr = &(*ptr)->next;
319 *ptr = entry->next;
320 free (entry);
321}
322
323/* Call all edge removal hooks. */
324void
325symbol_table::call_edge_removal_hooks (cgraph_edge *e)
326{
327 cgraph_edge_hook_list *entry = m_first_edge_removal_hook;
328 while (entry)
329 {
330 entry->hook (e, entry->data);
331 entry = entry->next;
332 }
333}
334
335/* Register HOOK to be called with DATA on each removed node. */
336cgraph_node_hook_list *
337symbol_table::add_cgraph_removal_hook (cgraph_node_hook hook, void *data)
338{
339 cgraph_node_hook_list *entry;
340 cgraph_node_hook_list **ptr = &m_first_cgraph_removal_hook;
341
342 entry = (cgraph_node_hook_list *) xmalloc (sizeof (*entry));
343 entry->hook = hook;
344 entry->data = data;
345 entry->next = NULL;
346 while (*ptr)
347 ptr = &(*ptr)->next;
348 *ptr = entry;
349 return entry;
350}
351
352/* Remove ENTRY from the list of hooks called on removing nodes. */
353void
354symbol_table::remove_cgraph_removal_hook (cgraph_node_hook_list *entry)
355{
356 cgraph_node_hook_list **ptr = &m_first_cgraph_removal_hook;
357
358 while (*ptr != entry)
359 ptr = &(*ptr)->next;
360 *ptr = entry->next;
361 free (entry);
362}
363
364/* Call all node removal hooks. */
365void
366symbol_table::call_cgraph_removal_hooks (cgraph_node *node)
367{
368 cgraph_node_hook_list *entry = m_first_cgraph_removal_hook;
369 while (entry)
370 {
371 entry->hook (node, entry->data);
372 entry = entry->next;
373 }
374}
375
376/* Call all node removal hooks. */
377void
378symbol_table::call_cgraph_insertion_hooks (cgraph_node *node)
379{
380 cgraph_node_hook_list *entry = m_first_cgraph_insertion_hook;
381 while (entry)
382 {
383 entry->hook (node, entry->data);
384 entry = entry->next;
385 }
386}
387
388
389/* Register HOOK to be called with DATA on each inserted node. */
390cgraph_node_hook_list *
391symbol_table::add_cgraph_insertion_hook (cgraph_node_hook hook, void *data)
392{
393 cgraph_node_hook_list *entry;
394 cgraph_node_hook_list **ptr = &m_first_cgraph_insertion_hook;
395
396 entry = (cgraph_node_hook_list *) xmalloc (sizeof (*entry));
397 entry->hook = hook;
398 entry->data = data;
399 entry->next = NULL;
400 while (*ptr)
401 ptr = &(*ptr)->next;
402 *ptr = entry;
403 return entry;
404}
405
406/* Remove ENTRY from the list of hooks called on inserted nodes. */
407void
408symbol_table::remove_cgraph_insertion_hook (cgraph_node_hook_list *entry)
409{
410 cgraph_node_hook_list **ptr = &m_first_cgraph_insertion_hook;
411
412 while (*ptr != entry)
413 ptr = &(*ptr)->next;
414 *ptr = entry->next;
415 free (entry);
416}
417
418/* Register HOOK to be called with DATA on each duplicated edge. */
419cgraph_2edge_hook_list *
420symbol_table::add_edge_duplication_hook (cgraph_2edge_hook hook, void *data)
421{
422 cgraph_2edge_hook_list *entry;
423 cgraph_2edge_hook_list **ptr = &m_first_edge_duplicated_hook;
424
425 entry = (cgraph_2edge_hook_list *) xmalloc (sizeof (*entry));
426 entry->hook = hook;
427 entry->data = data;
428 entry->next = NULL;
429 while (*ptr)
430 ptr = &(*ptr)->next;
431 *ptr = entry;
432 return entry;
433}
434
435/* Remove ENTRY from the list of hooks called on duplicating edges. */
436void
437symbol_table::remove_edge_duplication_hook (cgraph_2edge_hook_list *entry)
438{
439 cgraph_2edge_hook_list **ptr = &m_first_edge_duplicated_hook;
440
441 while (*ptr != entry)
442 ptr = &(*ptr)->next;
443 *ptr = entry->next;
444 free (entry);
445}
446
447/* Call all edge duplication hooks. */
448void
449symbol_table::call_edge_duplication_hooks (cgraph_edge *cs1, cgraph_edge *cs2)
450{
451 cgraph_2edge_hook_list *entry = m_first_edge_duplicated_hook;
452 while (entry)
453 {
454 entry->hook (cs1, cs2, entry->data);
455 entry = entry->next;
456 }
457}
458
459/* Register HOOK to be called with DATA on each duplicated node. */
460cgraph_2node_hook_list *
461symbol_table::add_cgraph_duplication_hook (cgraph_2node_hook hook, void *data)
462{
463 cgraph_2node_hook_list *entry;
464 cgraph_2node_hook_list **ptr = &m_first_cgraph_duplicated_hook;
465
466 entry = (cgraph_2node_hook_list *) xmalloc (sizeof (*entry));
467 entry->hook = hook;
468 entry->data = data;
469 entry->next = NULL;
470 while (*ptr)
471 ptr = &(*ptr)->next;
472 *ptr = entry;
473 return entry;
474}
475
476/* Remove ENTRY from the list of hooks called on duplicating nodes. */
477void
478symbol_table::remove_cgraph_duplication_hook (cgraph_2node_hook_list *entry)
479{
480 cgraph_2node_hook_list **ptr = &m_first_cgraph_duplicated_hook;
481
482 while (*ptr != entry)
483 ptr = &(*ptr)->next;
484 *ptr = entry->next;
485 free (entry);
486}
487
488/* Call all node duplication hooks. */
489void
490symbol_table::call_cgraph_duplication_hooks (cgraph_node *node,
491 cgraph_node *node2)
492{
493 cgraph_2node_hook_list *entry = m_first_cgraph_duplicated_hook;
494 while (entry)
495 {
496 entry->hook (node, node2, entry->data);
497 entry = entry->next;
498 }
499}
500
501/* Return cgraph node assigned to DECL. Create new one when needed. */
502
503cgraph_node *
504cgraph_node::create (tree decl)
505{
506 cgraph_node *node = symtab->create_empty ();
507 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
508
509 node->decl = decl;
510 node->semantic_interposition = opt_for_fn (decl, flag_semantic_interposition);
511
512 if ((flag_openacc || flag_openmp)
513 && lookup_attribute ("omp declare target", DECL_ATTRIBUTES (decl)))
514 {
515 node->offloadable = 1;
516 if (ENABLE_OFFLOADING)
517 g->have_offload = true;
518 }
519
520 if (lookup_attribute ("ifunc", DECL_ATTRIBUTES (decl)))
521 node->ifunc_resolver = true;
522
523 node->register_symbol ();
524 maybe_record_nested_function (node);
525
526 return node;
527}
528
529/* Try to find a call graph node for declaration DECL and if it does not exist
530 or if it corresponds to an inline clone, create a new one. */
531
532cgraph_node *
533cgraph_node::get_create (tree decl)
534{
535 cgraph_node *first_clone = cgraph_node::get (decl);
536
537 if (first_clone && !first_clone->inlined_to)
538 return first_clone;
539
540 cgraph_node *node = cgraph_node::create (decl);
541 if (first_clone)
542 {
543 first_clone->clone_of = node;
544 node->clones = first_clone;
545 node->order = first_clone->order;
546 symtab->symtab_prevail_in_asm_name_hash (node);
547 node->decl->decl_with_vis.symtab_node = node;
548 if (dump_file && symtab->state != PARSING)
549 fprintf (dump_file, "Introduced new external node "
550 "(%s) and turned into root of the clone tree.\n",
551 node->dump_name ());
552 }
553 else if (dump_file && symtab->state != PARSING)
554 fprintf (dump_file, "Introduced new external node "
555 "(%s).\n", node->dump_name ());
556 return node;
557}
558
559/* Mark ALIAS as an alias to DECL. DECL_NODE is cgraph node representing
560 the function body is associated with
561 (not necessarily cgraph_node (DECL)). */
562
563cgraph_node *
564cgraph_node::create_alias (tree alias, tree target)
565{
566 cgraph_node *alias_node;
567
568 gcc_assert (TREE_CODE (target) == FUNCTION_DECL
569 || TREE_CODE (target) == IDENTIFIER_NODE);
570 gcc_assert (TREE_CODE (alias) == FUNCTION_DECL);
571 alias_node = cgraph_node::get_create (alias);
572 gcc_assert (!alias_node->definition);
573 alias_node->alias_target = target;
574 alias_node->definition = true;
575 alias_node->alias = true;
576 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (alias)) != NULL)
577 alias_node->transparent_alias = alias_node->weakref = true;
578 if (lookup_attribute ("ifunc", DECL_ATTRIBUTES (alias)))
579 alias_node->ifunc_resolver = true;
580 return alias_node;
581}
582
583/* Attempt to mark ALIAS as an alias to DECL. Return alias node if successful
584 and NULL otherwise.
585 Same body aliases are output whenever the body of DECL is output,
586 and cgraph_node::get (ALIAS) transparently returns
587 cgraph_node::get (DECL). */
588
589cgraph_node *
590cgraph_node::create_same_body_alias (tree alias, tree decl)
591{
592 cgraph_node *n;
593
594 /* If aliases aren't supported by the assembler, fail. */
595 if (!TARGET_SUPPORTS_ALIASES)
596 return NULL;
597
598 /* Langhooks can create same body aliases of symbols not defined.
599 Those are useless. Drop them on the floor. */
600 if (symtab->global_info_ready)
601 return NULL;
602
603 n = cgraph_node::create_alias (alias, decl);
604 n->cpp_implicit_alias = true;
605 if (symtab->cpp_implicit_aliases_done)
606 n->resolve_alias (cgraph_node::get (decl));
607 return n;
608}
609
610/* Add thunk alias into callgraph. The alias declaration is ALIAS and it
611 aliases DECL with an adjustments made into the first parameter.
612 See comments in struct cgraph_thunk_info for detail on the parameters. */
613
614cgraph_node *
615cgraph_node::create_thunk (tree alias, tree, bool this_adjusting,
616 HOST_WIDE_INTlong fixed_offset,
617 HOST_WIDE_INTlong virtual_value,
618 HOST_WIDE_INTlong indirect_offset,
619 tree virtual_offset,
620 tree real_alias)
621{
622 cgraph_node *node;
623
624 node = cgraph_node::get (alias);
625 if (node)
626 node->reset ();
627 else
628 node = cgraph_node::create (alias);
629
630 /* Make sure that if VIRTUAL_OFFSET is in sync with VIRTUAL_VALUE. */
631 gcc_checking_assert (virtual_offset
632 ? virtual_value == wi::to_wide (virtual_offset)
633 : virtual_value == 0);
634
635 node->thunk = true;
636 node->definition = true;
637
638 thunk_info *i;
639 thunk_info local_info;
640 if (symtab->state < CONSTRUCTION)
641 i = &local_info;
642 else
643 i = thunk_info::get_create (node);
644 i->fixed_offset = fixed_offset;
645 i->virtual_value = virtual_value;
646 i->indirect_offset = indirect_offset;
647 i->alias = real_alias;
648 i->this_adjusting = this_adjusting;
649 i->virtual_offset_p = virtual_offset != NULL;
650 if (symtab->state < CONSTRUCTION)
651 i->register_early (node);
652
653 return node;
654}
655
656/* Return the cgraph node that has ASMNAME for its DECL_ASSEMBLER_NAME.
657 Return NULL if there's no such node. */
658
659cgraph_node *
660cgraph_node::get_for_asmname (tree asmname)
661{
662 /* We do not want to look at inline clones. */
663 for (symtab_node *node = symtab_node::get_for_asmname (asmname);
664 node;
665 node = node->next_sharing_asm_name)
666 {
667 cgraph_node *cn = dyn_cast <cgraph_node *> (node);
668 if (cn && !cn->inlined_to)
669 return cn;
670 }
671 return NULL;
672}
673
674/* Returns a hash value for X (which really is a cgraph_edge). */
675
676hashval_t
677cgraph_edge_hasher::hash (cgraph_edge *e)
678{
679 /* This is a really poor hash function, but it is what htab_hash_pointer
680 uses. */
681 return (hashval_t) ((intptr_t)e->call_stmt >> 3);
682}
683
684/* Returns a hash value for X (which really is a cgraph_edge). */
685
686hashval_t
687cgraph_edge_hasher::hash (gimple *call_stmt)
688{
689 /* This is a really poor hash function, but it is what htab_hash_pointer
690 uses. */
691 return (hashval_t) ((intptr_t)call_stmt >> 3);
692}
693
694/* Return nonzero if the call_stmt of cgraph_edge X is stmt *Y. */
695
696inline bool
697cgraph_edge_hasher::equal (cgraph_edge *x, gimple *y)
698{
699 return x->call_stmt == y;
700}
701
702/* Add call graph edge E to call site hash of its caller. */
703
704static inline void
705cgraph_update_edge_in_call_site_hash (cgraph_edge *e)
706{
707 gimple *call = e->call_stmt;
708 *e->caller->call_site_hash->find_slot_with_hash
709 (call, cgraph_edge_hasher::hash (call), INSERT) = e;
710}
711
712/* Add call graph edge E to call site hash of its caller. */
713
714static inline void
715cgraph_add_edge_to_call_site_hash (cgraph_edge *e)
716{
717 /* There are two speculative edges for every statement (one direct,
718 one indirect); always hash the direct one. */
719 if (e->speculative && e->indirect_unknown_callee)
720 return;
721 cgraph_edge **slot = e->caller->call_site_hash->find_slot_with_hash
722 (e->call_stmt, cgraph_edge_hasher::hash (e->call_stmt), INSERT);
723 if (*slot)
724 {
725 gcc_assert (((cgraph_edge *)*slot)->speculative);
726 if (e->callee && (!e->prev_callee
727 || !e->prev_callee->speculative
728 || e->prev_callee->call_stmt != e->call_stmt))
729 *slot = e;
730 return;
731 }
732 gcc_assert (!*slot || e->speculative);
733 *slot = e;
734}
735
736/* Return the callgraph edge representing the GIMPLE_CALL statement
737 CALL_STMT. */
738
739cgraph_edge *
740cgraph_node::get_edge (gimple *call_stmt)
741{
742 cgraph_edge *e, *e2;
743 int n = 0;
744
745 if (call_site_hash)
746 return call_site_hash->find_with_hash
747 (call_stmt, cgraph_edge_hasher::hash (call_stmt));
748
749 /* This loop may turn out to be performance problem. In such case adding
750 hashtables into call nodes with very many edges is probably best
751 solution. It is not good idea to add pointer into CALL_EXPR itself
752 because we want to make possible having multiple cgraph nodes representing
753 different clones of the same body before the body is actually cloned. */
754 for (e = callees; e; e = e->next_callee)
755 {
756 if (e->call_stmt == call_stmt)
757 break;
758 n++;
759 }
760
761 if (!e)
762 for (e = indirect_calls; e; e = e->next_callee)
763 {
764 if (e->call_stmt == call_stmt)
765 break;
766 n++;
767 }
768
769 if (n > 100)
770 {
771 call_site_hash = hash_table<cgraph_edge_hasher>::create_ggc (120);
772 for (e2 = callees; e2; e2 = e2->next_callee)
773 cgraph_add_edge_to_call_site_hash (e2);
774 for (e2 = indirect_calls; e2; e2 = e2->next_callee)
775 cgraph_add_edge_to_call_site_hash (e2);
776 }
777
778 return e;
779}
780
781
782/* Change field call_stmt of edge E to NEW_STMT. If UPDATE_SPECULATIVE and E
783 is any component of speculative edge, then update all components.
784 Speculations can be resolved in the process and EDGE can be removed and
785 deallocated. Return the edge that now represents the call. */
786
787cgraph_edge *
788cgraph_edge::set_call_stmt (cgraph_edge *e, gcall *new_stmt,
789 bool update_speculative)
790{
791 tree decl;
792
793 cgraph_node *new_direct_callee = NULL;
794 if ((e->indirect_unknown_callee || e->speculative)
795 && (decl = gimple_call_fndecl (new_stmt)))
796 {
797 /* Constant propagation and especially inlining can turn an indirect call
798 into a direct one. */
799 new_direct_callee = cgraph_node::get (decl);
800 gcc_checking_assert (new_direct_callee);
801 }
802
803 /* Speculative edges has three component, update all of them
804 when asked to. */
805 if (update_speculative && e->speculative
806 /* If we are about to resolve the speculation by calling make_direct
807 below, do not bother going over all the speculative edges now. */
808 && !new_direct_callee)
809 {
810 cgraph_edge *direct, *indirect, *next;
811 ipa_ref *ref;
812 bool e_indirect = e->indirect_unknown_callee;
813 int n = 0;
814
815 direct = e->first_speculative_call_target ();
816 indirect = e->speculative_call_indirect_edge ();
817
818 gcall *old_stmt = direct->call_stmt;
819 for (cgraph_edge *d = direct; d; d = next)
820 {
821 next = d->next_speculative_call_target ();
822 cgraph_edge *d2 = set_call_stmt (d, new_stmt, false);
823 gcc_assert (d2 == d);
824 n++;
825 }
826 gcc_checking_assert (indirect->num_speculative_call_targets_p () == n);
827 for (unsigned int i = 0; e->caller->iterate_reference (i, ref); i++)
828 if (ref->speculative && ref->stmt == old_stmt)
829 {
830 ref->stmt = new_stmt;
831 n--;
832 }
833
834 indirect = set_call_stmt (indirect, new_stmt, false);
835 return e_indirect ? indirect : direct;
836 }
837
838 if (new_direct_callee)
839 e = make_direct (e, new_direct_callee);
840
841 /* Only direct speculative edges go to call_site_hash. */
842 if (e->caller->call_site_hash
843 && (!e->speculative || !e->indirect_unknown_callee)
844 /* It is possible that edge was previously speculative. In this case
845 we have different value in call stmt hash which needs preserving. */
846 && e->caller->get_edge (e->call_stmt) == e)
847 e->caller->call_site_hash->remove_elt_with_hash
848 (e->call_stmt, cgraph_edge_hasher::hash (e->call_stmt));
849
850 e->call_stmt = new_stmt;
851
852 function *fun = DECL_STRUCT_FUNCTION (e->caller->decl);
853 e->can_throw_external = stmt_can_throw_external (fun, new_stmt);
854 /* Update call site hash. For speculative calls we only record the first
855 direct edge. */
856 if (e->caller->call_site_hash
857 && (!e->speculative
858 || (e->callee
859 && (!e->prev_callee || !e->prev_callee->speculative
860 || e->prev_callee->call_stmt != e->call_stmt))
861 || (e->speculative && !e->callee)))
862 cgraph_add_edge_to_call_site_hash (e);
863 return e;
864}
865
866/* Allocate a cgraph_edge structure and fill it with data according to the
867 parameters of which only CALLEE can be NULL (when creating an indirect call
868 edge). CLONING_P should be set if properties that are copied from an
869 original edge should not be calculated. */
870
871cgraph_edge *
872symbol_table::create_edge (cgraph_node *caller, cgraph_node *callee,
873 gcall *call_stmt, profile_count count,
874 bool indir_unknown_callee, bool cloning_p)
875{
876 cgraph_edge *edge;
877
878 /* LTO does not actually have access to the call_stmt since these
879 have not been loaded yet. */
880 if (call_stmt)
881 {
882 /* This is a rather expensive check possibly triggering
883 construction of call stmt hashtable. */
884 cgraph_edge *e;
885 gcc_checking_assert (!(e = caller->get_edge (call_stmt))
886 || e->speculative);
887
888 gcc_assert (is_gimple_call (call_stmt));
889 }
890
891 edge = ggc_alloc<cgraph_edge> ();
892 edge->m_summary_id = -1;
893 edges_count++;
894
895 gcc_assert (++edges_max_uid != 0);
896 edge->m_uid = edges_max_uid;
897 edge->aux = NULL;
898 edge->caller = caller;
899 edge->callee = callee;
900 edge->prev_caller = NULL;
901 edge->next_caller = NULL;
902 edge->prev_callee = NULL;
903 edge->next_callee = NULL;
904 edge->lto_stmt_uid = 0;
905 edge->speculative_id = 0;
906
907 edge->count = count;
908 edge->call_stmt = call_stmt;
909 edge->indirect_info = NULL;
910 edge->indirect_inlining_edge = 0;
911 edge->speculative = false;
912 edge->indirect_unknown_callee = indir_unknown_callee;
913 if (call_stmt && caller->call_site_hash)
914 cgraph_add_edge_to_call_site_hash (edge);
915
916 if (cloning_p)
917 return edge;
918
919 edge->can_throw_external
920 = call_stmt ? stmt_can_throw_external (DECL_STRUCT_FUNCTION (caller->decl),
921 call_stmt) : false;
922 edge->inline_failed = CIF_FUNCTION_NOT_CONSIDERED;
923 edge->call_stmt_cannot_inline_p = false;
924
925 if (opt_for_fn (edge->caller->decl, flag_devirtualize)
926 && call_stmt && DECL_STRUCT_FUNCTION (caller->decl))
927 edge->in_polymorphic_cdtor
928 = decl_maybe_in_construction_p (NULL, NULL, call_stmt,
929 caller->decl);
930 else
931 edge->in_polymorphic_cdtor = caller->thunk;
932 if (callee)
933 caller->calls_declare_variant_alt |= callee->declare_variant_alt;
934
935 if (callee && symtab->state != LTO_STREAMING
936 && edge->callee->comdat_local_p ())
937 edge->caller->calls_comdat_local = true;
938
939 return edge;
940}
941
942/* Create edge from a given function to CALLEE in the cgraph. CLONING_P should
943 be set if properties that are copied from an original edge should not be
944 calculated. */
945
946cgraph_edge *
947cgraph_node::create_edge (cgraph_node *callee,
948 gcall *call_stmt, profile_count count, bool cloning_p)
949{
950 cgraph_edge *edge = symtab->create_edge (this, callee, call_stmt, count,
951 false, cloning_p);
952
953 if (!cloning_p)
954 initialize_inline_failed (edge);
955
956 edge->next_caller = callee->callers;
957 if (callee->callers)
958 callee->callers->prev_caller = edge;
959 edge->next_callee = callees;
960 if (callees)
961 callees->prev_callee = edge;
962 callees = edge;
963 callee->callers = edge;
964
965 return edge;
966}
967
968/* Allocate cgraph_indirect_call_info and set its fields to default values. */
969
970cgraph_indirect_call_info *
971cgraph_allocate_init_indirect_info (void)
972{
973 cgraph_indirect_call_info *ii;
974
975 ii = ggc_cleared_alloc<cgraph_indirect_call_info> ();
976 ii->param_index = -1;
977 return ii;
978}
979
980/* Create an indirect edge with a yet-undetermined callee where the call
981 statement destination is a formal parameter of the caller with index
982 PARAM_INDEX. CLONING_P should be set if properties that are copied from an
983 original edge should not be calculated and indirect_info structure should
984 not be calculated. */
985
986cgraph_edge *
987cgraph_node::create_indirect_edge (gcall *call_stmt, int ecf_flags,
988 profile_count count,
989 bool cloning_p)
990{
991 cgraph_edge *edge = symtab->create_edge (this, NULL, call_stmt, count, true,
992 cloning_p);
993 tree target;
994
995 if (!cloning_p)
996 initialize_inline_failed (edge);
997
998 edge->indirect_info = cgraph_allocate_init_indirect_info ();
999 edge->indirect_info->ecf_flags = ecf_flags;
1000 edge->indirect_info->vptr_changed = true;
1001
1002 /* Record polymorphic call info. */
1003 if (!cloning_p
1004 && call_stmt
1005 && (target = gimple_call_fn (call_stmt))
1006 && virtual_method_call_p (target))
1007 {
1008 ipa_polymorphic_call_context context (decl, target, call_stmt);
1009
1010 /* Only record types can have virtual calls. */
1011 edge->indirect_info->polymorphic = true;
1012 edge->indirect_info->param_index = -1;
1013 edge->indirect_info->otr_token
1014 = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
1015 edge->indirect_info->otr_type = obj_type_ref_class (target);
1016 gcc_assert (TREE_CODE (edge->indirect_info->otr_type) == RECORD_TYPE);
1017 edge->indirect_info->context = context;
1018 }
1019
1020 edge->next_callee = indirect_calls;
1021 if (indirect_calls)
1022 indirect_calls->prev_callee = edge;
1023 indirect_calls = edge;
1024
1025 return edge;
1026}
1027
1028/* Remove the edge from the list of the callees of the caller. */
1029
1030void
1031cgraph_edge::remove_caller (void)
1032{
1033 if (prev_callee)
1034 prev_callee->next_callee = next_callee;
1035 if (next_callee)
1036 next_callee->prev_callee = prev_callee;
1037 if (!prev_callee)
1038 {
1039 if (indirect_unknown_callee)
1040 caller->indirect_calls = next_callee;
1041 else
1042 caller->callees = next_callee;
1043 }
1044 if (caller->call_site_hash
1045 && this == caller->get_edge (call_stmt))
1046 caller->call_site_hash->remove_elt_with_hash
1047 (call_stmt, cgraph_edge_hasher::hash (call_stmt));
1048}
1049
1050/* Put the edge onto the free list. */
1051
1052void
1053symbol_table::free_edge (cgraph_edge *e)
1054{
1055 edges_count--;
1056 if (e->m_summary_id != -1)
1057 edge_released_summary_ids.safe_push (e->m_summary_id);
1058
1059 if (e->indirect_info)
1060 ggc_free (e->indirect_info);
1061 ggc_free (e);
1062}
1063
1064/* Remove the edge in the cgraph. */
1065
1066void
1067cgraph_edge::remove (cgraph_edge *edge)
1068{
1069 /* Call all edge removal hooks. */
1070 symtab->call_edge_removal_hooks (edge);
1071
1072 if (!edge->indirect_unknown_callee)
1073 /* Remove from callers list of the callee. */
1074 edge->remove_callee ();
1075
1076 /* Remove from callees list of the callers. */
1077 edge->remove_caller ();
1078
1079 /* Put the edge onto the free list. */
1080 symtab->free_edge (edge);
1081}
1082
1083/* Turn edge into speculative call calling N2. Update
1084 the profile so the direct call is taken COUNT times
1085 with FREQUENCY.
1086
1087 At clone materialization time, the indirect call E will
1088 be expanded as:
1089
1090 if (call_dest == N2)
1091 n2 ();
1092 else
1093 call call_dest
1094
1095 At this time the function just creates the direct call,
1096 the reference representing the if conditional and attaches
1097 them all to the original indirect call statement.
1098
1099 speculative_id is used to link direct calls with their corresponding
1100 IPA_REF_ADDR references when representing speculative calls.
1101
1102 Return direct edge created. */
1103
1104cgraph_edge *
1105cgraph_edge::make_speculative (cgraph_node *n2, profile_count direct_count,
1106 unsigned int speculative_id)
1107{
1108 cgraph_node *n = caller;
1109 ipa_ref *ref = NULL;
1110 cgraph_edge *e2;
1111
1112 if (dump_file)
1113 fprintf (dump_file, "Indirect call -> speculative call %s => %s\n",
1114 n->dump_name (), n2->dump_name ());
1115 speculative = true;
1116 e2 = n->create_edge (n2, call_stmt, direct_count);
1117 initialize_inline_failed (e2);
1118 e2->speculative = true;
1119 if (TREE_NOTHROW (n2->decl))
1120 e2->can_throw_external = false;
1121 else
1122 e2->can_throw_external = can_throw_external;
1123 e2->lto_stmt_uid = lto_stmt_uid;
1124 e2->speculative_id = speculative_id;
1125 e2->in_polymorphic_cdtor = in_polymorphic_cdtor;
1126 indirect_info->num_speculative_call_targets++;
1127 count -= e2->count;
1128 symtab->call_edge_duplication_hooks (this, e2);
1129 ref = n->create_reference (n2, IPA_REF_ADDR, call_stmt);
1130 ref->lto_stmt_uid = lto_stmt_uid;
1131 ref->speculative_id = speculative_id;
1132 ref->speculative = speculative;
1133 n2->mark_address_taken ();
1134 return e2;
1135}
1136
1137/* Speculative call consists of an indirect edge and one or more
1138 direct edge+ref pairs.
1139
1140 Given an edge which is part of speculative call, return the first
1141 direct call edge in the speculative call sequence. */
1142
1143cgraph_edge *
1144cgraph_edge::first_speculative_call_target ()
1145{
1146 cgraph_edge *e = this;
1147
1148 gcc_checking_assert (e->speculative);
1149 if (e->callee)
1150 {
1151 while (e->prev_callee && e->prev_callee->speculative
1152 && e->prev_callee->call_stmt == e->call_stmt
1153 && e->prev_callee->lto_stmt_uid == e->lto_stmt_uid)
1154 e = e->prev_callee;
1155 return e;
1156 }
1157 /* Call stmt site hash always points to the first target of the
1158 speculative call sequence. */
1159 if (e->call_stmt)
1160 return e->caller->get_edge (e->call_stmt);
1161 for (cgraph_edge *e2 = e->caller->callees; true; e2 = e2->next_callee)
1162 if (e2->speculative
1163 && e->call_stmt == e2->call_stmt
1164 && e->lto_stmt_uid == e2->lto_stmt_uid)
1165 return e2;
1166}
1167
1168/* We always maintain first direct edge in the call site hash, if one
1169 exists. E is going to be removed. See if it is first one and update
1170 hash accordingly. INDIRECT is the indirect edge of speculative call.
1171 We assume that INDIRECT->num_speculative_call_targets_p () is already
1172 updated for removal of E. */
1173static void
1174update_call_stmt_hash_for_removing_direct_edge (cgraph_edge *e,
1175 cgraph_edge *indirect)
1176{
1177 if (e->caller->call_site_hash)
1178 {
1179 if (e->caller->get_edge (e->call_stmt) != e)
1180 ;
1181 else if (!indirect->num_speculative_call_targets_p ())
1182 cgraph_update_edge_in_call_site_hash (indirect);
1183 else
1184 {
1185 gcc_checking_assert (e->next_callee && e->next_callee->speculative
1186 && e->next_callee->call_stmt == e->call_stmt);
1187 cgraph_update_edge_in_call_site_hash (e->next_callee);
1188 }
1189 }
1190}
1191
1192/* Speculative call EDGE turned out to be direct call to CALLEE_DECL. Remove
1193 the speculative call sequence and return edge representing the call, the
1194 original EDGE can be removed and deallocated. Return the edge that now
1195 represents the call.
1196
1197 For "speculative" indirect call that contains multiple "speculative"
1198 targets (i.e. edge->indirect_info->num_speculative_call_targets > 1),
1199 decrease the count and only remove current direct edge.
1200
1201 If no speculative direct call left to the speculative indirect call, remove
1202 the speculative of both the indirect call and corresponding direct edge.
1203
1204 It is up to caller to iteratively resolve each "speculative" direct call and
1205 redirect the call as appropriate. */
1206
1207cgraph_edge *
1208cgraph_edge::resolve_speculation (cgraph_edge *edge, tree callee_decl)
1209{
1210 cgraph_edge *e2;
1211 ipa_ref *ref;
1212
1213 gcc_assert (edge->speculative && (!callee_decl || edge->callee));
1214 if (!edge->callee)
1215 e2 = edge->first_speculative_call_target ();
1216 else
1217 e2 = edge;
1218 ref = e2->speculative_call_target_ref ();
1219 edge = edge->speculative_call_indirect_edge ();
1220 if (!callee_decl
1221 || !ref->referred->semantically_equivalent_p
1222 (symtab_node::get (callee_decl)))
1223 {
1224 if (dump_file)
1225 {
1226 if (callee_decl)
1227 {
1228 fprintf (dump_file, "Speculative indirect call %s => %s has "
1229 "turned out to have contradicting known target ",
1230 edge->caller->dump_name (),
1231 e2->callee->dump_name ());
1232 print_generic_expr (dump_file, callee_decl);
1233 fprintf (dump_file, "\n");
1234 }
1235 else
1236 {
1237 fprintf (dump_file, "Removing speculative call %s => %s\n",
1238 edge->caller->dump_name (),
1239 e2->callee->dump_name ());
1240 }
1241 }
1242 }
1243 else
1244 {
1245 cgraph_edge *tmp = edge;
1246 if (dump_file)
1247 fprintf (dump_file, "Speculative call turned into direct call.\n");
1248 edge = e2;
1249 e2 = tmp;
1250 /* FIXME: If EDGE is inlined, we should scale up the frequencies
1251 and counts in the functions inlined through it. */
1252 }
1253 edge->count += e2->count;
1254 if (edge->num_speculative_call_targets_p ())
1255 {
1256 /* The indirect edge has multiple speculative targets, don't remove
1257 speculative until all related direct edges are resolved. */
1258 edge->indirect_info->num_speculative_call_targets--;
1259 if (!edge->indirect_info->num_speculative_call_targets)
1260 edge->speculative = false;
1261 }
1262 else
1263 edge->speculative = false;
1264 e2->speculative = false;
1265 update_call_stmt_hash_for_removing_direct_edge (e2, edge);
1266 ref->remove_reference ();
1267 if (e2->indirect_unknown_callee || e2->inline_failed)
1268 remove (e2);
1269 else
1270 e2->callee->remove_symbol_and_inline_clones ();
1271 return edge;
1272}
1273
1274/* Return edge corresponding to speculative call to a given target.
1275 NULL if speculative call does not have one. */
1276
1277cgraph_edge *
1278cgraph_edge::speculative_call_for_target (cgraph_node *target)
1279{
1280 for (cgraph_edge *direct = first_speculative_call_target ();
1281 direct;
1282 direct = direct->next_speculative_call_target ())
1283 if (direct->speculative_call_target_ref ()
1284 ->referred->semantically_equivalent_p (target))
1285 return direct;
1286 return NULL;
1287}
1288
1289/* Make an indirect or speculative EDGE with an unknown callee an ordinary edge
1290 leading to CALLEE. Speculations can be resolved in the process and EDGE can
1291 be removed and deallocated. Return the edge that now represents the
1292 call. */
1293
1294cgraph_edge *
1295cgraph_edge::make_direct (cgraph_edge *edge, cgraph_node *callee)
1296{
1297 gcc_assert (edge->indirect_unknown_callee || edge->speculative);
1298
1299 /* If we are redirecting speculative call, make it non-speculative. */
1300 if (edge->speculative)
1301 {
1302 cgraph_edge *found = NULL;
1303 cgraph_edge *direct, *next;
1304
1305 edge = edge->speculative_call_indirect_edge ();
1306
1307 /* Look all speculative targets and remove all but one corresponding
1308 to callee (if it exists). */
1309 for (direct = edge->first_speculative_call_target ();
1310 direct;
1311 direct = next)
1312 {
1313 next = direct->next_speculative_call_target ();
1314
1315 /* Compare ref not direct->callee. Direct edge is possibly
1316 inlined or redirected. */
1317 if (!direct->speculative_call_target_ref ()
1318 ->referred->semantically_equivalent_p (callee))
1319 edge = direct->resolve_speculation (direct, NULL);
1320 else
1321 {
1322 gcc_checking_assert (!found);
1323 found = direct;
1324 }
1325 }
1326
1327 /* On successful speculation just remove the indirect edge and
1328 return the pre existing direct edge.
1329 It is important to not remove it and redirect because the direct
1330 edge may be inlined or redirected. */
1331 if (found)
1332 {
1333 cgraph_edge *e2 = resolve_speculation (found, callee->decl);
1334 gcc_checking_assert (!found->speculative && e2 == found);
1335 return found;
1336 }
1337 gcc_checking_assert (!edge->speculative);
1338 }
1339
1340 edge->indirect_unknown_callee = 0;
1341 ggc_free (edge->indirect_info);
1342 edge->indirect_info = NULL;
1343
1344 /* Get the edge out of the indirect edge list. */
1345 if (edge->prev_callee)
1346 edge->prev_callee->next_callee = edge->next_callee;
1347 if (edge->next_callee)
1348 edge->next_callee->prev_callee = edge->prev_callee;
1349 if (!edge->prev_callee)
1350 edge->caller->indirect_calls = edge->next_callee;
1351
1352 /* Put it into the normal callee list */
1353 edge->prev_callee = NULL;
1354 edge->next_callee = edge->caller->callees;
1355 if (edge->caller->callees)
1356 edge->caller->callees->prev_callee = edge;
1357 edge->caller->callees = edge;
1358
1359 /* Insert to callers list of the new callee. */
1360 edge->set_callee (callee);
1361
1362 /* We need to re-determine the inlining status of the edge. */
1363 initialize_inline_failed (edge);
1364 return edge;
1365}
1366
1367/* Redirect callee of the edge to N. The function does not update underlying
1368 call expression. */
1369
1370void
1371cgraph_edge::redirect_callee (cgraph_node *n)
1372{
1373 bool loc = callee->comdat_local_p ();
1374 /* Remove from callers list of the current callee. */
1375 remove_callee ();
1376
1377 /* Insert to callers list of the new callee. */
1378 set_callee (n);
1379
1380 if (!inline_failed)
1381 return;
1382 if (!loc && n->comdat_local_p ())
1383 {
1384 cgraph_node *to = caller->inlined_to ? caller->inlined_to : caller;
1385 to->calls_comdat_local = true;
1386 }
1387 else if (loc && !n->comdat_local_p ())
1388 {
1389 cgraph_node *to = caller->inlined_to ? caller->inlined_to : caller;
1390 gcc_checking_assert (to->calls_comdat_local);
1391 to->calls_comdat_local = to->check_calls_comdat_local_p ();
1392 }
1393}
1394
1395/* If necessary, change the function declaration in the call statement
1396 associated with E so that it corresponds to the edge callee. Speculations
1397 can be resolved in the process and EDGE can be removed and deallocated.
1398
1399 The edge could be one of speculative direct call generated from speculative
1400 indirect call. In this circumstance, decrease the speculative targets
1401 count (i.e. num_speculative_call_targets) and redirect call stmt to the
1402 corresponding i-th target. If no speculative direct call left to the
1403 speculative indirect call, remove "speculative" of the indirect call and
1404 also redirect stmt to it's final direct target.
1405
1406 It is up to caller to iteratively transform each "speculative"
1407 direct call as appropriate. */
1408
1409gimple *
1410cgraph_edge::redirect_call_stmt_to_callee (cgraph_edge *e)
1411{
1412 tree decl = gimple_call_fndecl (e->call_stmt);
1413 gcall *new_stmt;
1414
1415 if (e->speculative)
1416 {
1417 /* If there already is a direct call (i.e. as a result of inliner's
1418 substitution), forget about speculating. */
1419 if (decl)
1420 e = make_direct (e->speculative_call_indirect_edge (),
1421 cgraph_node::get (decl));
1422 else
1423 {
1424 /* Be sure we redirect all speculative targets before poking
1425 about indirect edge. */
1426 gcc_checking_assert (e->callee);
1427 cgraph_edge *indirect = e->speculative_call_indirect_edge ();
1428 gcall *new_stmt;
1429 ipa_ref *ref;
1430
1431 /* Expand speculation into GIMPLE code. */
1432 if (dump_file)
1433 {
1434 fprintf (dump_file,
1435 "Expanding speculative call of %s -> %s count: ",
1436 e->caller->dump_name (),
1437 e->callee->dump_name ());
1438 e->count.dump (dump_file);
1439 fprintf (dump_file, "\n");
1440 }
1441 push_cfun (DECL_STRUCT_FUNCTION (e->caller->decl));
1442
1443 profile_count all = indirect->count;
1444 for (cgraph_edge *e2 = e->first_speculative_call_target ();
1445 e2;
1446 e2 = e2->next_speculative_call_target ())
1447 all = all + e2->count;
1448 profile_probability prob = e->count.probability_in (all);
1449 if (!prob.initialized_p ())
1450 prob = profile_probability::even ();
1451 ref = e->speculative_call_target_ref ();
1452 new_stmt = gimple_ic (e->call_stmt,
1453 dyn_cast<cgraph_node *> (ref->referred),
1454 prob);
1455 e->speculative = false;
1456 if (indirect->num_speculative_call_targets_p ())
1457 {
1458 /* The indirect edge has multiple speculative targets, don't
1459 remove speculative until all related direct edges are
1460 redirected. */
1461 indirect->indirect_info->num_speculative_call_targets--;
1462 if (!indirect->indirect_info->num_speculative_call_targets)
1463 indirect->speculative = false;
1464 }
1465 else
1466 indirect->speculative = false;
1467 /* Direct and indirect edges are not both kept in the call site hash;
1468 get it updated. */
1469 update_call_stmt_hash_for_removing_direct_edge (e, indirect);
1470 cgraph_edge::set_call_stmt (e, new_stmt, false);
1471 e->count = gimple_bb (e->call_stmt)->count;
1472
1473 /* Once we are done with expanding the sequence, also update the indirect
1474 call probability. Until then the basic block accounts for the
1475 sum of indirect edge and all non-expanded speculations. */
1476 if (!indirect->speculative)
1477 indirect->count = gimple_bb (indirect->call_stmt)->count;
1478 ref->speculative = false;
1479 ref->stmt = NULL__null;
1480 pop_cfun ();
1481 /* Continue redirecting E to proper target. */
1482 }
1483 }
1484
1485
1486 if (e->indirect_unknown_callee
1487 || decl == e->callee->decl)
1488 return e->call_stmt;
1489
1490 if (decl && ipa_saved_clone_sources)
1491 {
1492 tree *p = ipa_saved_clone_sources->get (e->callee);
1493 if (p && decl == *p)
1494 {
1495 gimple_call_set_fndecl (e->call_stmt, e->callee->decl);
1496 return e->call_stmt;
1497 }
1498 }
1499 if (flag_checkingglobal_options.x_flag_checking && decl)
1500 {
1501 if (cgraph_node *node = cgraph_node::get (decl))
1502 {
1503 clone_info *info = clone_info::get (node);
1504 gcc_assert (!info || !info->param_adjustments)((void)(!(!info || !info->param_adjustments) ? fancy_abort
("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 1504, __FUNCTION__), 0 : 0))
;
1505 }
1506 }
1507
1508 clone_info *callee_info = clone_info::get (e->callee);
1509 if (symtab->dump_file)
1510 {
1511 fprintf (symtab->dump_file, "updating call of %s -> %s: ",
1512 e->caller->dump_name (), e->callee->dump_name ());
1513 print_gimple_stmt (symtab->dump_file, e->call_stmt, 0, dump_flags);
1514 if (callee_info && callee_info->param_adjustments)
1515 callee_info->param_adjustments->dump (symtab->dump_file);
1516 }
1517
1518 if (ipa_param_adjustments *padjs
1519 = callee_info ? callee_info->param_adjustments : NULL__null)
1520 {
1521 /* We need to defer cleaning EH info on the new statement to
1522 fixup-cfg. We may not have dominator information at this point
1523 and thus would end up with unreachable blocks and have no way
1524 to communicate that we need to run CFG cleanup then. */
1525 int lp_nr = lookup_stmt_eh_lp (e->call_stmt);
1526 if (lp_nr != 0)
1527 remove_stmt_from_eh_lp (e->call_stmt);
1528
1529 tree old_fntype = gimple_call_fntype (e->call_stmt);
1530 new_stmt = padjs->modify_call (e, false);
1531 cgraph_node *origin = e->callee;
1532 while (origin->clone_of)
1533 origin = origin->clone_of;
1534
1535 if ((origin->former_clone_of
1536 && old_fntype == TREE_TYPE (origin->former_clone_of)((contains_struct_check ((origin->former_clone_of), (TS_TYPED
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 1536, __FUNCTION__))->typed.type)
)
1537 || old_fntype == TREE_TYPE (origin->decl)((contains_struct_check ((origin->decl), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 1537, __FUNCTION__))->typed.type)
)
1538 gimple_call_set_fntype (new_stmt, TREE_TYPE (e->callee->decl)((contains_struct_check ((e->callee->decl), (TS_TYPED),
"/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 1538, __FUNCTION__))->typed.type)
);
1539 else
1540 {
1541 tree new_fntype = padjs->build_new_function_type (old_fntype, true);
1542 gimple_call_set_fntype (new_stmt, new_fntype);
1543 }
1544
1545 if (lp_nr != 0)
1546 add_stmt_to_eh_lp (new_stmt, lp_nr);
1547 }
1548 else
1549 {
1550 if (flag_checkingglobal_options.x_flag_checking
1551 && !fndecl_built_in_p (e->callee->decl, BUILT_IN_UNREACHABLE)
1552 && !fndecl_built_in_p (e->callee->decl, BUILT_IN_UNREACHABLE_TRAP))
1553 ipa_verify_edge_has_no_modifications (e);
1554 new_stmt = e->call_stmt;
1555 gimple_call_set_fndecl (new_stmt, e->callee->decl);
1556 update_stmt_fn (DECL_STRUCT_FUNCTION (e->caller->decl)((tree_check ((e->caller->decl), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 1556, __FUNCTION__, (FUNCTION_DECL)))->function_decl.f)
, new_stmt);
1557 }
1558
1559 /* If changing the call to __cxa_pure_virtual or similar noreturn function,
1560 adjust gimple_call_fntype too. */
1561 if (gimple_call_noreturn_p (new_stmt)
1562 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (e->callee->decl)))(((enum tree_code) (((contains_struct_check ((((contains_struct_check
((e->callee->decl), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 1562, __FUNCTION__))->typed.type)), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 1562, __FUNCTION__))->typed.type))->base.code) == VOID_TYPE
)
1563 && TYPE_ARG_TYPES (TREE_TYPE (e->callee->decl))((tree_check2 ((((contains_struct_check ((e->callee->decl
), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 1563, __FUNCTION__))->typed.type)), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 1563, __FUNCTION__, (FUNCTION_TYPE), (METHOD_TYPE)))->type_non_common
.values)
1564 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (e->callee->decl)))((tree_check ((((tree_check2 ((((contains_struct_check ((e->
callee->decl), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 1564, __FUNCTION__))->typed.type)), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 1564, __FUNCTION__, (FUNCTION_TYPE), (METHOD_TYPE)))->type_non_common
.values)), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 1564, __FUNCTION__, (TREE_LIST)))->list.value)
1565 == void_type_nodeglobal_trees[TI_VOID_TYPE]))
1566 gimple_call_set_fntype (new_stmt, TREE_TYPE (e->callee->decl)((contains_struct_check ((e->callee->decl), (TS_TYPED),
"/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 1566, __FUNCTION__))->typed.type)
);
1567
1568 /* If the call becomes noreturn, remove the LHS if possible. */
1569 tree lhs = gimple_call_lhs (new_stmt);
1570 if (lhs
1571 && gimple_call_noreturn_p (new_stmt)
1572 && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (new_stmt)))(((enum tree_code) (((contains_struct_check ((gimple_call_fntype
(new_stmt)), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 1572, __FUNCTION__))->typed.type))->base.code) == VOID_TYPE
)
1573 || should_remove_lhs_p (lhs)))
1574 {
1575 gimple_call_set_lhs (new_stmt, NULL_TREE(tree) __null);
1576 /* We need to fix up the SSA name to avoid checking errors. */
1577 if (TREE_CODE (lhs)((enum tree_code) (lhs)->base.code) == SSA_NAME)
1578 {
1579 tree var = create_tmp_reg_fn (DECL_STRUCT_FUNCTION (e->caller->decl)((tree_check ((e->caller->decl), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 1579, __FUNCTION__, (FUNCTION_DECL)))->function_decl.f)
,
1580 TREE_TYPE (lhs)((contains_struct_check ((lhs), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 1580, __FUNCTION__))->typed.type)
, NULL__null);
1581 SET_SSA_NAME_VAR_OR_IDENTIFIER (lhs, var)do { tree var_ = (var); (tree_check ((lhs), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 1581, __FUNCTION__, (SSA_NAME)))->ssa_name.var = var_; (
tree_check ((lhs), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 1581, __FUNCTION__, (SSA_NAME)))->base.public_flag = (var_
&& ((enum tree_code) (var_)->base.code) == VAR_DECL
&& ((tree_check ((var_), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 1581, __FUNCTION__, (VAR_DECL)))->base.u.bits.saturating_flag
)); } while (0)
;
1582 SSA_NAME_DEF_STMT (lhs)(tree_check ((lhs), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 1582, __FUNCTION__, (SSA_NAME)))->ssa_name.def_stmt
= gimple_build_nop ();
1583 set_ssa_default_def (DECL_STRUCT_FUNCTION (e->caller->decl)((tree_check ((e->caller->decl), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 1583, __FUNCTION__, (FUNCTION_DECL)))->function_decl.f)
,
1584 var, lhs);
1585 }
1586 update_stmt_fn (DECL_STRUCT_FUNCTION (e->caller->decl)((tree_check ((e->caller->decl), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 1586, __FUNCTION__, (FUNCTION_DECL)))->function_decl.f)
, new_stmt);
1587 }
1588
1589 /* If new callee has no static chain, remove it. */
1590 if (gimple_call_chain (new_stmt) && !DECL_STATIC_CHAIN (e->callee->decl)((tree_check ((e->callee->decl), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 1590, __FUNCTION__, (FUNCTION_DECL)))->decl_with_vis.regdecl_flag
)
)
1591 {
1592 gimple_call_set_chain (new_stmt, NULL__null);
1593 update_stmt_fn (DECL_STRUCT_FUNCTION (e->caller->decl)((tree_check ((e->caller->decl), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 1593, __FUNCTION__, (FUNCTION_DECL)))->function_decl.f)
, new_stmt);
1594 }
1595
1596 maybe_remove_unused_call_args (DECL_STRUCT_FUNCTION (e->caller->decl)((tree_check ((e->caller->decl), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 1596, __FUNCTION__, (FUNCTION_DECL)))->function_decl.f)
,
1597 new_stmt);
1598
1599 e->caller->set_call_stmt_including_clones (e->call_stmt, new_stmt, false);
1600
1601 if (symtab->dump_file)
1602 {
1603 fprintf (symtab->dump_file, " updated to:");
1604 print_gimple_stmt (symtab->dump_file, e->call_stmt, 0, dump_flags);
1605 }
1606 return new_stmt;
1607}
1608
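The expansion performed by gimple_ic above rewrites an indirect call into a guard that compares the function pointer against the speculated target and calls that target directly on the hot path, keeping the original indirect call as the cold fallback. A standalone sketch of that shape, using toy types rather than GIMPLE or the cgraph API (purely illustrative):

// Illustrative model of speculative-call expansion; hypothetical types,
// not the GCC representation used in the listing above.
#include <cstdio>

using fn_t = int (*)(int);

static int likely_target (int x) { return x + 1; }

static int call_speculative (fn_t fn, int arg)
{
  if (fn == likely_target)       // cheap guard, guided by profile data
    return likely_target (arg);  // hot path: direct call, now inlinable
  return fn (arg);               // cold path: the original indirect call
}

int main ()
{
  printf ("%d\n", call_speculative (likely_target, 41));  // prints 42
  return 0;
}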
1609/* Update or remove the corresponding cgraph edge if a GIMPLE_CALL
1610 OLD_STMT changed into NEW_STMT. OLD_CALL is gimple_call_fndecl
1611 of OLD_STMT if it was previously a call statement.
1612 If NEW_STMT is NULL, the call has been dropped without any
1613 replacement. */
1614
1615static void
1616cgraph_update_edges_for_call_stmt_node (cgraph_node *node,
1617 gimple *old_stmt, tree old_call,
1618 gimple *new_stmt)
1619{
1620 tree new_call = (new_stmt && is_gimple_call (new_stmt))
3
Assuming 'new_stmt' is non-null
4
'?' condition is true
1621 ? gimple_call_fndecl (new_stmt) : 0;
5
Calling 'gimple_call_fndecl'
14
Returning from 'gimple_call_fndecl'
1622
1623 /* If we are seeing indirect calls, there is nothing to update. */
1624 if (!new_call
14.1
'new_call' is null
&& !old_call)
15
Assuming 'old_call' is non-null
16
Taking false branch
1625 return;
1626 /* See if we turned indirect call into direct call or folded call to one builtin
1627 into different builtin. */
1628 if (old_call
16.1
'old_call' is not equal to 'new_call'
!= new_call)
17
Taking true branch
1629 {
1630 cgraph_edge *e = node->get_edge (old_stmt);
1631 cgraph_edge *ne = NULL__null;
1632 profile_count count;
1633
1634 if (e)
18
Assuming 'e' is non-null
1635 {
1636 /* Keep calls marked as dead dead. */
1637 if (new_stmt
18.1
'new_stmt' is non-null
&& is_gimple_call (new_stmt) && e->callee
19
Assuming field 'callee' is null
20
Assuming pointer value is null
1638 && (fndecl_built_in_p (e->callee->decl, BUILT_IN_UNREACHABLE)
1639 || fndecl_built_in_p (e->callee->decl,
1640 BUILT_IN_UNREACHABLE_TRAP)))
1641 {
1642 cgraph_edge::set_call_stmt (node->get_edge (old_stmt),
1643 as_a <gcall *> (new_stmt));
1644 return;
1645 }
1646 /* See if the edge is already there and has the correct callee. It
1647 might be so because indirect inlining has already updated
1648 it. We also might've cloned and redirected the edge. */
1649 if (new_call
20.1
'new_call' is null
&& e->callee)
1650 {
1651 cgraph_node *callee = e->callee;
1652 while (callee)
1653 {
1654 if (callee->decl == new_call
1655 || callee->former_clone_of == new_call)
1656 {
1657 cgraph_edge::set_call_stmt (e, as_a <gcall *> (new_stmt));
1658 return;
1659 }
1660 callee = callee->clone_of;
1661 }
1662 }
1663
1664 /* Otherwise remove edge and create new one; we can't simply redirect
1665 since function has changed, so inline plan and other information
1666 attached to edge is invalid. */
1667 count = e->count;
1668 if (e->indirect_unknown_callee || e->inline_failed)
21
Assuming field 'indirect_unknown_callee' is 0
22
Assuming field 'inline_failed' is 0
23
Taking false branch
1669 cgraph_edge::remove (e);
1670 else
1671 e->callee->remove_symbol_and_inline_clones ();
24
Called C++ object pointer is null
1672 }
1673 else if (new_call)
1674 {
1675 /* We are seeing new direct call; compute profile info based on BB. */
1676 basic_block bb = gimple_bb (new_stmt);
1677 count = bb->count;
1678 }
1679
1680 if (new_call)
1681 {
1682 ne = node->create_edge (cgraph_node::get_create (new_call),
1683 as_a <gcall *> (new_stmt), count);
1684 gcc_assert (ne->inline_failed)((void)(!(ne->inline_failed) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 1684, __FUNCTION__), 0 : 0))
;
1685 }
1686 }
1687 /* We only updated the call stmt; update the pointer in the cgraph edge. */
1688 else if (old_stmt != new_stmt)
1689 cgraph_edge::set_call_stmt (node->get_edge (old_stmt),
1690 as_a <gcall *> (new_stmt));
1691}
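The analyzer's path ends at source line 1671: at steps 19-20 it assumes e->callee may be null, and on the branch where the edge is neither indirect_unknown_callee nor inline_failed that pointer is dereferenced. Whether this combination is actually reachable is a question of cgraph invariants; a minimal standalone model of the flagged pattern, with an explicit guard that documents the invariant, could look like this (toy types, not the real cgraph_edge/cgraph_node):

// Minimal model of the pattern flagged at cgraph.cc:1671 (hypothetical types).
#include <cassert>

struct toy_node
{
  void remove_symbol_and_inline_clones () {}
};

struct toy_edge
{
  toy_node *callee = nullptr;           // null for indirect edges
  bool indirect_unknown_callee = false;
  bool inline_failed = true;
};

static void remove_or_drop (toy_edge *e)
{
  if (e->indirect_unknown_callee || e->inline_failed)
    ;                                   // remove the edge itself
  else
    {
      // The branch the analyzer complains about: callee was only assumed,
      // never proven, to be non-null.  The assert records the invariant
      // that inlined edges always have a callee.
      assert (e->callee);
      e->callee->remove_symbol_and_inline_clones ();
    }
}

int main ()
{
  toy_edge e;
  remove_or_drop (&e);
  return 0;
}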
1692
1693/* Update or remove the corresponding cgraph edge if a GIMPLE_CALL
1694 OLD_STMT changed into NEW_STMT. OLD_DECL is gimple_call_fndecl
1695 of OLD_STMT before it was updated (updating can happen inplace). */
1696
1697void
1698cgraph_update_edges_for_call_stmt (gimple *old_stmt, tree old_decl,
1699 gimple *new_stmt)
1700{
1701 cgraph_node *orig = cgraph_node::get (cfun(cfun + 0)->decl);
1702 cgraph_node *node;
1703
1704 gcc_checking_assert (orig)((void)(!(orig) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 1704, __FUNCTION__), 0 : 0))
;
1
'?' condition is false
1705 cgraph_update_edges_for_call_stmt_node (orig, old_stmt, old_decl, new_stmt);
2
Calling 'cgraph_update_edges_for_call_stmt_node'
1706 if (orig->clones)
1707 for (node = orig->clones; node != orig;)
1708 {
1709 cgraph_update_edges_for_call_stmt_node (node, old_stmt, old_decl,
1710 new_stmt);
1711 if (node->clones)
1712 node = node->clones;
1713 else if (node->next_sibling_clone)
1714 node = node->next_sibling_clone;
1715 else
1716 {
1717 while (node != orig && !node->next_sibling_clone)
1718 node = node->clone_of;
1719 if (node != orig)
1720 node = node->next_sibling_clone;
1721 }
1722 }
1723}
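The loop over orig->clones above is a non-recursive pre-order walk of the clone tree using the clones / next_sibling_clone / clone_of links. The same traversal over a toy tree, as a self-contained sketch with a hypothetical node type:

// Toy reimplementation of the clone-tree walk used above.
#include <cstdio>

struct toy_clone
{
  const char *name;
  toy_clone *clones = nullptr;             // first child clone
  toy_clone *next_sibling_clone = nullptr; // next sibling
  toy_clone *clone_of = nullptr;           // parent
};

// Pre-order visit of every clone of ROOT without recursion or an explicit
// stack, mirroring the loop in cgraph_update_edges_for_call_stmt.
static void walk_clones (toy_clone *root)
{
  for (toy_clone *n = root->clones; n && n != root;)
    {
      printf ("visiting %s\n", n->name);
      if (n->clones)
        n = n->clones;
      else if (n->next_sibling_clone)
        n = n->next_sibling_clone;
      else
        {
          while (n != root && !n->next_sibling_clone)
            n = n->clone_of;
          if (n != root)
            n = n->next_sibling_clone;
        }
    }
}

int main ()
{
  toy_clone root = {"root"}, a = {"a"}, b = {"b"}, a1 = {"a1"};
  root.clones = &a;
  a.clone_of = &root; a.next_sibling_clone = &b; a.clones = &a1;
  b.clone_of = &root;
  a1.clone_of = &a;
  walk_clones (&root);   // visits a, a1, b
  return 0;
}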
1724
1725
1726/* Remove all callees from the node. */
1727
1728void
1729cgraph_node::remove_callees (void)
1730{
1731 cgraph_edge *e, *f;
1732
1733 calls_comdat_local = false;
1734
1735 /* It is sufficient to remove the edges from the lists of callers of
1736 the callees. The callee list of the node can be zapped with one
1737 assignment. */
1738 for (e = callees; e; e = f)
1739 {
1740 f = e->next_callee;
1741 symtab->call_edge_removal_hooks (e);
1742 if (!e->indirect_unknown_callee)
1743 e->remove_callee ();
1744 symtab->free_edge (e);
1745 }
1746 for (e = indirect_calls; e; e = f)
1747 {
1748 f = e->next_callee;
1749 symtab->call_edge_removal_hooks (e);
1750 if (!e->indirect_unknown_callee)
1751 e->remove_callee ();
1752 symtab->free_edge (e);
1753 }
1754 indirect_calls = NULL__null;
1755 callees = NULL__null;
1756 if (call_site_hash)
1757 {
1758 call_site_hash->empty ();
1759 call_site_hash = NULL__null;
1760 }
1761}
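Both loops above use the standard idiom for destroying a singly linked edge list while iterating over it: the next pointer is cached in F before the current edge is released. The same idiom in a generic standalone form (toy types, plain malloc/free):

#include <cstdio>
#include <cstdlib>

struct toy_edge
{
  int id;
  toy_edge *next_callee;
};

// Free every edge on the list; F caches the next pointer so the current
// edge can be released safely before advancing.
static void free_all (toy_edge *callees)
{
  toy_edge *f;
  for (toy_edge *e = callees; e; e = f)
    {
      f = e->next_callee;
      printf ("freeing edge %d\n", e->id);
      free (e);
    }
}

int main ()
{
  toy_edge *list = nullptr;
  for (int i = 0; i < 3; i++)
    {
      toy_edge *e = (toy_edge *) malloc (sizeof (toy_edge));
      e->id = i;
      e->next_callee = list;
      list = e;
    }
  free_all (list);
  return 0;
}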
1762
1763/* Remove all callers from the node. */
1764
1765void
1766cgraph_node::remove_callers (void)
1767{
1768 cgraph_edge *e, *f;
1769
1770 /* It is sufficient to remove the edges from the lists of callees of
1771 the callers. The caller list of the node can be zapped with one
1772 assignment. */
1773 for (e = callers; e; e = f)
1774 {
1775 f = e->next_caller;
1776 symtab->call_edge_removal_hooks (e);
1777 e->remove_caller ();
1778 symtab->free_edge (e);
1779 }
1780 callers = NULL__null;
1781}
1782
1783/* Helper function for cgraph_release_function_body and free_lang_data.
1784 It releases the body of function DECL without having to inspect its
1785 possibly non-existent symtab node. */
1786
1787void
1788release_function_body (tree decl)
1789{
1790 function *fn = DECL_STRUCT_FUNCTION (decl)((tree_check ((decl), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 1790, __FUNCTION__, (FUNCTION_DECL)))->function_decl.f)
;
1791 if (fn)
1792 {
1793 if (fn->cfg
1794 && loops_for_fn (fn))
1795 {
1796 fn->curr_properties &= ~PROP_loops(1 << 11);
1797 loop_optimizer_finalize (fn);
1798 }
1799 if (fn->gimple_df)
1800 {
1801 delete_tree_ssa (fn);
1802 fn->eh = NULL__null;
1803 }
1804 if (fn->cfg)
1805 {
1806 gcc_assert (!dom_info_available_p (fn, CDI_DOMINATORS))((void)(!(!dom_info_available_p (fn, CDI_DOMINATORS)) ? fancy_abort
("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 1806, __FUNCTION__), 0 : 0))
;
1807 gcc_assert (!dom_info_available_p (fn, CDI_POST_DOMINATORS))((void)(!(!dom_info_available_p (fn, CDI_POST_DOMINATORS)) ? fancy_abort
("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 1807, __FUNCTION__), 0 : 0))
;
1808 delete_tree_cfg_annotations (fn);
1809 free_cfg (fn);
1810 fn->cfg = NULL__null;
1811 }
1812 if (fn->value_histograms)
1813 free_histograms (fn);
1814 gimple_set_body (decl, NULL__null);
1815 /* Struct function hangs a lot of data that would leak if we didn't
1816 remove all pointers to it. */
1817 ggc_free (fn);
1818 DECL_STRUCT_FUNCTION (decl)((tree_check ((decl), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 1818, __FUNCTION__, (FUNCTION_DECL)))->function_decl.f)
= NULL__null;
1819 }
1820 DECL_SAVED_TREE (decl)((tree_check ((decl), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 1820, __FUNCTION__, (FUNCTION_DECL)))->function_decl.saved_tree
)
= NULL__null;
1821}
1822
1823/* Release memory used to represent body of function.
1824 Use this only for functions that are released before being translated to
1825 target code (i.e. RTL). Functions that are compiled to RTL and beyond
1826 are free'd in final.cc via free_after_compilation().
1827 KEEP_ARGUMENTS are useful only if you want to rebuild body as thunk. */
1828
1829void
1830cgraph_node::release_body (bool keep_arguments)
1831{
1832 ipa_transforms_to_apply.release ();
1833 if (!used_as_abstract_origin && symtab->state != PARSING)
1834 {
1835 DECL_RESULT (decl)((tree_check ((decl), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 1835, __FUNCTION__, (FUNCTION_DECL)))->decl_non_common.result
)
= NULL__null;
1836
1837 if (!keep_arguments)
1838 DECL_ARGUMENTS (decl)((tree_check ((decl), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 1838, __FUNCTION__, (FUNCTION_DECL)))->function_decl.arguments
)
= NULL__null;
1839 }
1840 /* If the node is abstract and needed, then do not clear
1841 DECL_INITIAL of its associated function declaration because it's
1842 needed to emit debug info later. */
1843 if (!used_as_abstract_origin && DECL_INITIAL (decl)((contains_struct_check ((decl), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 1843, __FUNCTION__))->decl_common.initial)
)
1844 DECL_INITIAL (decl)((contains_struct_check ((decl), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 1844, __FUNCTION__))->decl_common.initial)
= error_mark_nodeglobal_trees[TI_ERROR_MARK];
1845 release_function_body (decl);
1846 if (lto_file_data)
1847 {
1848 lto_free_function_in_decl_state_for_node (this);
1849 lto_file_data = NULL__null;
1850 }
1851 if (flag_checkingglobal_options.x_flag_checking && clones)
1852 {
1853 /* It is invalid to release a body before materializing clones, except
1854 for thunks that don't really need a body. Verify also that we do
1855 not leak pointers to the call statements. */
1856 for (cgraph_node *node = clones; node;
1857 node = node->next_sibling_clone)
1858 gcc_assert (node->thunk && !node->callees->call_stmt)((void)(!(node->thunk && !node->callees->call_stmt
) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 1858, __FUNCTION__), 0 : 0))
;
1859 }
1860 remove_callees ();
1861 remove_all_references ();
1862}
1863
1864/* Remove function from symbol table. */
1865
1866void
1867cgraph_node::remove (void)
1868{
1869 bool clone_info_set = false;
1870 clone_info *info, saved_info;
1871 if (symtab->ipa_clones_dump_file && symtab->cloned_nodes.contains (this))
1872 fprintf (symtab->ipa_clones_dump_file,
1873 "Callgraph removal;%s;%d;%s;%d;%d\n", asm_name (), order,
1874 DECL_SOURCE_FILE (decl)((expand_location (((contains_struct_check ((decl), (TS_DECL_MINIMAL
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 1874, __FUNCTION__))->decl_minimal.locus))).file)
, DECL_SOURCE_LINE (decl)((expand_location (((contains_struct_check ((decl), (TS_DECL_MINIMAL
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 1874, __FUNCTION__))->decl_minimal.locus))).line)
,
1875 DECL_SOURCE_COLUMN (decl)((expand_location (((contains_struct_check ((decl), (TS_DECL_MINIMAL
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 1875, __FUNCTION__))->decl_minimal.locus))).column)
);
1876
1877 if ((info = clone_info::get (this)) != NULL__null)
1878 {
1879 saved_info = *info;
1880 clone_info_set = true;
1881 }
1882 symtab->call_cgraph_removal_hooks (this);
1883 remove_callers ();
1884 remove_callees ();
1885 ipa_transforms_to_apply.release ();
1886 delete_function_version (function_version ());
1887
1888 /* Incremental inlining accesses removed nodes stored in the postorder list.
1889 */
1890 force_output = false;
1891 forced_by_abi = false;
1892
1893 unregister (clone_info_set ? &saved_info : NULL__null);
1894 if (prev_sibling_clone)
1895 prev_sibling_clone->next_sibling_clone = next_sibling_clone;
1896 else if (clone_of)
1897 {
1898 clone_of->clones = next_sibling_clone;
1899 if (!clones)
1900 {
1901 bool need_body = false;
1902 for (cgraph_node *n = clone_of; n; n = n->clone_of)
1903 if (n->analyzed || n->clones)
1904 {
1905 need_body = true;
1906 break;
1907 }
1908 if (!need_body)
1909 clone_of->release_body ();
1910 }
1911 }
1912 if (next_sibling_clone)
1913 next_sibling_clone->prev_sibling_clone = prev_sibling_clone;
1914 if (clones)
1915 {
1916 cgraph_node *n, *next;
1917
1918 if (clone_of)
1919 {
1920 for (n = clones; n->next_sibling_clone; n = n->next_sibling_clone)
1921 n->clone_of = clone_of;
1922 n->clone_of = clone_of;
1923 n->next_sibling_clone = clone_of->clones;
1924 if (clone_of->clones)
1925 clone_of->clones->prev_sibling_clone = n;
1926 clone_of->clones = clones;
1927 }
1928 else
1929 {
1930 /* We are removing a node with clones. This makes the clones inconsistent,
1931 but assume they will be removed subsequently and just keep the clone
1932 tree intact. This can happen in unreachable function removal since
1933 we remove unreachable functions in random order, not by bottom-up
1934 walk of clone trees. */
1935 for (n = clones; n; n = next)
1936 {
1937 next = n->next_sibling_clone;
1938 n->next_sibling_clone = NULL__null;
1939 n->prev_sibling_clone = NULL__null;
1940 n->clone_of = NULL__null;
1941 }
1942 }
1943 }
1944
1945 /* While all the clones are removed after being processed, the function
1946 itself is kept in the cgraph even after it is compiled. Check whether
1947 we are done with this body and reclaim it proactively if this is the case.
1948 */
1949 if (symtab->state != LTO_STREAMING)
1950 {
1951 cgraph_node *n = cgraph_node::get (decl);
1952 if (!n
1953 || (!n->clones && !n->clone_of && !n->inlined_to
1954 && ((symtab->global_info_ready || in_lto_pglobal_options.x_in_lto_p)
1955 && (TREE_ASM_WRITTEN (n->decl)((n->decl)->base.asm_written_flag)
1956 || DECL_EXTERNAL (n->decl)((contains_struct_check ((n->decl), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 1956, __FUNCTION__))->decl_common.decl_flag_1)
1957 || !n->analyzed
1958 || (!flag_wpaglobal_options.x_flag_wpa && n->in_other_partition)))))
1959 release_body ();
1960 }
1961 else
1962 {
1963 lto_free_function_in_decl_state_for_node (this);
1964 lto_file_data = NULL__null;
1965 }
1966
1967 decl = NULL__null;
1968 if (call_site_hash)
1969 {
1970 call_site_hash->empty ();
1971 call_site_hash = NULL__null;
1972 }
1973
1974 symtab->release_symbol (this);
1975}
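The prev_sibling_clone / next_sibling_clone updates in cgraph_node::remove above are an ordinary doubly linked list unlink in which the head pointer lives in the parent's clones field. A toy version of the same splice (hypothetical node type):

#include <cstdio>

struct toy_clone
{
  const char *name;
  toy_clone *clone_of = nullptr;
  toy_clone *clones = nullptr;             // head of the child list
  toy_clone *prev_sibling_clone = nullptr;
  toy_clone *next_sibling_clone = nullptr;
};

// Remove NODE from its parent's sibling list, as cgraph_node::remove does.
static void unlink_clone (toy_clone *node)
{
  if (node->prev_sibling_clone)
    node->prev_sibling_clone->next_sibling_clone = node->next_sibling_clone;
  else if (node->clone_of)
    node->clone_of->clones = node->next_sibling_clone;
  if (node->next_sibling_clone)
    node->next_sibling_clone->prev_sibling_clone = node->prev_sibling_clone;
}

int main ()
{
  toy_clone p = {"p"}, a = {"a"}, b = {"b"};
  p.clones = &a;
  a.clone_of = &p; a.next_sibling_clone = &b;
  b.clone_of = &p; b.prev_sibling_clone = &a;
  unlink_clone (&a);
  printf ("head after unlink: %s\n", p.clones->name);  // prints "b"
  return 0;
}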
1976
1977 /* Likewise indicate that a node has its address taken. */
1978
1979void
1980cgraph_node::mark_address_taken (void)
1981{
1982 /* Indirect inlining can figure out that all uses of the address are
1983 inlined. */
1984 if (inlined_to)
1985 {
1986 gcc_assert (cfun->after_inlining)((void)(!((cfun + 0)->after_inlining) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 1986, __FUNCTION__), 0 : 0))
;
1987 gcc_assert (callers->indirect_inlining_edge)((void)(!(callers->indirect_inlining_edge) ? fancy_abort (
"/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 1987, __FUNCTION__), 0 : 0))
;
1988 return;
1989 }
1990 /* FIXME: address_taken flag is used both as a shortcut for testing whether
1991 IPA_REF_ADDR reference exists (and thus it should be set on node
1992 representing alias we take address of) and as a test whether address
1993 of the object was taken (and thus it should be set on node alias is
1994 referring to). We should remove the first use and then remove the
1995 following set. */
1996 address_taken = 1;
1997 cgraph_node *node = ultimate_alias_target ();
1998 node->address_taken = 1;
1999}
2000
2001/* Return local info node for the compiled function. */
2002
2003cgraph_node *
2004cgraph_node::local_info_node (tree decl)
2005{
2006 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL)((void)(!(((enum tree_code) (decl)->base.code) == FUNCTION_DECL
) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 2006, __FUNCTION__), 0 : 0))
;
2007 cgraph_node *node = get (decl);
2008 if (!node)
2009 return NULL__null;
2010 return node->ultimate_alias_target ();
2011}
2012
2013/* Return RTL info for the compiled function. */
2014
2015cgraph_rtl_info *
2016cgraph_node::rtl_info (const_tree decl)
2017{
2018 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL)((void)(!(((enum tree_code) (decl)->base.code) == FUNCTION_DECL
) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 2018, __FUNCTION__), 0 : 0))
;
2019 cgraph_node *node = get (decl);
2020 if (!node)
2021 return NULL__null;
2022 enum availability avail;
2023 node = node->ultimate_alias_target (&avail);
2024 if (decl != current_function_decl
2025 && (avail < AVAIL_AVAILABLE
2026 || (node->decl != current_function_decl
2027 && !TREE_ASM_WRITTEN (node->decl)((node->decl)->base.asm_written_flag))))
2028 return NULL__null;
2029 /* Allocate if it doesn't exist. */
2030 if (node->rtl == NULL__null)
2031 {
2032 node->rtl = ggc_cleared_alloc<cgraph_rtl_info> ();
2033 SET_HARD_REG_SET (node->rtl->function_used_regs);
2034 }
2035 return node->rtl;
2036}
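rtl_info above allocates the per-node cgraph_rtl_info lazily, on the first request that actually needs it. The same allocate-on-first-use shape in a standalone toy form (hypothetical types, plain new instead of GC allocation):

#include <cstdio>

struct rtl_blob { int used_regs = 0; };

struct toy_node
{
  rtl_blob *rtl = nullptr;

  // Return the blob, creating it the first time it is asked for.
  rtl_blob *get_rtl ()
  {
    if (rtl == nullptr)
      rtl = new rtl_blob ();
    return rtl;
  }
};

int main ()
{
  toy_node n;
  printf ("%p\n", (void *) n.get_rtl ());
  printf ("%p\n", (void *) n.get_rtl ());  // same pointer: created only once
  delete n.rtl;
  return 0;
}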
2037
2038/* Return a string describing the failure REASON. */
2039
2040const char*
2041cgraph_inline_failed_string (cgraph_inline_failed_t reason)
2042{
2043#undef DEFCIFCODE
2044#define DEFCIFCODE(code, type, string)type, string,
2045
2046 static const char *cif_string_table[CIF_N_REASONS] = {
2047#include "cif-code.def"
2048 };
2049
2050 /* Signedness of an enum type is implementation defined, so cast it
2051 to unsigned before testing. */
2052 gcc_assert ((unsigned) reason < CIF_N_REASONS)((void)(!((unsigned) reason < CIF_N_REASONS) ? fancy_abort
("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 2052, __FUNCTION__), 0 : 0))
;
2053 return cif_string_table[reason];
2054}
2055
2056/* Return a type describing the failure REASON. */
2057
2058cgraph_inline_failed_type_t
2059cgraph_inline_failed_type (cgraph_inline_failed_t reason)
2060{
2061#undef DEFCIFCODE
2062#define DEFCIFCODE(code, type, string)type, type,
2063
2064 static cgraph_inline_failed_type_t cif_type_table[CIF_N_REASONS] = {
2065#include "cif-code.def"
2066 };
2067
2068 /* Signedness of an enum type is implementation defined, so cast it
2069 to unsigned before testing. */
2070 gcc_assert ((unsigned) reason < CIF_N_REASONS)((void)(!((unsigned) reason < CIF_N_REASONS) ? fancy_abort
("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 2070, __FUNCTION__), 0 : 0))
;
2071 return cif_type_table[reason];
2072}
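Both table builders above use the usual .def/X-macro pattern: DEFCIFCODE is redefined to keep only the column of interest before cif-code.def is re-included. A self-contained sketch of the technique, with an invented FRUIT table standing in for cif-code.def:

#include <cstdio>

// Normally the entries live in a separate .def file that is re-included;
// they are inlined in a macro here to keep the sketch self-contained.
#define FRUIT_TABLE \
  DEFFRUIT (APPLE,  "apple")  \
  DEFFRUIT (CHERRY, "cherry")

enum fruit_code {
#define DEFFRUIT(code, name) code,
  FRUIT_TABLE
#undef DEFFRUIT
  N_FRUITS
};

static const char *fruit_names[N_FRUITS] = {
#define DEFFRUIT(code, name) name,
  FRUIT_TABLE
#undef DEFFRUIT
};

int main ()
{
  printf ("%s\n", fruit_names[CHERRY]);  // prints "cherry"
  return 0;
}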
2073
2074/* Names used to print out the availability enum. */
2075const char * const cgraph_availability_names[] =
2076 {"unset", "not_available", "overwritable", "available", "local"};
2077
2078/* Output flags of edge to a file F. */
2079
2080void
2081cgraph_edge::dump_edge_flags (FILE *f)
2082{
2083 if (speculative)
2084 fprintf (f, "(speculative) ");
2085 if (!inline_failed)
2086 fprintf (f, "(inlined) ");
2087 if (call_stmt_cannot_inline_p)
2088 fprintf (f, "(call_stmt_cannot_inline_p) ");
2089 if (indirect_inlining_edge)
2090 fprintf (f, "(indirect_inlining) ");
2091 if (count.initialized_p ())
2092 {
2093 fprintf (f, "(");
2094 count.dump (f);
2095 fprintf (f, ",");
2096 fprintf (f, "%.2f per call) ", sreal_frequency ().to_double ());
2097 }
2098 if (can_throw_external)
2099 fprintf (f, "(can throw external) ");
2100}
2101
2102/* Dump edge to stderr. */
2103
2104void
2105cgraph_edge::debug (void)
2106{
2107 fprintf (stderrstderr, "%s -> %s ", caller->dump_asm_name (),
2108 callee == NULL__null ? "(null)" : callee->dump_asm_name ());
2109 dump_edge_flags (stderrstderr);
2110 fprintf (stderrstderr, "\n\n");
2111 caller->debug ();
2112 if (callee != NULL__null)
2113 callee->debug ();
2114}
2115
2116/* Dump call graph node to file F. */
2117
2118void
2119cgraph_node::dump (FILE *f)
2120{
2121 cgraph_edge *edge;
2122
2123 dump_base (f);
2124
2125 if (inlined_to)
2126 fprintf (f, " Function %s is inline copy in %s\n",
2127 dump_name (),
2128 inlined_to->dump_name ());
2129 if (clone_of)
2130 fprintf (f, " Clone of %s\n", clone_of->dump_asm_name ());
2131 if (symtab->function_flags_ready)
2132 fprintf (f, " Availability: %s\n",
2133 cgraph_availability_names [get_availability ()]);
2134
2135 if (profile_id)
2136 fprintf (f, " Profile id: %i\n",
2137 profile_id);
2138 if (unit_id)
2139 fprintf (f, " Unit id: %i\n",
2140 unit_id);
2141 cgraph_function_version_info *vi = function_version ();
2142 if (vi != NULL__null)
2143 {
2144 fprintf (f, " Version info: ");
2145 if (vi->prev != NULL__null)
2146 {
2147 fprintf (f, "prev: ");
2148 fprintf (f, "%s ", vi->prev->this_node->dump_asm_name ());
2149 }
2150 if (vi->next != NULL__null)
2151 {
2152 fprintf (f, "next: ");
2153 fprintf (f, "%s ", vi->next->this_node->dump_asm_name ());
2154 }
2155 if (vi->dispatcher_resolver != NULL_TREE(tree) __null)
2156 fprintf (f, "dispatcher: %s",
2157 lang_hooks.decl_printable_name (vi->dispatcher_resolver, 2));
2158
2159 fprintf (f, "\n");
2160 }
2161 fprintf (f, " Function flags:");
2162 if (count.initialized_p ())
2163 {
2164 fprintf (f, " count:");
2165 count.dump (f);
2166 }
2167 if (tp_first_run > 0)
2168 fprintf (f, " first_run:%" PRId64"l" "d", (int64_t) tp_first_run);
2169 if (cgraph_node *origin = nested_function_origin (this))
2170 fprintf (f, " nested in:%s", origin->dump_asm_name ());
2171 if (gimple_has_body_p (decl))
2172 fprintf (f, " body");
2173 if (process)
2174 fprintf (f, " process");
2175 if (local)
2176 fprintf (f, " local");
2177 if (redefined_extern_inline)
2178 fprintf (f, " redefined_extern_inline");
2179 if (only_called_at_startup)
2180 fprintf (f, " only_called_at_startup");
2181 if (only_called_at_exit)
2182 fprintf (f, " only_called_at_exit");
2183 if (tm_clone)
2184 fprintf (f, " tm_clone");
2185 if (calls_comdat_local)
2186 fprintf (f, " calls_comdat_local");
2187 if (icf_merged)
2188 fprintf (f, " icf_merged");
2189 if (merged_comdat)
2190 fprintf (f, " merged_comdat");
2191 if (merged_extern_inline)
2192 fprintf (f, " merged_extern_inline");
2193 if (split_part)
2194 fprintf (f, " split_part");
2195 if (indirect_call_target)
2196 fprintf (f, " indirect_call_target");
2197 if (nonfreeing_fn)
2198 fprintf (f, " nonfreeing_fn");
2199 if (DECL_STATIC_CONSTRUCTOR (decl)((tree_check ((decl), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 2199, __FUNCTION__, (FUNCTION_DECL)))->function_decl.static_ctor_flag
)
)
2200 fprintf (f," static_constructor (priority:%i)", get_init_priority ());
2201 if (DECL_STATIC_DESTRUCTOR (decl)((tree_check ((decl), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 2201, __FUNCTION__, (FUNCTION_DECL)))->function_decl.static_dtor_flag
)
)
2202 fprintf (f," static_destructor (priority:%i)", get_fini_priority ());
2203 if (frequency == NODE_FREQUENCY_HOT)
2204 fprintf (f, " hot");
2205 if (frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED)
2206 fprintf (f, " unlikely_executed");
2207 if (frequency == NODE_FREQUENCY_EXECUTED_ONCE)
2208 fprintf (f, " executed_once");
2209 if (opt_for_fn (decl, optimize_size)(opts_for_fn (decl)->x_optimize_size))
2210 fprintf (f, " optimize_size");
2211 if (parallelized_function)
2212 fprintf (f, " parallelized_function");
2213 if (DECL_IS_MALLOC (decl)((tree_check ((decl), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 2213, __FUNCTION__, (FUNCTION_DECL)))->function_decl.malloc_flag
)
)
2214 fprintf (f, " decl_is_malloc");
2215 if (DECL_IS_OPERATOR_NEW_P (decl)(((tree_check ((decl), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 2215, __FUNCTION__, (FUNCTION_DECL)))->function_decl.decl_type
) == OPERATOR_NEW)
)
2216 fprintf (f, " %soperator_new",
2217 DECL_IS_REPLACEABLE_OPERATOR (decl)((tree_check ((decl), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 2217, __FUNCTION__, (FUNCTION_DECL)))->function_decl.replaceable_operator
)
? "replaceable_" : "");
2218 if (DECL_IS_OPERATOR_DELETE_P (decl)(((tree_check ((decl), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 2218, __FUNCTION__, (FUNCTION_DECL)))->function_decl.decl_type
) == OPERATOR_DELETE)
)
2219 fprintf (f, " %soperator_delete",
2220 DECL_IS_REPLACEABLE_OPERATOR (decl)((tree_check ((decl), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 2220, __FUNCTION__, (FUNCTION_DECL)))->function_decl.replaceable_operator
)
? "replaceable_" : "");
2221
2222 if (DECL_STATIC_CHAIN (decl)((tree_check ((decl), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 2222, __FUNCTION__, (FUNCTION_DECL)))->decl_with_vis.regdecl_flag
)
)
2223 fprintf (f, " static_chain");
2224
2225 fprintf (f, "\n");
2226
2227 if (thunk)
2228 {
2229 fprintf (f, " Thunk");
2230 thunk_info::get (this)->dump (f);
2231 }
2232 else if (former_thunk_p ())
2233 {
2234 fprintf (f, " Former thunk ");
2235 thunk_info::get (this)->dump (f);
2236 }
2237 else gcc_checking_assert (!thunk_info::get (this))((void)(!(!thunk_info::get (this)) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 2237, __FUNCTION__), 0 : 0))
;
2238
2239 fprintf (f, " Called by: ");
2240
2241 profile_count sum = profile_count::zero ();
2242 for (edge = callers; edge; edge = edge->next_caller)
2243 {
2244 fprintf (f, "%s ", edge->caller->dump_asm_name ());
2245 edge->dump_edge_flags (f);
2246 if (edge->count.initialized_p ())
2247 sum += edge->count.ipa ();
2248 }
2249
2250 fprintf (f, "\n Calls: ");
2251 for (edge = callees; edge; edge = edge->next_callee)
2252 {
2253 fprintf (f, "%s ", edge->callee->dump_asm_name ());
2254 edge->dump_edge_flags (f);
2255 }
2256 fprintf (f, "\n");
2257
2258 if (!body_removed && count.ipa ().initialized_p ())
2259 {
2260 bool ok = true;
2261 bool min = false;
2262 ipa_ref *ref;
2263
2264 FOR_EACH_ALIAS (this, ref)for (unsigned ref_iter_ = 0; (this)->iterate_direct_aliases
(ref_iter_, ref); ref_iter_++)
2265 if (dyn_cast <cgraph_node *> (ref->referring)->count.initialized_p ())
2266 sum += dyn_cast <cgraph_node *> (ref->referring)->count.ipa ();
2267
2268 if (inlined_to
2269 || (symtab->state < EXPANSION
2270 && ultimate_alias_target () == this && only_called_directly_p ()))
2271 ok = !count.ipa ().differs_from_p (sum);
2272 else if (count.ipa () > profile_count::from_gcov_type (100)
2273 && count.ipa () < sum.apply_scale (99, 100))
2274 ok = false, min = true;
2275 if (!ok)
2276 {
2277 fprintf (f, " Invalid sum of caller counts ");
2278 sum.dump (f);
2279 if (min)
2280 fprintf (f, ", should be at most ");
2281 else
2282 fprintf (f, ", should be ");
2283 count.ipa ().dump (f);
2284 fprintf (f, "\n");
2285 }
2286 }
2287
2288 for (edge = indirect_calls; edge; edge = edge->next_callee)
2289 {
2290 if (edge->indirect_info->polymorphic)
2291 {
2292 fprintf (f, " Polymorphic indirect call of type ");
2293 print_generic_expr (f, edge->indirect_info->otr_type, TDF_SLIM);
2294 fprintf (f, " token:%i", (int) edge->indirect_info->otr_token);
2295 }
2296 else
2297 fprintf (f, " Indirect call");
2298 edge->dump_edge_flags (f);
2299 if (edge->indirect_info->param_index != -1)
2300 {
2301 fprintf (f, "of param:%i ", edge->indirect_info->param_index);
2302 if (edge->indirect_info->agg_contents)
2303 fprintf (f, "loaded from %s %s at offset %i ",
2304 edge->indirect_info->member_ptr ? "member ptr" : "aggregate",
2305 edge->indirect_info->by_ref ? "passed by reference":"",
2306 (int)edge->indirect_info->offset);
2307 if (edge->indirect_info->vptr_changed)
2308 fprintf (f, "(vptr maybe changed) ");
2309 }
2310 fprintf (f, "num speculative call targets: %i\n",
2311 edge->indirect_info->num_speculative_call_targets);
2312 if (edge->indirect_info->polymorphic)
2313 edge->indirect_info->context.dump (f);
2314 }
2315}
2316
2317/* Dump call graph node to file F in graphviz format. */
2318
2319void
2320cgraph_node::dump_graphviz (FILE *f)
2321{
2322 cgraph_edge *edge;
2323
2324 for (edge = callees; edge; edge = edge->next_callee)
2325 {
2326 cgraph_node *callee = edge->callee;
2327
2328 fprintf (f, "\t\"%s\" -> \"%s\"\n", dump_name (), callee->dump_name ());
2329 }
2330}
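dump_graphviz prints only the edge lines; the surrounding digraph { ... } wrapper is expected to come from the caller. A minimal sketch of producing a complete DOT file from such edge lines (hypothetical edge data), which can then be rendered with, for example, "dot -Tpng cg.dot -o cg.png":

#include <cstdio>

int main ()
{
  // Hypothetical edge list; in practice these pairs would come from
  // dump_graphviz output for each node.
  const char *edges[][2] = { {"main", "foo"}, {"foo", "bar"} };

  printf ("digraph callgraph {\n");
  for (const auto &e : edges)
    printf ("\t\"%s\" -> \"%s\"\n", e[0], e[1]);
  printf ("}\n");
  return 0;
}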
2331
2332
2333/* Dump call graph node NODE to stderr. */
2334
2335DEBUG_FUNCTION__attribute__ ((__used__)) void
2336cgraph_node::debug (void)
2337{
2338 dump (stderrstderr);
2339}
2340
2341/* Dump the callgraph to file F. */
2342
2343void
2344cgraph_node::dump_cgraph (FILE *f)
2345{
2346 cgraph_node *node;
2347
2348 fprintf (f, "callgraph:\n\n");
2349 FOR_EACH_FUNCTION (node)for ((node) = symtab->first_function (); (node); (node) = symtab
->next_function ((node)))
2350 node->dump (f);
2351}
2352
2353/* Return true when the DECL can possibly be inlined. */
2354
2355bool
2356cgraph_function_possibly_inlined_p (tree decl)
2357{
2358 if (!symtab->global_info_ready)
2359 return !DECL_UNINLINABLE (decl)((tree_check ((decl), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 2359, __FUNCTION__, (FUNCTION_DECL)))->function_decl.uninlinable
)
;
2360 return DECL_POSSIBLY_INLINED (decl)(tree_check ((decl), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 2360, __FUNCTION__, (FUNCTION_DECL)))->function_decl.possibly_inlined
;
2361}
2362
2363/* Return function availability. See cgraph.h for description of individual
2364 return values. */
2365enum availability
2366cgraph_node::get_availability (symtab_node *ref)
2367{
2368 if (ref)
2369 {
2370 cgraph_node *cref = dyn_cast <cgraph_node *> (ref);
2371 if (cref)
2372 ref = cref->inlined_to;
2373 }
2374 enum availability avail;
2375 if (!analyzed && !in_other_partition)
2376 avail = AVAIL_NOT_AVAILABLE;
2377 else if (local)
2378 avail = AVAIL_LOCAL;
2379 else if (inlined_to)
2380 avail = AVAIL_AVAILABLE;
2381 else if (transparent_alias)
2382 ultimate_alias_target (&avail, ref);
2383 else if (ifunc_resolver
2384 || lookup_attribute ("noipa", DECL_ATTRIBUTES (decl)((contains_struct_check ((decl), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 2384, __FUNCTION__))->decl_common.attributes)
))
2385 avail = AVAIL_INTERPOSABLE;
2386 else if (!externally_visible)
2387 avail = AVAIL_AVAILABLE;
2388 /* If this is a reference from the symbol itself and there are no aliases, we
2389 may be sure that the symbol was not interposed by something else because
2390 the symbol itself would be unreachable otherwise.
2391
2392 Also comdat groups are always resolved in groups. */
2393 else if ((this == ref && !has_aliases_p ())
2394 || (ref && get_comdat_group ()
2395 && get_comdat_group () == ref->get_comdat_group ()))
2396 avail = AVAIL_AVAILABLE;
2397 /* Inline functions are safe to be analyzed even if their symbol can
2398 be overwritten at runtime. It is not meaningful to enforce any sane
2399 behavior on replacing an inline function by a different body. */
2400 else if (DECL_DECLARED_INLINE_P (decl)((tree_check ((decl), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 2400, __FUNCTION__, (FUNCTION_DECL)))->function_decl.declared_inline_flag
)
)
2401 avail = AVAIL_AVAILABLE;
2402
2403 /* If the function can be overwritten, return OVERWRITABLE. Take
2404 care at least of two notable extensions - the COMDAT functions
2405 used to share template instantiations in C++ (this is symmetric
2406 to the code in cp_cannot_inline_tree_fn and probably should be shared and
2407 the inlinability hooks completely eliminated). */
2408
2409 else if (decl_replaceable_p (decl, semantic_interposition)
2410 && !DECL_EXTERNAL (decl)((contains_struct_check ((decl), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 2410, __FUNCTION__))->decl_common.decl_flag_1)
)
2411 avail = AVAIL_INTERPOSABLE;
2412 else avail = AVAIL_AVAILABLE;
2413
2414 return avail;
2415}
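The return values of get_availability form an ordered scale (the enum order in cgraph.h is assumed here to match cgraph_availability_names above), which is why later code can simply compare against AVAIL_INTERPOSABLE. A minimal illustration of that ordering idiom, with a toy copy of the enum rather than the real one:

#include <cstdio>

// Toy copy of the ordering; the real definition lives in cgraph.h.
enum toy_availability
{
  TOY_AVAIL_UNSET,
  TOY_AVAIL_NOT_AVAILABLE,
  TOY_AVAIL_INTERPOSABLE,
  TOY_AVAIL_AVAILABLE,
  TOY_AVAIL_LOCAL
};

// A body may be treated as the final definition only when nothing can
// interpose it, i.e. when availability is strictly above INTERPOSABLE.
static bool body_is_trustworthy (toy_availability a)
{
  return a > TOY_AVAIL_INTERPOSABLE;
}

int main ()
{
  printf ("%d %d\n", body_is_trustworthy (TOY_AVAIL_INTERPOSABLE),
          body_is_trustworthy (TOY_AVAIL_LOCAL));  // prints "0 1"
  return 0;
}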
2416
2417/* Worker for cgraph_node_can_be_local_p. */
2418static bool
2419cgraph_node_cannot_be_local_p_1 (cgraph_node *node, void *)
2420{
2421 return !(!node->force_output
2422 && !node->ifunc_resolver
2423 /* Limitation of gas requires us to output targets of symver aliases
2424 as global symbols. This is binutils PR 25295. */
2425 && !node->symver
2426 && ((DECL_COMDAT (node->decl)((contains_struct_check ((node->decl), (TS_DECL_WITH_VIS),
"/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 2426, __FUNCTION__))->decl_with_vis.comdat_flag)
2427 && !node->forced_by_abi
2428 && !node->used_from_object_file_p ()
2429 && !node->same_comdat_group)
2430 || !node->externally_visible));
2431}
2432
2433/* Return true if cgraph_node can be made local for API change.
2434 Extern inline functions and C++ COMDAT functions can be made local
2435 at the expense of possible code size growth if function is used in multiple
2436 compilation units. */
2437bool
2438cgraph_node::can_be_local_p (void)
2439{
2440 return (!address_taken
2441 && !call_for_symbol_thunks_and_aliases (cgraph_node_cannot_be_local_p_1,
2442 NULL__null, true));
2443}
2444
2445/* Call callback on cgraph_node, thunks and aliases associated to cgraph_node.
2446 When INCLUDE_OVERWRITABLE is false, overwritable symbols are
2447 skipped. When EXCLUDE_VIRTUAL_THUNKS is true, virtual thunks are
2448 skipped. */
2449bool
2450cgraph_node::call_for_symbol_thunks_and_aliases (bool (*callback)
2451 (cgraph_node *, void *),
2452 void *data,
2453 bool include_overwritable,
2454 bool exclude_virtual_thunks)
2455{
2456 cgraph_edge *e;
2457 ipa_ref *ref;
2458 enum availability avail = AVAIL_AVAILABLE;
2459
2460 if (include_overwritable
2461 || (avail = get_availability ()) > AVAIL_INTERPOSABLE)
2462 {
2463 if (callback (this, data))
2464 return true;
2465 }
2466 FOR_EACH_ALIAS (this, ref)for (unsigned ref_iter_ = 0; (this)->iterate_direct_aliases
(ref_iter_, ref); ref_iter_++)
2467 {
2468 cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring);
2469 if (include_overwritable
2470 || alias->get_availability () > AVAIL_INTERPOSABLE)
2471 if (alias->call_for_symbol_thunks_and_aliases (callback, data,
2472 include_overwritable,
2473 exclude_virtual_thunks))
2474 return true;
2475 }
2476 if (avail <= AVAIL_INTERPOSABLE)
2477 return false;
2478 for (e = callers; e; e = e->next_caller)
2479 if (e->caller->thunk
2480 && (include_overwritable
2481 || e->caller->get_availability () > AVAIL_INTERPOSABLE)
2482 && !(exclude_virtual_thunks
2483 && thunk_info::get (e->caller)->virtual_offset_p))
2484 if (e->caller->call_for_symbol_thunks_and_aliases (callback, data,
2485 include_overwritable,
2486 exclude_virtual_thunks))
2487 return true;
2488
2489 return false;
2490}
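call_for_symbol_thunks_and_aliases follows the common "callback returns true to stop the walk" convention. A toy standalone version of that convention over a plain array, with hypothetical types:

#include <cstdio>

// Returns true as soon as CALLBACK returns true, false if every item was
// visited - the same early-exit convention as the walker above.
template <typename T, typename F>
static bool for_each_until (T *items, int n, F callback)
{
  for (int i = 0; i < n; i++)
    if (callback (items[i]))
      return true;
  return false;
}

int main ()
{
  int vals[] = {2, 4, 5, 6};
  bool found_odd = for_each_until (vals, 4, [] (int v) { return v % 2 != 0; });
  printf ("%d\n", found_odd);  // prints "1"
  return 0;
}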
2491
2492/* Worker to bring NODE local. */
2493
2494bool
2495cgraph_node::make_local (cgraph_node *node, void *)
2496{
2497 gcc_checking_assert (node->can_be_local_p ())((void)(!(node->can_be_local_p ()) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 2497, __FUNCTION__), 0 : 0))
;
2498 if (DECL_COMDAT (node->decl)((contains_struct_check ((node->decl), (TS_DECL_WITH_VIS),
"/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 2498, __FUNCTION__))->decl_with_vis.comdat_flag)
|| DECL_EXTERNAL (node->decl)((contains_struct_check ((node->decl), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 2498, __FUNCTION__))->decl_common.decl_flag_1)
)
2499 {
2500 node->make_decl_local ();
2501 node->set_section (NULL__null);
2502 node->set_comdat_group (NULL__null);
2503 node->externally_visible = false;
2504 node->forced_by_abi = false;
2505 node->local = true;
2506 node->unique_name = ((node->resolution == LDPR_PREVAILING_DEF_IRONLY
2507 || node->resolution == LDPR_PREVAILING_DEF_IRONLY_EXP)
2508 && !flag_incremental_linkglobal_options.x_flag_incremental_link);
2509 node->resolution = LDPR_PREVAILING_DEF_IRONLY;
2510 gcc_assert (node->get_availability () == AVAIL_LOCAL)((void)(!(node->get_availability () == AVAIL_LOCAL) ? fancy_abort
("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 2510, __FUNCTION__), 0 : 0))
;
2511 }
2512 return false;
2513}
2514
2515/* Bring cgraph node local. */
2516
2517void
2518cgraph_node::make_local (void)
2519{
2520 call_for_symbol_thunks_and_aliases (cgraph_node::make_local, NULL__null, true);
2521}
2522
2523/* Worker to set nothrow flag. */
2524
2525static void
2526set_nothrow_flag_1 (cgraph_node *node, bool nothrow, bool non_call,
2527 bool *changed)
2528{
2529 cgraph_edge *e;
2530
2531 if (nothrow && !TREE_NOTHROW (node->decl)((node->decl)->base.nothrow_flag))
2532 {
2533 /* With non-call exceptions we can't say for sure if other function body
2534 was not possibly optimized to still throw. */
2535 if (!non_call || node->binds_to_current_def_p ())
2536 {
2537 TREE_NOTHROW (node->decl)((node->decl)->base.nothrow_flag) = true;
2538 *changed = true;
2539 for (e = node->callers; e; e = e->next_caller)
2540 e->can_throw_external = false;
2541 }
2542 }
2543 else if (!nothrow && TREE_NOTHROW (node->decl)((node->decl)->base.nothrow_flag))
2544 {
2545 TREE_NOTHROW (node->decl)((node->decl)->base.nothrow_flag) = false;
2546 *changed = true;
2547 }
2548 ipa_ref *ref;
2549 FOR_EACH_ALIAS (node, ref)for (unsigned ref_iter_ = 0; (node)->iterate_direct_aliases
(ref_iter_, ref); ref_iter_++)
2550 {
2551 cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring);
2552 if (!nothrow || alias->get_availability () > AVAIL_INTERPOSABLE)
2553 set_nothrow_flag_1 (alias, nothrow, non_call, changed);
2554 }
2555 for (cgraph_edge *e = node->callers; e; e = e->next_caller)
2556 if (e->caller->thunk
2557 && (!nothrow || e->caller->get_availability () > AVAIL_INTERPOSABLE))
2558 set_nothrow_flag_1 (e->caller, nothrow, non_call, changed);
2559}
2560
2561/* Set TREE_NOTHROW on NODE's decl and on aliases of NODE
2562 if any to NOTHROW. */
2563
2564bool
2565cgraph_node::set_nothrow_flag (bool nothrow)
2566{
2567 bool changed = false;
2568 bool non_call = opt_for_fn (decl, flag_non_call_exceptions)(opts_for_fn (decl)->x_flag_non_call_exceptions);
2569
2570 if (!nothrow || get_availability () > AVAIL_INTERPOSABLE)
2571 set_nothrow_flag_1 (this, nothrow, non_call, &changed);
2572 else
2573 {
2574 ipa_ref *ref;
2575
2576 FOR_EACH_ALIAS (this, ref)for (unsigned ref_iter_ = 0; (this)->iterate_direct_aliases
(ref_iter_, ref); ref_iter_++)
2577 {
2578 cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring);
2579 if (!nothrow || alias->get_availability () > AVAIL_INTERPOSABLE)
2580 set_nothrow_flag_1 (alias, nothrow, non_call, &changed);
2581 }
2582 }
2583 return changed;
2584}
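set_nothrow_flag above and the set_malloc_flag / set_noreturn_flag routines that follow all share one shape: flip the flag on the node itself, then propagate through its aliases and through thunks that call it, reporting via a changed flag whether anything was actually touched. A compact standalone model of that propagation shape, with a toy acyclic symbol graph and a single boolean flag:

#include <cstdio>
#include <vector>

struct toy_sym
{
  const char *name;
  bool flag = false;
  std::vector<toy_sym *> aliases;   // direct aliases of this symbol
  std::vector<toy_sym *> thunks;    // thunks whose target is this symbol
};

// Set VALUE on NODE and everything that must stay in sync with it,
// recording in *CHANGED whether any bit actually flipped.
static void propagate_flag (toy_sym *node, bool value, bool *changed)
{
  if (node->flag != value)
    {
      node->flag = value;
      *changed = true;
    }
  for (toy_sym *a : node->aliases)
    propagate_flag (a, value, changed);
  for (toy_sym *t : node->thunks)
    propagate_flag (t, value, changed);
}

int main ()
{
  toy_sym f = {"f"}, alias = {"f_alias"}, thunk = {"f_thunk"};
  f.aliases.push_back (&alias);
  f.thunks.push_back (&thunk);
  bool changed = false;
  propagate_flag (&f, true, &changed);
  printf ("changed=%d alias=%d thunk=%d\n", changed, alias.flag, thunk.flag);
  return 0;
}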
2585
2586/* Worker to set malloc flag. */
2587static void
2588set_malloc_flag_1 (cgraph_node *node, bool malloc_p, bool *changed)
2589{
2590 if (malloc_p && !DECL_IS_MALLOC (node->decl)((tree_check ((node->decl), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 2590, __FUNCTION__, (FUNCTION_DECL)))->function_decl.malloc_flag
)
)
2591 {
2592 DECL_IS_MALLOC (node->decl)((tree_check ((node->decl), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 2592, __FUNCTION__, (FUNCTION_DECL)))->function_decl.malloc_flag
)
= true;
2593 *changed = true;
2594 }
2595
2596 ipa_ref *ref;
2597 FOR_EACH_ALIAS (node, ref)for (unsigned ref_iter_ = 0; (node)->iterate_direct_aliases
(ref_iter_, ref); ref_iter_++)
2598 {
2599 cgraph_node *alias = dyn_cast<cgraph_node *> (ref->referring);
2600 if (!malloc_p || alias->get_availability () > AVAIL_INTERPOSABLE)
2601 set_malloc_flag_1 (alias, malloc_p, changed);
2602 }
2603
2604 for (cgraph_edge *e = node->callers; e; e = e->next_caller)
2605 if (e->caller->thunk
2606 && (!malloc_p || e->caller->get_availability () > AVAIL_INTERPOSABLE))
2607 set_malloc_flag_1 (e->caller, malloc_p, changed);
2608}
2609
2610/* Set DECL_IS_MALLOC on NODE's decl and on NODE's aliases if any. */
2611
2612bool
2613cgraph_node::set_malloc_flag (bool malloc_p)
2614{
2615 bool changed = false;
2616
2617 if (!malloc_p || get_availability () > AVAIL_INTERPOSABLE)
2618 set_malloc_flag_1 (this, malloc_p, &changed);
2619 else
2620 {
2621 ipa_ref *ref;
2622
2623 FOR_EACH_ALIAS (this, ref)for (unsigned ref_iter_ = 0; (this)->iterate_direct_aliases
(ref_iter_, ref); ref_iter_++)
2624 {
2625 cgraph_node *alias = dyn_cast<cgraph_node *> (ref->referring);
2626 if (!malloc_p || alias->get_availability () > AVAIL_INTERPOSABLE)
2627 set_malloc_flag_1 (alias, malloc_p, &changed);
2628 }
2629 }
2630 return changed;
2631}
2632
2633 /* Worker to set noreturn flag. */
2634static void
2635set_noreturn_flag_1 (cgraph_node *node, bool noreturn_p, bool *changed)
2636{
2637 if (noreturn_p && !TREE_THIS_VOLATILE (node->decl)((node->decl)->base.volatile_flag))
2638 {
2639 TREE_THIS_VOLATILE (node->decl)((node->decl)->base.volatile_flag) = true;
2640 *changed = true;
2641 }
2642
2643 ipa_ref *ref;
2644 FOR_EACH_ALIAS (node, ref)for (unsigned ref_iter_ = 0; (node)->iterate_direct_aliases
(ref_iter_, ref); ref_iter_++)
2645 {
2646 cgraph_node *alias = dyn_cast<cgraph_node *> (ref->referring);
2647 if (!noreturn_p || alias->get_availability () > AVAIL_INTERPOSABLE)
2648 set_noreturn_flag_1 (alias, noreturn_p, changed);
2649 }
2650
2651 for (cgraph_edge *e = node->callers; e; e = e->next_caller)
2652 if (e->caller->thunk
2653 && (!noreturn_p || e->caller->get_availability () > AVAIL_INTERPOSABLE))
2654 set_noreturn_flag_1 (e->caller, noreturn_p, changed);
2655}
2656
2657/* Set TREE_THIS_VOLATILE on NODE's decl and on NODE's aliases if any. */
2658
2659bool
2660cgraph_node::set_noreturn_flag (bool noreturn_p)
2661{
2662 bool changed = false;
2663
2664 if (!noreturn_p || get_availability () > AVAIL_INTERPOSABLE)
2665 set_noreturn_flag_1 (this, noreturn_p, &changed);
2666 else
2667 {
2668 ipa_ref *ref;
2669
2670 FOR_EACH_ALIAS (this, ref)for (unsigned ref_iter_ = 0; (this)->iterate_direct_aliases
(ref_iter_, ref); ref_iter_++)
2671 {
2672 cgraph_node *alias = dyn_cast<cgraph_node *> (ref->referring);
2673 if (!noreturn_p || alias->get_availability () > AVAIL_INTERPOSABLE)
2674 set_noreturn_flag_1 (alias, noreturn_p, &changed);
2675 }
2676 }
2677 return changed;
2678}
2679
2680/* Worker to set_const_flag. */
2681
2682static void
2683set_const_flag_1 (cgraph_node *node, bool set_const, bool looping,
2684 bool *changed)
2685{
2686 /* Static constructors and destructors without a side effect can be
2687 optimized out. */
2688 if (set_const && !looping)
2689 {
2690 if (DECL_STATIC_CONSTRUCTOR (node->decl)((tree_check ((node->decl), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 2690, __FUNCTION__, (FUNCTION_DECL)))->function_decl.static_ctor_flag
)
)
2691 {
2692 DECL_STATIC_CONSTRUCTOR (node->decl)((tree_check ((node->decl), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 2692, __FUNCTION__, (FUNCTION_DECL)))->function_decl.static_ctor_flag
)
= 0;
2693 *changed = true;
2694 }
2695 if (DECL_STATIC_DESTRUCTOR (node->decl)((tree_check ((node->decl), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 2695, __FUNCTION__, (FUNCTION_DECL)))->function_decl.static_dtor_flag
)
)
2696 {
2697 DECL_STATIC_DESTRUCTOR (node->decl)((tree_check ((node->decl), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 2697, __FUNCTION__, (FUNCTION_DECL)))->function_decl.static_dtor_flag
)
= 0;
2698 *changed = true;
2699 }
2700 }
2701 if (!set_const)
2702 {
2703 if (TREE_READONLY (node->decl)((non_type_check ((node->decl), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 2703, __FUNCTION__))->base.readonly_flag)
)
2704 {
2705 TREE_READONLY (node->decl)((non_type_check ((node->decl), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 2705, __FUNCTION__))->base.readonly_flag)
= 0;
2706 DECL_LOOPING_CONST_OR_PURE_P (node->decl)((tree_check ((node->decl), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 2706, __FUNCTION__, (FUNCTION_DECL)))->function_decl.looping_const_or_pure_flag
)
= false;
2707 *changed = true;
2708 }
2709 }
2710 else
2711 {
2712 /* Consider function:
2713
2714 bool a(int *p)
2715 {
2716 return *p==*p;
2717 }
2718
2719 During early optimization we will turn this into:
2720
2721 bool a(int *p)
2722 {
2723 return true;
2724 }
2725
2726 Now this function will be detected as CONST; however, when interposed
2727 it may end up being just pure. We must always assume the worst
2728 scenario here. */
2729 if (TREE_READONLY (node->decl)((non_type_check ((node->decl), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 2729, __FUNCTION__))->base.readonly_flag)
)
2730 {
2731 if (!looping && DECL_LOOPING_CONST_OR_PURE_P (node->decl)((tree_check ((node->decl), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 2731, __FUNCTION__, (FUNCTION_DECL)))->function_decl.looping_const_or_pure_flag
)
)
2732 {
2733 DECL_LOOPING_CONST_OR_PURE_P (node->decl)((tree_check ((node->decl), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 2733, __FUNCTION__, (FUNCTION_DECL)))->function_decl.looping_const_or_pure_flag
)
= false;
2734 *changed = true;
2735 }
2736 }
2737 else if (node->binds_to_current_def_p ())
2738 {
2739 TREE_READONLY (node->decl)((non_type_check ((node->decl), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 2739, __FUNCTION__))->base.readonly_flag)
= true;
2740 DECL_LOOPING_CONST_OR_PURE_P (node->decl)((tree_check ((node->decl), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 2740, __FUNCTION__, (FUNCTION_DECL)))->function_decl.looping_const_or_pure_flag
)
= looping;
2741 DECL_PURE_P (node->decl)((tree_check ((node->decl), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 2741, __FUNCTION__, (FUNCTION_DECL)))->function_decl.pure_flag
)
= false;
2742 *changed = true;
2743 }
2744 else
2745 {
2746 if (dump_file && (dump_flags & TDF_DETAILS))
2747 fprintf (dump_file, "Dropping state to PURE because function does "
2748 "not bind to current def.\n");
2749 if (!DECL_PURE_P (node->decl)((tree_check ((node->decl), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 2749, __FUNCTION__, (FUNCTION_DECL)))->function_decl.pure_flag
)
)
2750 {
2751 DECL_PURE_P (node->decl)((tree_check ((node->decl), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 2751, __FUNCTION__, (FUNCTION_DECL)))->function_decl.pure_flag
)
= true;
2752 DECL_LOOPING_CONST_OR_PURE_P (node->decl)((tree_check ((node->decl), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 2752, __FUNCTION__, (FUNCTION_DECL)))->function_decl.looping_const_or_pure_flag
)
= looping;
2753 *changed = true;
2754 }
2755 else if (!looping && DECL_LOOPING_CONST_OR_PURE_P (node->decl)((tree_check ((node->decl), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 2755, __FUNCTION__, (FUNCTION_DECL)))->function_decl.looping_const_or_pure_flag
)
)
2756 {
2757 DECL_LOOPING_CONST_OR_PURE_P (node->decl)((tree_check ((node->decl), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 2757, __FUNCTION__, (FUNCTION_DECL)))->function_decl.looping_const_or_pure_flag
)
= false;
2758 *changed = true;
2759 }
2760 }
2761 }
2762
2763 ipa_ref *ref;
2764 FOR_EACH_ALIAS (node, ref)for (unsigned ref_iter_ = 0; (node)->iterate_direct_aliases
(ref_iter_, ref); ref_iter_++)
2765 {
2766 cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring);
2767 if (!set_const || alias->get_availability () > AVAIL_INTERPOSABLE)
2768 set_const_flag_1 (alias, set_const, looping, changed);
2769 }
2770 for (struct cgraph_node *n = node->simd_clones; n != NULL__null;
2771 n = n->simdclone->next_clone)
2772 set_const_flag_1 (n, set_const, looping, changed);
2773 for (cgraph_edge *e = node->callers; e; e = e->next_caller)
2774 if (e->caller->thunk
2775 && (!set_const || e->caller->get_availability () > AVAIL_INTERPOSABLE))
2776 {
2777 /* Virtual thunks access virtual offset in the vtable, so they can
2778 only be pure, never const. */
2779 if (set_const
2780 && (thunk_info::get (e->caller)->virtual_offset_p
2781 || !node->binds_to_current_def_p (e->caller)))
2782 *changed |= e->caller->set_pure_flag (true, looping);
2783 else
2784 set_const_flag_1 (e->caller, set_const, looping, changed);
2785 }
2786}
2787
2788/* If SET_CONST is true, mark function, aliases and thunks to be ECF_CONST.
2789 If SET_CONST is false, clear the flag.
2790
2791 When setting the flag, be careful about possible interposition: do not
2792 set the flag for functions that can be interposed, and set only the pure
2793 flag for functions that may bind to another definition.
2794
2795 Return true if any change was done. */
2796
2797bool
2798cgraph_node::set_const_flag (bool set_const, bool looping)
2799{
2800 bool changed = false;
2801 if (!set_const || get_availability () > AVAIL_INTERPOSABLE)
2802 set_const_flag_1 (this, set_const, looping, &changed);
2803 else
2804 {
2805 ipa_ref *ref;
2806
2807 FOR_EACH_ALIAS (this, ref)for (unsigned ref_iter_ = 0; (this)->iterate_direct_aliases
(ref_iter_, ref); ref_iter_++)
2808 {
2809 cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring);
2810 if (!set_const || alias->get_availability () > AVAIL_INTERPOSABLE)
2811 set_const_flag_1 (alias, set_const, looping, &changed);
2812 }
2813 }
2814 return changed;
2815}
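
The logic above distinguishes functions that may be marked ECF_CONST (TREE_READONLY on the decl) from those that may only be pure (DECL_PURE_P), and deliberately avoids setting the const flag on symbols that can be interposed, since an interposed replacement might legitimately be only pure. As a rough, standalone illustration of the semantics these flags encode (not GCC-internal code), the following sketch uses the ordinary const and pure function attributes; the names square, lookup and table are made up for the example.

#include <cstdio>

/* A "const" function depends only on its arguments; calls may be merged
   and moved freely.  */
__attribute__((const)) static int
square (int x)
{
  return x * x;
}

/* A "pure" function may also read global memory, so calls can only be
   merged when no intervening store could change what it reads.  */
int table[256];

__attribute__((pure)) static int
lookup (int i)
{
  return table[i & 255];
}

int
main ()
{
  int a = 7;
  int r = square (a) + square (a);   /* the two calls may be folded into one */
  int s = lookup (a);
  table[a & 255] = r;                /* clobbers what lookup may read */
  int t = lookup (a);                /* cannot be reused from S */
  printf ("%d %d %d\n", r, s, t);
  return 0;
}
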
2816
2817/* Info used by set_pure_flag_1. */
2818
2819struct set_pure_flag_info
2820{
2821 bool pure;
2822 bool looping;
2823 bool changed;
2824};
2825
2826/* Worker to set_pure_flag. */
2827
2828static bool
2829set_pure_flag_1 (cgraph_node *node, void *data)
2830{
2831 struct set_pure_flag_info *info = (struct set_pure_flag_info *)data;
2832 /* Static constructors and destructors without a side effect can be
2833 optimized out. */
2834 if (info->pure && !info->looping)
2835 {
2836 if (DECL_STATIC_CONSTRUCTOR (node->decl)((tree_check ((node->decl), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 2836, __FUNCTION__, (FUNCTION_DECL)))->function_decl.static_ctor_flag
)
)
2837 {
2838 DECL_STATIC_CONSTRUCTOR (node->decl)((tree_check ((node->decl), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 2838, __FUNCTION__, (FUNCTION_DECL)))->function_decl.static_ctor_flag
)
= 0;
2839 info->changed = true;
2840 }
2841 if (DECL_STATIC_DESTRUCTOR (node->decl)((tree_check ((node->decl), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 2841, __FUNCTION__, (FUNCTION_DECL)))->function_decl.static_dtor_flag
)
)
2842 {
2843 DECL_STATIC_DESTRUCTOR (node->decl)((tree_check ((node->decl), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 2843, __FUNCTION__, (FUNCTION_DECL)))->function_decl.static_dtor_flag
)
= 0;
2844 info->changed = true;
2845 }
2846 }
2847 if (info->pure)
2848 {
2849 if (!DECL_PURE_P (node->decl)((tree_check ((node->decl), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 2849, __FUNCTION__, (FUNCTION_DECL)))->function_decl.pure_flag
)
&& !TREE_READONLY (node->decl)((non_type_check ((node->decl), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 2849, __FUNCTION__))->base.readonly_flag)
)
2850 {
2851 DECL_PURE_P (node->decl)((tree_check ((node->decl), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 2851, __FUNCTION__, (FUNCTION_DECL)))->function_decl.pure_flag
)
= true;
2852 DECL_LOOPING_CONST_OR_PURE_P (node->decl)((tree_check ((node->decl), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 2852, __FUNCTION__, (FUNCTION_DECL)))->function_decl.looping_const_or_pure_flag
)
= info->looping;
2853 info->changed = true;
2854 }
2855 else if (DECL_LOOPING_CONST_OR_PURE_P (node->decl)((tree_check ((node->decl), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 2855, __FUNCTION__, (FUNCTION_DECL)))->function_decl.looping_const_or_pure_flag
)
2856 && !info->looping)
2857 {
2858 DECL_LOOPING_CONST_OR_PURE_P (node->decl)((tree_check ((node->decl), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 2858, __FUNCTION__, (FUNCTION_DECL)))->function_decl.looping_const_or_pure_flag
)
= false;
2859 info->changed = true;
2860 }
2861 }
2862 else
2863 {
2864 if (DECL_PURE_P (node->decl)((tree_check ((node->decl), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 2864, __FUNCTION__, (FUNCTION_DECL)))->function_decl.pure_flag
)
)
2865 {
2866 DECL_PURE_P (node->decl)((tree_check ((node->decl), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 2866, __FUNCTION__, (FUNCTION_DECL)))->function_decl.pure_flag
)
= false;
2867 DECL_LOOPING_CONST_OR_PURE_P (node->decl)((tree_check ((node->decl), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 2867, __FUNCTION__, (FUNCTION_DECL)))->function_decl.looping_const_or_pure_flag
)
= false;
2868 info->changed = true;
2869 }
2870 }
2871 return false;
2872}
2873
2874/* Set DECL_PURE_P on cgraph_node's decl and on the node's aliases,
2875 if any, to PURE.
2876
2877 When setting the flag, be careful about possible interposition.
2878 Return true if any change was done. */
2879
2880bool
2881cgraph_node::set_pure_flag (bool pure, bool looping)
2882{
2883 struct set_pure_flag_info info = {pure, looping, false};
2884 call_for_symbol_thunks_and_aliases (set_pure_flag_1, &info, !pure, true);
2885 for (struct cgraph_node *n = simd_clones; n != NULL__null;
2886 n = n->simdclone->next_clone)
2887 set_pure_flag_1 (n, &info);
2888 return info.changed;
2889}
2890
2891/* Return true when cgraph_node cannot return or throw and thus
2892 it is safe to ignore its side effects for IPA analysis. */
2893
2894bool
2895cgraph_node::cannot_return_p (void)
2896{
2897 int flags = flags_from_decl_or_type (decl);
2898 if (!opt_for_fn (decl, flag_exceptions)(opts_for_fn (decl)->x_flag_exceptions))
2899 return (flags & ECF_NORETURN(1 << 3)) != 0;
2900 else
2901 return ((flags & (ECF_NORETURN(1 << 3) | ECF_NOTHROW(1 << 6)))
2902 == (ECF_NORETURN(1 << 3) | ECF_NOTHROW(1 << 6)));
2903}
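
A minimal sketch of the flag test used by cannot_return_p, with the ECF_NORETURN and ECF_NOTHROW bit values taken from the expanded macros shown above (1 << 3 and 1 << 6). This is an illustration of the predicate only, not a drop-in replacement for the GCC code; cannot_return and exceptions_enabled are hypothetical names.

#include <cstdio>

constexpr int ECF_NORETURN = 1 << 3;
constexpr int ECF_NOTHROW  = 1 << 6;

static bool
cannot_return (int flags, bool exceptions_enabled)
{
  if (!exceptions_enabled)
    return (flags & ECF_NORETURN) != 0;
  /* With exceptions enabled a noreturn function can still leave the
     caller via a throw, so both bits are required.  */
  return (flags & (ECF_NORETURN | ECF_NOTHROW))
         == (ECF_NORETURN | ECF_NOTHROW);
}

int
main ()
{
  printf ("%d\n", cannot_return (ECF_NORETURN, true));                /* 0 */
  printf ("%d\n", cannot_return (ECF_NORETURN | ECF_NOTHROW, true));  /* 1 */
  return 0;
}
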
2904
2905/* Return true when call of edge cannot lead to return from caller
2906 and thus it is safe to ignore its side effects for IPA analysis
2907 when computing side effects of the caller.
2908 FIXME: We could actually mark all edges that have no reaching
2909 path to the exit block or throw to get better results. */
2910bool
2911cgraph_edge::cannot_lead_to_return_p (void)
2912{
2913 if (caller->cannot_return_p ())
2914 return true;
2915 if (indirect_unknown_callee)
2916 {
2917 int flags = indirect_info->ecf_flags;
2918 if (!opt_for_fn (caller->decl, flag_exceptions)(opts_for_fn (caller->decl)->x_flag_exceptions))
2919 return (flags & ECF_NORETURN(1 << 3)) != 0;
2920 else
2921 return ((flags & (ECF_NORETURN(1 << 3) | ECF_NOTHROW(1 << 6)))
2922 == (ECF_NORETURN(1 << 3) | ECF_NOTHROW(1 << 6)));
2923 }
2924 else
2925 return callee->cannot_return_p ();
2926}
2927
2928/* Return true if the edge may be considered hot. */
2929
2930bool
2931cgraph_edge::maybe_hot_p (void)
2932{
2933 if (!maybe_hot_count_p (NULL__null, count.ipa ()))
2934 return false;
2935 if (caller->frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED
2936 || (callee
2937 && callee->frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED))
2938 return false;
2939 if (caller->frequency > NODE_FREQUENCY_UNLIKELY_EXECUTED
2940 && (callee
2941 && callee->frequency <= NODE_FREQUENCY_EXECUTED_ONCE))
2942 return false;
2943 if (opt_for_fn (caller->decl, optimize_size)(opts_for_fn (caller->decl)->x_optimize_size))
2944 return false;
2945 if (caller->frequency == NODE_FREQUENCY_HOT)
2946 return true;
2947 if (!count.initialized_p ())
2948 return true;
2949 cgraph_node *where = caller->inlined_to ? caller->inlined_to : caller;
2950 if (!where->count.initialized_p ())
2951 return false;
2952 if (caller->frequency == NODE_FREQUENCY_EXECUTED_ONCE)
2953 {
2954 if (count * 2 < where->count * 3)
2955 return false;
2956 }
2957 else if (count * param_hot_bb_frequency_fractionglobal_options.x_param_hot_bb_frequency_fraction < where->count)
2958 return false;
2959 return true;
2960}
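
A simplified restatement of the final profile test in maybe_hot_p, assuming plain integer counts instead of profile_count arithmetic. edge_profile_hot_p and fraction are hypothetical names; fraction stands in for param_hot_bb_frequency_fraction, and the value 1000 below is chosen only for the example.

#include <cstdio>
#include <cstdint>

static bool
edge_profile_hot_p (int64_t edge_count, int64_t function_count,
                    bool executed_once, int fraction)
{
  if (executed_once)
    /* The edge must run at least 1.5x per function execution,
       i.e. it sits inside a loop.  */
    return edge_count * 2 >= function_count * 3;
  /* Otherwise the edge must contribute at least 1/fraction of the
     function's entry count.  */
  return edge_count * fraction >= function_count;
}

int
main ()
{
  printf ("%d\n", edge_profile_hot_p (1, 1000, false, 1000));    /* 1 */
  printf ("%d\n", edge_profile_hot_p (0, 1000, false, 1000));    /* 0 */
  printf ("%d\n", edge_profile_hot_p (1400, 1000, true, 1000));  /* 0: below 1.5x */
  printf ("%d\n", edge_profile_hot_p (1500, 1000, true, 1000));  /* 1 */
  return 0;
}
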
2961
2962/* Worker for cgraph_can_remove_if_no_direct_calls_p. */
2963
2964static bool
2965nonremovable_p (cgraph_node *node, void *)
2966{
2967 return !node->can_remove_if_no_direct_calls_and_refs_p ();
2968}
2969
2970/* Return true if the whole comdat group can be removed if there are no direct
2971 calls to THIS. */
2972
2973bool
2974cgraph_node::can_remove_if_no_direct_calls_p (bool will_inline)
2975{
2976 struct ipa_ref *ref;
2977
2978 /* For local symbols or non-comdat group it is the same as
2979 can_remove_if_no_direct_calls_p. */
2980 if (!externally_visible || !same_comdat_group)
2981 {
2982 if (DECL_EXTERNAL (decl)((contains_struct_check ((decl), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 2982, __FUNCTION__))->decl_common.decl_flag_1)
)
2983 return true;
2984 if (address_taken)
2985 return false;
2986 return !call_for_symbol_and_aliases (nonremovable_p, NULL__null, true);
2987 }
2988
2989 if (will_inline && address_taken)
2990 return false;
2991
2992 /* Otherwise check if we can remove the symbol itself and then verify
2993 that the only uses of the comdat group are direct calls to THIS
2994 or its aliases. */
2995 if (!can_remove_if_no_direct_calls_and_refs_p ())
2996 return false;
2997
2998 /* Check that all refs come from within the comdat group. */
2999 for (int i = 0; iterate_referring (i, ref); i++)
3000 if (ref->referring->get_comdat_group () != get_comdat_group ())
3001 return false;
3002
3003 struct cgraph_node *target = ultimate_alias_target ();
3004 for (cgraph_node *next = dyn_cast<cgraph_node *> (same_comdat_group);
3005 next != this; next = dyn_cast<cgraph_node *> (next->same_comdat_group))
3006 {
3007 if (!externally_visible)
3008 continue;
3009 if (!next->alias
3010 && !next->can_remove_if_no_direct_calls_and_refs_p ())
3011 return false;
3012
3013 /* If we see a different symbol than THIS, be sure to check calls. */
3014 if (next->ultimate_alias_target () != target)
3015 for (cgraph_edge *e = next->callers; e; e = e->next_caller)
3016 if (e->caller->get_comdat_group () != get_comdat_group ()
3017 || will_inline)
3018 return false;
3019
3020 /* If function is not being inlined, we care only about
3021 references outside of the comdat group. */
3022 if (!will_inline)
3023 for (int i = 0; next->iterate_referring (i, ref); i++)
3024 if (ref->referring->get_comdat_group () != get_comdat_group ())
3025 return false;
3026 }
3027 return true;
3028}
3029
3030/* Return true when function cgraph_node can be expected to be removed
3031 from the program when direct calls in this compilation unit are removed.
3032
3033 As a special case, COMDAT functions are
3034 cgraph_can_remove_if_no_direct_calls_p while they are not
3035 cgraph_only_called_directly_p (it is possible they are called from another
3036 unit).
3037
3038 This function behaves as cgraph_only_called_directly_p because eliminating
3039 all uses of a COMDAT function does not necessarily make it disappear from
3040 the program unless we are compiling the whole program or doing LTO. In that
3041 case we know we win, since dynamic linking will not really discard the
3042 linkonce section. */
3043
3044bool
3045cgraph_node::will_be_removed_from_program_if_no_direct_calls_p
3046 (bool will_inline)
3047{
3048 gcc_assert (!inlined_to)((void)(!(!inlined_to) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 3048, __FUNCTION__), 0 : 0))
;
3049 if (DECL_EXTERNAL (decl)((contains_struct_check ((decl), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 3049, __FUNCTION__))->decl_common.decl_flag_1)
)
3050 return true;
3051
3052 if (!in_lto_pglobal_options.x_in_lto_p && !flag_whole_programglobal_options.x_flag_whole_program)
3053 {
3054 /* If the symbol is in a comdat group, we need to verify that the whole
3055 comdat group becomes unreachable. Technically we could skip references from
3056 within the group, too. */
3057 if (!only_called_directly_p ())
3058 return false;
3059 if (same_comdat_group && externally_visible)
3060 {
3061 struct cgraph_node *target = ultimate_alias_target ();
3062
3063 if (will_inline && address_taken)
3064 return true;
3065 for (cgraph_node *next = dyn_cast<cgraph_node *> (same_comdat_group);
3066 next != this;
3067 next = dyn_cast<cgraph_node *> (next->same_comdat_group))
3068 {
3069 if (!externally_visible)
3070 continue;
3071 if (!next->alias
3072 && !next->only_called_directly_p ())
3073 return false;
3074
3075 /* If we see a different symbol than THIS,
3076 be sure to check calls. */
3077 if (next->ultimate_alias_target () != target)
3078 for (cgraph_edge *e = next->callers; e; e = e->next_caller)
3079 if (e->caller->get_comdat_group () != get_comdat_group ()
3080 || will_inline)
3081 return false;
3082 }
3083 }
3084 return true;
3085 }
3086 else
3087 return can_remove_if_no_direct_calls_p (will_inline);
3088}
3089
3090
3091/* Worker for cgraph_only_called_directly_p. */
3092
3093static bool
3094cgraph_not_only_called_directly_p_1 (cgraph_node *node, void *)
3095{
3096 return !node->only_called_directly_or_aliased_p ();
3097}
3098
3099/* Return true when function cgraph_node and all its aliases are only called
3100 directly.
3101 i.e. it is not externally visible, address was not taken and
3102 it is not used in any other non-standard way. */
3103
3104bool
3105cgraph_node::only_called_directly_p (void)
3106{
3107 gcc_assert (ultimate_alias_target () == this)((void)(!(ultimate_alias_target () == this) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 3107, __FUNCTION__), 0 : 0))
;
3108 return !call_for_symbol_and_aliases (cgraph_not_only_called_directly_p_1,
3109 NULL__null, true);
3110}
3111
3112
3113/* Collect all callers of NODE. Worker for collect_callers_of_node. */
3114
3115static bool
3116collect_callers_of_node_1 (cgraph_node *node, void *data)
3117{
3118 vec<cgraph_edge *> *redirect_callers = (vec<cgraph_edge *> *)data;
3119 cgraph_edge *cs;
3120 enum availability avail;
3121 node->ultimate_alias_target (&avail);
3122
3123 if (avail > AVAIL_INTERPOSABLE)
3124 for (cs = node->callers; cs != NULL__null; cs = cs->next_caller)
3125 if (!cs->indirect_inlining_edge
3126 && !cs->caller->thunk)
3127 redirect_callers->safe_push (cs);
3128 return false;
3129}
3130
3131/* Collect all callers of cgraph_node and its aliases that are known to lead to
3132 cgraph_node (i.e. are not overwritable). */
3133
3134auto_vec<cgraph_edge *>
3135cgraph_node::collect_callers (void)
3136{
3137 auto_vec<cgraph_edge *> redirect_callers;
3138 call_for_symbol_thunks_and_aliases (collect_callers_of_node_1,
3139 &redirect_callers, false);
3140 return redirect_callers;
3141}
3142
3143
3144/* Return TRUE if NODE2 is a clone of NODE or is equivalent to it. Return
3145 optimistically true if this cannot be determined. */
3146
3147static bool
3148clone_of_p (cgraph_node *node, cgraph_node *node2)
3149{
3150 node = node->ultimate_alias_target ();
3151 node2 = node2->ultimate_alias_target ();
3152
3153 if (node2->clone_of == node
3154 || node2->former_clone_of == node->decl)
3155 return true;
3156
3157 if (!node->thunk && !node->former_thunk_p ())
3158 {
3159 while (node2
3160 && node->decl != node2->decl
3161 && node->decl != node2->former_clone_of)
3162 node2 = node2->clone_of;
3163 return node2 != NULL__null;
3164 }
3165
3166 /* There are no virtual clones of thunks so check former_clone_of or if we
3167 might have skipped thunks because these adjustments are no longer
3168 necessary. */
3169 while (node->thunk || node->former_thunk_p ())
3170 {
3171 if (!thunk_info::get (node)->this_adjusting)
3172 return false;
3173 /* In case of instrumented expanded thunks, which can have multiple calls
3174 in them, we do not know how to continue and just have to be
3175 optimistic. The same applies if all calls have already been inlined
3176 into the thunk. */
3177 if (!node->callees || node->callees->next_callee)
3178 return true;
3179 node = node->callees->callee->ultimate_alias_target ();
3180
3181 clone_info *info = clone_info::get (node2);
3182 if (!info || !info->param_adjustments
3183 || info->param_adjustments->first_param_intact_p ())
3184 return false;
3185 if (node2->former_clone_of == node->decl
3186 || node2->former_clone_of == node->former_clone_of)
3187 return true;
3188
3189 cgraph_node *n2 = node2;
3190 while (n2 && node->decl != n2->decl)
3191 n2 = n2->clone_of;
3192 if (n2)
3193 return true;
3194 }
3195
3196 return false;
3197}
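
The non-thunk branch above boils down to walking NODE2's clone_of chain and succeeding when some ancestor shares NODE's decl. A hypothetical, heavily simplified sketch of just that walk (ignoring former_clone_of and the thunk handling), using a made-up node type rather than the real cgraph_node:

#include <cstdio>

struct node
{
  const void *decl;   /* stands in for the FUNCTION_DECL */
  node *clone_of;     /* clone ancestry chain */
};

static bool
is_clone_of (const node *n, const node *n2)
{
  for (const node *p = n2; p; p = p->clone_of)
    if (p->decl == n->decl)
      return true;
  return false;
}

int
main ()
{
  int d1, d2;
  node original  = { &d1, nullptr };
  node clone     = { &d1, &original };   /* clone of original */
  node unrelated = { &d2, nullptr };
  printf ("%d\n", is_clone_of (&original, &clone));     /* 1 */
  printf ("%d\n", is_clone_of (&original, &unrelated)); /* 0 */
  return 0;
}
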
3198
3199/* Verify edge count and frequency. */
3200
3201bool
3202cgraph_edge::verify_count ()
3203{
3204 bool error_found = false;
3205 if (!count.verify ())
3206 {
3207 error ("caller edge count invalid");
3208 error_found = true;
3209 }
3210 return error_found;
3211}
3212
3213/* Switch to THIS_CFUN if needed and print STMT to stderr. */
3214static void
3215cgraph_debug_gimple_stmt (function *this_cfun, gimple *stmt)
3216{
3217 bool fndecl_was_null = false;
3218 /* debug_gimple_stmt needs correct cfun */
3219 if (cfun(cfun + 0) != this_cfun)
3220 set_cfun (this_cfun);
3221 /* ...and an actual current_function_decl */
3222 if (!current_function_decl)
3223 {
3224 current_function_decl = this_cfun->decl;
3225 fndecl_was_null = true;
3226 }
3227 debug_gimple_stmt (stmt);
3228 if (fndecl_was_null)
3229 current_function_decl = NULL__null;
3230}
3231
3232/* Verify that call graph edge corresponds to DECL from the associated
3233 statement. Return true if the verification should fail. */
3234
3235bool
3236cgraph_edge::verify_corresponds_to_fndecl (tree decl)
3237{
3238 cgraph_node *node;
3239
3240 if (!decl || callee->inlined_to)
3241 return false;
3242 if (symtab->state == LTO_STREAMING)
3243 return false;
3244 node = cgraph_node::get (decl);
3245
3246 /* We do not know if a node from a different partition is an alias or what it
3247 aliases and therefore cannot do the former_clone_of check reliably. When
3248 body_removed is set, we have lost all information about what the node was
3249 an alias or thunk of, and also cannot proceed. */
3250 if (!node
3251 || node->body_removed
3252 || node->in_other_partition
3253 || callee->icf_merged
3254 || callee->in_other_partition)
3255 return false;
3256
3257 node = node->ultimate_alias_target ();
3258
3259 /* Optimizers can redirect unreachable calls or calls triggering undefined
3260 behavior to __builtin_unreachable or __builtin_unreachable trap. */
3261
3262 if (fndecl_built_in_p (callee->decl, BUILT_IN_NORMAL)
3263 && (DECL_FUNCTION_CODE (callee->decl) == BUILT_IN_UNREACHABLE
3264 || DECL_FUNCTION_CODE (callee->decl) == BUILT_IN_UNREACHABLE_TRAP))
3265 return false;
3266
3267 if (callee->former_clone_of != node->decl
3268 && (node != callee->ultimate_alias_target ())
3269 && !clone_of_p (node, callee))
3270 return true;
3271 else
3272 return false;
3273}
3274
3275/* Disable warnings about missing quoting in GCC diagnostics for
3276 the verification errors. Their format strings don't follow GCC
3277 diagnostic conventions and the calls are ultimately followed by
3278 one to internal_error. */
3279#if __GNUC__4 >= 10
3280# pragma GCC diagnostic push
3281# pragma GCC diagnostic ignored "-Wformat-diag"
3282#endif
3283
3284/* Verify consistency of speculative call in NODE corresponding to STMT
3285 and LTO_STMT_UID. If INDIRECT is set, assume that it is the indirect
3286 edge of the call sequence. Return true if an error is found.
3287
3288 This function is called for every component of an indirect call (direct
3289 edges, the indirect edge and refs). To save duplicated work, do full
3290 testing only when INDIRECT is set. */
3291static bool
3292verify_speculative_call (struct cgraph_node *node, gimple *stmt,
3293 unsigned int lto_stmt_uid,
3294 struct cgraph_edge *indirect)
3295{
3296 if (indirect == NULL__null)
3297 {
3298 for (indirect = node->indirect_calls; indirect;
3299 indirect = indirect->next_callee)
3300 if (indirect->call_stmt == stmt
3301 && indirect->lto_stmt_uid == lto_stmt_uid)
3302 break;
3303 if (!indirect)
3304 {
3305 error ("missing indirect call in speculative call sequence");
3306 return true;
3307 }
3308 if (!indirect->speculative)
3309 {
3310 error ("indirect call in speculative call sequence has no "
3311 "speculative flag");
3312 return true;
3313 }
3314 return false;
3315 }
3316
3317 /* Maximum number of targets. We will probably never want to have more than
3318 this. */
3319 const unsigned int num = 256;
3320 cgraph_edge *direct_calls[num];
3321 ipa_ref *refs[num];
3322
3323 for (unsigned int i = 0; i < num; i++)
3324 {
3325 direct_calls[i] = NULL__null;
3326 refs[i] = NULL__null;
3327 }
3328
3329 cgraph_edge *first_call = NULL__null;
3330 cgraph_edge *prev_call = NULL__null;
3331
3332 for (cgraph_edge *direct = node->callees; direct;
3333 direct = direct->next_callee)
3334 if (direct->call_stmt == stmt && direct->lto_stmt_uid == lto_stmt_uid)
3335 {
3336 if (!first_call)
3337 first_call = direct;
3338 if (prev_call && direct != prev_call->next_callee)
3339 {
3340 error ("speculative edges are not adjacent");
3341 return true;
3342 }
3343 prev_call = direct;
3344 if (!direct->speculative)
3345 {
3346 error ("direct call to %s in speculative call sequence has no "
3347 "speculative flag", direct->callee->dump_name ());
3348 return true;
3349 }
3350 if (direct->speculative_id >= num)
3351 {
3352 error ("direct call to %s in speculative call sequence has "
3353 "speculative_id %i out of range",
3354 direct->callee->dump_name (), direct->speculative_id);
3355 return true;
3356 }
3357 if (direct_calls[direct->speculative_id])
3358 {
3359 error ("duplicate direct call to %s in speculative call sequence "
3360 "with speculative_id %i",
3361 direct->callee->dump_name (), direct->speculative_id);
3362 return true;
3363 }
3364 direct_calls[direct->speculative_id] = direct;
3365 }
3366
3367 if (first_call->call_stmt
3368 && first_call != node->get_edge (first_call->call_stmt))
3369 {
3370 error ("call stmt hash does not point to first direct edge of "
3371 "speculative call sequence");
3372 return true;
3373 }
3374
3375 ipa_ref *ref;
3376 for (int i = 0; node->iterate_reference (i, ref); i++)
3377 if (ref->speculative
3378 && ref->stmt == stmt && ref->lto_stmt_uid == lto_stmt_uid)
3379 {
3380 if (ref->speculative_id >= num)
3381 {
3382 error ("direct call to %s in speculative call sequence has "
3383 "speculative_id %i out of range",
3384 ref->referred->dump_name (), ref->speculative_id);
3385 return true;
3386 }
3387 if (refs[ref->speculative_id])
3388 {
3389 error ("duplicate reference %s in speculative call sequence "
3390 "with speculative_id %i",
3391 ref->referred->dump_name (), ref->speculative_id);
3392 return true;
3393 }
3394 refs[ref->speculative_id] = ref;
3395 }
3396
3397 int num_targets = 0;
3398 for (unsigned int i = 0 ; i < num ; i++)
3399 {
3400 if (refs[i] && !direct_calls[i])
3401 {
3402 error ("missing direct call for speculation %i", i);
3403 return true;
3404 }
3405 if (!refs[i] && direct_calls[i])
3406 {
3407 error ("missing ref for speculation %i", i);
3408 return true;
3409 }
3410 if (refs[i] != NULL__null)
3411 num_targets++;
3412 }
3413
3414 if (num_targets != indirect->num_speculative_call_targets_p ())
3415 {
3416 error ("number of speculative targets %i mismatched with "
3417 "num_speculative_call_targets %i",
3418 num_targets,
3419 indirect->num_speculative_call_targets_p ());
3420 return true;
3421 }
3422 return false;
3423}
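
The core invariant verified above is that speculative direct edges and speculative references pair up by speculative_id, and that the number of paired ids equals the indirect edge's num_speculative_call_targets. A standalone sketch of that pairing check, using a hypothetical bitset representation instead of the real cgraph data structures:

#include <bitset>
#include <cstdio>

static bool
speculation_ids_consistent (const std::bitset<256> &direct_ids,
                            const std::bitset<256> &ref_ids,
                            int num_speculative_call_targets)
{
  /* Every id must be claimed by both a direct edge and a reference.  */
  if (direct_ids != ref_ids)
    return false;
  /* The paired ids must account for all recorded speculative targets.  */
  return (int) direct_ids.count () == num_speculative_call_targets;
}

int
main ()
{
  std::bitset<256> direct, refs;
  direct.set (0); refs.set (0);   /* one speculative target, id 0 */
  printf ("%d\n", speculation_ids_consistent (direct, refs, 1)); /* 1 */
  direct.set (1);                 /* direct edge without a matching ref */
  printf ("%d\n", speculation_ids_consistent (direct, refs, 2)); /* 0 */
  return 0;
}
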
3424
3425/* Verify cgraph nodes of given cgraph node. */
3426DEBUG_FUNCTION__attribute__ ((__used__)) void
3427cgraph_node::verify_node (void)
3428{
3429 cgraph_edge *e;
3430 function *this_cfun = DECL_STRUCT_FUNCTION (decl)((tree_check ((decl), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 3430, __FUNCTION__, (FUNCTION_DECL)))->function_decl.f)
;
3431 basic_block this_block;
3432 gimple_stmt_iterator gsi;
3433 bool error_found = false;
3434 int i;
3435 ipa_ref *ref = NULL__null;
3436
3437 if (seen_error ())
3438 return;
3439
3440 timevar_push (TV_CGRAPH_VERIFY);
3441 error_found |= verify_base ();
3442 for (e = callees; e; e = e->next_callee)
3443 if (e->aux)
3444 {
3445 error ("aux field set for edge %s->%s",
3446 identifier_to_locale (e->caller->name ()),
3447 identifier_to_locale (e->callee->name ()));
3448 error_found = true;
3449 }
3450 if (!count.verify ())
3451 {
3452 error ("cgraph count invalid");
3453 error_found = true;
3454 }
3455 if (inlined_to && same_comdat_group)
3456 {
3457 error ("inline clone in same comdat group list");
3458 error_found = true;
3459 }
3460 if (inlined_to && !count.compatible_p (inlined_to->count))
3461 {
3462 error ("inline clone count is not compatible");
3463 count.debug ();
3464 inlined_to->count.debug ();
3465 error_found = true;
3466 }
3467 if (tp_first_run < 0)
3468 {
3469 error ("tp_first_run must be non-negative");
3470 error_found = true;
3471 }
3472 if (!definition && !in_other_partition && local)
3473 {
3474 error ("local symbols must be defined");
3475 error_found = true;
3476 }
3477 if (inlined_to && externally_visible)
3478 {
3479 error ("externally visible inline clone");
3480 error_found = true;
3481 }
3482 if (inlined_to && address_taken)
3483 {
3484 error ("inline clone with address taken");
3485 error_found = true;
3486 }
3487 if (inlined_to && force_output)
3488 {
3489 error ("inline clone is forced to output");
3490 error_found = true;
3491 }
3492 if (symtab->state != LTO_STREAMING)
3493 {
3494 if (calls_comdat_local && !same_comdat_group)
3495 {
3496 error ("calls_comdat_local is set outside of a comdat group");
3497 error_found = true;
3498 }
3499 if (!inlined_to && calls_comdat_local != check_calls_comdat_local_p ())
3500 {
3501 error ("invalid calls_comdat_local flag");
3502 error_found = true;
3503 }
3504 }
3505 if (DECL_IS_MALLOC (decl)((tree_check ((decl), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 3505, __FUNCTION__, (FUNCTION_DECL)))->function_decl.malloc_flag
)
3506 && !POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (decl)))(((enum tree_code) (((contains_struct_check ((((contains_struct_check
((decl), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 3506, __FUNCTION__))->typed.type)), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 3506, __FUNCTION__))->typed.type))->base.code) == POINTER_TYPE
|| ((enum tree_code) (((contains_struct_check ((((contains_struct_check
((decl), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 3506, __FUNCTION__))->typed.type)), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 3506, __FUNCTION__))->typed.type))->base.code) == REFERENCE_TYPE
)
)
3507 {
3508 error ("malloc attribute should be used for a function that "
3509 "returns a pointer");
3510 error_found = true;
3511 }
3512 if (definition
3513 && externally_visible
3514 /* For aliases in lto1 free_lang_data doesn't guarantee preservation
3515 of opt_for_fn (decl, flag_semantic_interposition). See PR105399. */
3516 && (!alias || !in_lto_pglobal_options.x_in_lto_p)
3517 && semantic_interposition
3518 != opt_for_fn (decl, flag_semantic_interposition)(opts_for_fn (decl)->x_flag_semantic_interposition))
3519 {
3520 error ("semantic interposition mismatch");
3521 error_found = true;
3522 }
3523 for (e = indirect_calls; e; e = e->next_callee)
3524 {
3525 if (e->aux)
3526 {
3527 error ("aux field set for indirect edge from %s",
3528 identifier_to_locale (e->caller->name ()));
3529 error_found = true;
3530 }
3531 if (!e->count.compatible_p (count))
3532 {
3533 error ("edge count is not compatible with function count");
3534 e->count.debug ();
3535 count.debug ();
3536 error_found = true;
3537 }
3538 if (!e->indirect_unknown_callee
3539 || !e->indirect_info)
3540 {
3541 error ("An indirect edge from %s is not marked as indirect or has "
3542 "associated indirect_info, the corresponding statement is: ",
3543 identifier_to_locale (e->caller->name ()));
3544 cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
3545 error_found = true;
3546 }
3547 if (e->call_stmt && e->lto_stmt_uid)
3548 {
3549 error ("edge has both call_stmt and lto_stmt_uid set");
3550 error_found = true;
3551 }
3552 }
3553 bool check_comdat = comdat_local_p ();
3554 for (e = callers; e; e = e->next_caller)
3555 {
3556 if (e->verify_count ())
3557 error_found = true;
3558 if (check_comdat
3559 && !in_same_comdat_group_p (e->caller))
3560 {
3561 error ("comdat-local function called by %s outside its comdat",
3562 identifier_to_locale (e->caller->name ()));
3563 error_found = true;
3564 }
3565 if (!e->inline_failed)
3566 {
3567 if (inlined_to
3568 != (e->caller->inlined_to
3569 ? e->caller->inlined_to : e->caller))
3570 {
3571 error ("inlined_to pointer is wrong");
3572 error_found = true;
3573 }
3574 if (callers->next_caller)
3575 {
3576 error ("multiple inline callers");
3577 error_found = true;
3578 }
3579 }
3580 else
3581 if (inlined_to)
3582 {
3583 error ("inlined_to pointer set for noninline callers");
3584 error_found = true;
3585 }
3586 }
3587 for (e = callees; e; e = e->next_callee)
3588 {
3589 if (e->verify_count ())
3590 error_found = true;
3591 if (!e->count.compatible_p (count))
3592 {
3593 error ("edge count is not compatible with function count");
3594 e->count.debug ();
3595 count.debug ();
3596 error_found = true;
3597 }
3598 if (gimple_has_body_p (e->caller->decl)
3599 && !e->caller->inlined_to
3600 && !e->speculative
3601 /* Optimized out calls are redirected to __builtin_unreachable. */
3602 && (e->count.nonzero_p ()
3603 || ! e->callee->decl
3604 || !(fndecl_built_in_p (e->callee->decl, BUILT_IN_UNREACHABLE)
3605 || fndecl_built_in_p (e->callee->decl,
3606 BUILT_IN_UNREACHABLE_TRAP)))
3607 && count
3608 == ENTRY_BLOCK_PTR_FOR_FN (DECL_STRUCT_FUNCTION (decl))((((tree_check ((decl), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 3608, __FUNCTION__, (FUNCTION_DECL)))->function_decl.f))
->cfg->x_entry_block_ptr)
->count
3609 && (!e->count.ipa_p ()
3610 && e->count.differs_from_p (gimple_bb (e->call_stmt)->count)))
3611 {
3612 error ("caller edge count does not match BB count");
3613 fprintf (stderrstderr, "edge count: ");
3614 e->count.dump (stderrstderr);
3615 fprintf (stderrstderr, "\n bb count: ");
3616 gimple_bb (e->call_stmt)->count.dump (stderrstderr);
3617 fprintf (stderrstderr, "\n");
3618 error_found = true;
3619 }
3620 if (e->call_stmt && e->lto_stmt_uid)
3621 {
3622 error ("edge has both call_stmt and lto_stmt_uid set");
3623 error_found = true;
3624 }
3625 if (e->speculative
3626 && verify_speculative_call (e->caller, e->call_stmt, e->lto_stmt_uid,
3627 NULL__null))
3628 error_found = true;
3629 }
3630 for (e = indirect_calls; e; e = e->next_callee)
3631 {
3632 if (e->verify_count ())
3633 error_found = true;
3634 if (gimple_has_body_p (e->caller->decl)
3635 && !e->caller->inlined_to
3636 && !e->speculative
3637 && e->count.ipa_p ()
3638 && count
3639 == ENTRY_BLOCK_PTR_FOR_FN (DECL_STRUCT_FUNCTION (decl))((((tree_check ((decl), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 3639, __FUNCTION__, (FUNCTION_DECL)))->function_decl.f))
->cfg->x_entry_block_ptr)
->count
3640 && (!e->count.ipa_p ()
3641 && e->count.differs_from_p (gimple_bb (e->call_stmt)->count)))
3642 {
3643 error ("indirect call count does not match BB count");
3644 fprintf (stderrstderr, "edge count: ");
3645 e->count.dump (stderrstderr);
3646 fprintf (stderrstderr, "\n bb count: ");
3647 gimple_bb (e->call_stmt)->count.dump (stderrstderr);
3648 fprintf (stderrstderr, "\n");
3649 error_found = true;
3650 }
3651 if (e->speculative
3652 && verify_speculative_call (e->caller, e->call_stmt, e->lto_stmt_uid,
3653 e))
3654 error_found = true;
3655 }
3656 for (i = 0; iterate_reference (i, ref); i++)
3657 {
3658 if (ref->stmt && ref->lto_stmt_uid)
3659 {
3660 error ("reference has both stmt and lto_stmt_uid set");
3661 error_found = true;
3662 }
3663 if (ref->speculative
3664 && verify_speculative_call (this, ref->stmt,
3665 ref->lto_stmt_uid, NULL__null))
3666 error_found = true;
3667 }
3668
3669 if (!callers && inlined_to)
3670 {
3671 error ("inlined_to pointer is set but no predecessors found");
3672 error_found = true;
3673 }
3674 if (inlined_to == this)
3675 {
3676 error ("inlined_to pointer refers to itself");
3677 error_found = true;
3678 }
3679
3680 if (clone_of)
3681 {
3682 cgraph_node *first_clone = clone_of->clones;
3683 if (first_clone != this)
3684 {
3685 if (prev_sibling_clone->clone_of != clone_of)
3686 {
3687 error ("cgraph_node has wrong clone_of");
3688 error_found = true;
3689 }
3690 }
3691 }
3692 if (clones)
3693 {
3694 cgraph_node *n;
3695 for (n = clones; n; n = n->next_sibling_clone)
3696 if (n->clone_of != this)
3697 break;
3698 if (n)
3699 {
3700 error ("cgraph_node has wrong clone list");
3701 error_found = true;
3702 }
3703 }
3704 if ((prev_sibling_clone || next_sibling_clone) && !clone_of)
3705 {
3706 error ("cgraph_node is in clone list but it is not clone");
3707 error_found = true;
3708 }
3709 if (!prev_sibling_clone && clone_of && clone_of->clones != this)
3710 {
3711 error ("cgraph_node has wrong prev_clone pointer");
3712 error_found = true;
3713 }
3714 if (prev_sibling_clone && prev_sibling_clone->next_sibling_clone != this)
3715 {
3716 error ("double linked list of clones corrupted");
3717 error_found = true;
3718 }
3719
3720 if (analyzed && alias)
3721 {
3722 bool ref_found = false;
3723 int i;
3724 ipa_ref *ref = NULL__null;
3725
3726 if (callees)
3727 {
3728 error ("Alias has call edges");
3729 error_found = true;
3730 }
3731 for (i = 0; iterate_reference (i, ref); i++)
3732 if (ref->use != IPA_REF_ALIAS)
3733 {
3734 error ("Alias has non-alias reference");
3735 error_found = true;
3736 }
3737 else if (ref_found)
3738 {
3739 error ("Alias has more than one alias reference");
3740 error_found = true;
3741 }
3742 else
3743 ref_found = true;
3744 if (!ref_found)
3745 {
3746 error ("Analyzed alias has no reference");
3747 error_found = true;
3748 }
3749 }
3750
3751 if (analyzed && thunk)
3752 {
3753 if (!callees)
3754 {
3755 error ("No edge out of thunk node");
3756 error_found = true;
3757 }
3758 else if (callees->next_callee)
3759 {
3760 error ("More than one edge out of thunk node");
3761 error_found = true;
3762 }
3763 if (gimple_has_body_p (decl) && !inlined_to)
3764 {
3765 error ("Thunk is not supposed to have body");
3766 error_found = true;
3767 }
3768 }
3769 else if (analyzed && gimple_has_body_p (decl)
3770 && !TREE_ASM_WRITTEN (decl)((decl)->base.asm_written_flag)
3771 && (!DECL_EXTERNAL (decl)((contains_struct_check ((decl), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 3771, __FUNCTION__))->decl_common.decl_flag_1)
|| inlined_to)
3772 && !flag_wpaglobal_options.x_flag_wpa)
3773 {
3774 if ((this_cfun->curr_properties & PROP_assumptions_done(1 << 19)) != 0)
3775 ;
3776 else if (this_cfun->cfg)
3777 {
3778 hash_set<gimple *> stmts;
3779
3780 /* Reach the trees by walking over the CFG, and note the
3781 enclosing basic-blocks in the call edges. */
3782 FOR_EACH_BB_FN (this_block, this_cfun)for (this_block = (this_cfun)->cfg->x_entry_block_ptr->
next_bb; this_block != (this_cfun)->cfg->x_exit_block_ptr
; this_block = this_block->next_bb)
3783 {
3784 for (gsi = gsi_start_phis (this_block);
3785 !gsi_end_p (gsi); gsi_next (&gsi))
3786 stmts.add (gsi_stmt (gsi));
3787 for (gsi = gsi_start_bb (this_block);
3788 !gsi_end_p (gsi);
3789 gsi_next (&gsi))
3790 {
3791 gimple *stmt = gsi_stmt (gsi);
3792 stmts.add (stmt);
3793 if (is_gimple_call (stmt))
3794 {
3795 cgraph_edge *e = get_edge (stmt);
3796 tree decl = gimple_call_fndecl (stmt);
3797 if (e)
3798 {
3799 if (e->aux)
3800 {
3801 error ("shared call_stmt:");
3802 cgraph_debug_gimple_stmt (this_cfun, stmt);
3803 error_found = true;
3804 }
3805 if (!e->indirect_unknown_callee)
3806 {
3807 if (e->verify_corresponds_to_fndecl (decl))
3808 {
3809 error ("edge points to wrong declaration:");
3810 debug_tree (e->callee->decl);
3811 fprintf (stderrstderr," Instead of:");
3812 debug_tree (decl);
3813 error_found = true;
3814 }
3815 }
3816 else if (decl)
3817 {
3818 error ("an indirect edge with unknown callee "
3819 "corresponding to a call_stmt with "
3820 "a known declaration:");
3821 error_found = true;
3822 cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
3823 }
3824 e->aux = (void *)1;
3825 }
3826 else if (decl)
3827 {
3828 error ("missing callgraph edge for call stmt:");
3829 cgraph_debug_gimple_stmt (this_cfun, stmt);
3830 error_found = true;
3831 }
3832 }
3833 }
3834 }
3835 for (i = 0; iterate_reference (i, ref); i++)
3836 if (ref->stmt && !stmts.contains (ref->stmt))
3837 {
3838 error ("reference to dead statement");
3839 cgraph_debug_gimple_stmt (this_cfun, ref->stmt);
3840 error_found = true;
3841 }
3842 }
3843 else
3844 /* No CFG available?! */
3845 gcc_unreachable ()(fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 3845, __FUNCTION__))
;
3846
3847 for (e = callees; e; e = e->next_callee)
3848 {
3849 if (!e->aux && !e->speculative)
3850 {
3851 error ("edge %s->%s has no corresponding call_stmt",
3852 identifier_to_locale (e->caller->name ()),
3853 identifier_to_locale (e->callee->name ()));
3854 cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
3855 error_found = true;
3856 }
3857 e->aux = 0;
3858 }
3859 for (e = indirect_calls; e; e = e->next_callee)
3860 {
3861 if (!e->aux && !e->speculative)
3862 {
3863 error ("an indirect edge from %s has no corresponding call_stmt",
3864 identifier_to_locale (e->caller->name ()));
3865 cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
3866 error_found = true;
3867 }
3868 e->aux = 0;
3869 }
3870 }
3871
3872 if (nested_function_info *info = nested_function_info::get (this))
3873 {
3874 if (info->nested != NULL__null)
3875 {
3876 for (cgraph_node *n = info->nested; n != NULL__null;
3877 n = next_nested_function (n))
3878 {
3879 nested_function_info *ninfo = nested_function_info::get (n);
3880 if (ninfo->origin == NULL__null)
3881 {
3882 error ("missing origin for a node in a nested list");
3883 error_found = true;
3884 }
3885 else if (ninfo->origin != this)
3886 {
3887 error ("origin points to a different parent");
3888 error_found = true;
3889 break;
3890 }
3891 }
3892 }
3893 if (info->next_nested != NULL__null && info->origin == NULL__null)
3894 {
3895 error ("missing origin for a node in a nested list");
3896 error_found = true;
3897 }
3898 }
3899
3900 if (error_found)
3901 {
3902 dump (stderrstderr);
3903 internal_error ("verify_cgraph_node failed");
3904 }
3905 timevar_pop (TV_CGRAPH_VERIFY);
3906}
3907
3908/* Verify whole cgraph structure. */
3909DEBUG_FUNCTION__attribute__ ((__used__)) void
3910cgraph_node::verify_cgraph_nodes (void)
3911{
3912 cgraph_node *node;
3913
3914 if (seen_error ())
3915 return;
3916
3917 FOR_EACH_FUNCTION (node)for ((node) = symtab->first_function (); (node); (node) = symtab
->next_function ((node)))
3918 node->verify ();
3919}
3920
3921#if __GNUC__4 >= 10
3922# pragma GCC diagnostic pop
3923#endif
3924
3925/* Walk the alias chain to return the function cgraph_node is an alias of.
3926 Walk through thunks, too.
3927 When AVAILABILITY is non-NULL, get minimal availability in the chain.
3928 When REF is non-NULL, assume that reference happens in symbol REF
3929 when determining the availability. */
3930
3931cgraph_node *
3932cgraph_node::function_symbol (enum availability *availability,
3933 struct symtab_node *ref)
3934{
3935 cgraph_node *node = ultimate_alias_target (availability, ref);
3936
3937 while (node->thunk)
3938 {
3939 enum availability a;
3940
3941 ref = node;
3942 node = node->callees->callee;
3943 node = node->ultimate_alias_target (availability ? &a : NULL__null, ref);
3944 if (availability && a < *availability)
3945 *availability = a;
3946 }
3947 return node;
3948}
3949
3950/* Walk the alias chain to return the function cgraph_node is an alias of.
3951 Walk through non-virtual thunks, too. Thus we return either a function
3952 or a virtual thunk node.
3953 When AVAILABILITY is non-NULL, get minimal availability in the chain.
3954 When REF is non-NULL, assume that reference happens in symbol REF
3955 when determining the availability. */
3956
3957cgraph_node *
3958cgraph_node::function_or_virtual_thunk_symbol
3959 (enum availability *availability,
3960 struct symtab_node *ref)
3961{
3962 cgraph_node *node = ultimate_alias_target (availability, ref);
3963
3964 while (node->thunk && !thunk_info::get (node)->virtual_offset_p)
3965 {
3966 enum availability a;
3967
3968 ref = node;
3969 node = node->callees->callee;
3970 node = node->ultimate_alias_target (availability ? &a : NULL__null, ref);
3971 if (availability && a < *availability)
3972 *availability = a;
3973 }
3974 return node;
3975}
3976
3977/* When doing LTO, read cgraph_node's body from disk if it is not already
3978 present. Also perform any necessary clone materializations. */
3979
3980bool
3981cgraph_node::get_untransformed_body ()
3982{
3983 lto_file_decl_data *file_data;
3984 const char *data, *name;
3985 size_t len;
3986 tree decl = this->decl;
3987
3988 /* See if there is a clone to be materialized.
3989 (Inline clones do not need materialization, but we can be seeing
3990 an inline clone of a real clone.) */
3991 cgraph_node *p = this;
3992 for (cgraph_node *c = clone_of; c; c = c->clone_of)
3993 {
3994 if (c->decl != decl)
3995 p->materialize_clone ();
3996 p = c;
3997 }
3998
3999 /* Check if the body is already there. Either we have a gimple body, or
4000 the function is a thunk, in which case we set DECL_ARGUMENTS. */
4001 if (DECL_ARGUMENTS (decl)((tree_check ((decl), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 4001, __FUNCTION__, (FUNCTION_DECL)))->function_decl.arguments
)
|| gimple_has_body_p (decl))
4002 return false;
4003
4004 gcc_assert (in_lto_p && !DECL_RESULT (decl))((void)(!(global_options.x_in_lto_p && !((tree_check (
(decl), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 4004, __FUNCTION__, (FUNCTION_DECL)))->decl_non_common.result
)) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 4004, __FUNCTION__), 0 : 0))
;
4005
4006 timevar_push (TV_IPA_LTO_GIMPLE_IN);
4007
4008 file_data = lto_file_data;
4009 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl))((const char *) (tree_check ((decl_assembler_name (decl)), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 4009, __FUNCTION__, (IDENTIFIER_NODE)))->identifier.id.str
)
;
4010
4011 /* We may have renamed the declaration, e.g., a static function. */
4012 name = lto_get_decl_name_mapping (file_data, name);
4013 struct lto_in_decl_state *decl_state
4014 = lto_get_function_in_decl_state (file_data, decl);
4015
4016 cgraph_node *origin = this;
4017 while (origin->clone_of)
4018 origin = origin->clone_of;
4019
4020 int stream_order = origin->order - file_data->order_base;
4021 data = lto_get_section_data (file_data, LTO_section_function_body,
4022 name, stream_order, &len,
4023 decl_state->compressed);
4024 if (!data)
4025 fatal_error (input_location, "%s: section %s.%d is missing",
4026 file_data->file_name, name, stream_order);
4027
4028 gcc_assert (DECL_STRUCT_FUNCTION (decl) == NULL)((void)(!(((tree_check ((decl), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 4028, __FUNCTION__, (FUNCTION_DECL)))->function_decl.f) ==
__null) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 4028, __FUNCTION__), 0 : 0))
;
4029
4030 if (!quiet_flagglobal_options.x_quiet_flag)
4031 fprintf (stderrstderr, " in:%s", IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl))((const char *) (tree_check ((decl_assembler_name (decl)), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 4031, __FUNCTION__, (IDENTIFIER_NODE)))->identifier.id.str
)
);
4032 lto_input_function_body (file_data, this, data);
4033 lto_stats.num_function_bodies++;
4034 lto_free_section_data (file_data, LTO_section_function_body, name,
4035 data, len, decl_state->compressed);
4036 lto_free_function_in_decl_state_for_node (this);
4037 /* Keep lto file data so ipa-inline-analysis knows about cross module
4038 inlining. */
4039
4040 timevar_pop (TV_IPA_LTO_GIMPLE_IN);
4041
4042 return true;
4043}
4044
4045/* Prepare function body. When doing LTO, read cgraph_node's body from disk
4046 if it is not already present. When some IPA transformations are scheduled,
4047 apply them. */
4048
4049bool
4050cgraph_node::get_body (void)
4051{
4052 bool updated;
4053
4054 updated = get_untransformed_body ();
4055
4056 /* Getting transformed body makes no sense for inline clones;
4057 we should never use this on real clones because they are materialized
4058 early.
4059 TODO: Materializing clones here will likely lead to smaller LTRANS
4060 footprint. */
4061 gcc_assert (!inlined_to && !clone_of)((void)(!(!inlined_to && !clone_of) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 4061, __FUNCTION__), 0 : 0))
;
4062 if (ipa_transforms_to_apply.exists ())
4063 {
4064 opt_pass *saved_current_pass = current_pass;
4065 FILE *saved_dump_file = dump_file;
4066 const char *saved_dump_file_name = dump_file_name;
4067 dump_flags_t saved_dump_flags = dump_flags;
4068 dump_file_name = NULL__null;
4069 set_dump_file (NULL__null);
4070
4071 push_cfun (DECL_STRUCT_FUNCTION (decl)((tree_check ((decl), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 4071, __FUNCTION__, (FUNCTION_DECL)))->function_decl.f)
);
4072
4073 update_ssa (TODO_update_ssa_only_virtuals(1 << 14));
4074 execute_all_ipa_transforms (true);
4075 cgraph_edge::rebuild_edges ();
4076 free_dominance_info (CDI_DOMINATORS);
4077 free_dominance_info (CDI_POST_DOMINATORS);
4078 pop_cfun ();
4079 updated = true;
4080
4081 current_pass = saved_current_pass;
4082 set_dump_file (saved_dump_file);
4083 dump_file_name = saved_dump_file_name;
4084 dump_flags = saved_dump_flags;
4085 }
4086 return updated;
4087}
4088
4089/* Return the DECL_STRUCT_FUNCTION of the function. */
4090
4091struct function *
4092cgraph_node::get_fun () const
4093{
4094 const cgraph_node *node = this;
4095 struct function *fun = DECL_STRUCT_FUNCTION (node->decl)((tree_check ((node->decl), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 4095, __FUNCTION__, (FUNCTION_DECL)))->function_decl.f)
;
4096
4097 while (!fun && node->clone_of)
4098 {
4099 node = node->clone_of;
4100 fun = DECL_STRUCT_FUNCTION (node->decl)((tree_check ((node->decl), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 4100, __FUNCTION__, (FUNCTION_DECL)))->function_decl.f)
;
4101 }
4102
4103 return fun;
4104}
4105
4106/* Reset all state within cgraph.cc so that we can rerun the compiler
4107 within the same process. For use by toplev::finalize. */
4108
4109void
4110cgraph_cc_finalize (void)
4111{
4112 nested_function_info::release ();
4113 thunk_info::release ();
4114 clone_info::release ();
4115 symtab = NULL__null;
4116
4117 x_cgraph_nodes_queue = NULL__null;
4118
4119 cgraph_fnver_htab = NULL__null;
4120 version_info_node = NULL__null;
4121}
4122
4123/* A worker for call_for_symbol_and_aliases. */
4124
4125bool
4126cgraph_node::call_for_symbol_and_aliases_1 (bool (*callback) (cgraph_node *,
4127 void *),
4128 void *data,
4129 bool include_overwritable)
4130{
4131 ipa_ref *ref;
4132 FOR_EACH_ALIAS (this, ref)for (unsigned ref_iter_ = 0; (this)->iterate_direct_aliases
(ref_iter_, ref); ref_iter_++)
4133 {
4134 cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring);
4135 if (include_overwritable
4136 || alias->get_availability () > AVAIL_INTERPOSABLE)
4137 if (alias->call_for_symbol_and_aliases (callback, data,
4138 include_overwritable))
4139 return true;
4140 }
4141 return false;
4142}
4143
4144/* Return true if NODE has thunk. */
4145
4146bool
4147cgraph_node::has_thunk_p (cgraph_node *node, void *)
4148{
4149 for (cgraph_edge *e = node->callers; e; e = e->next_caller)
4150 if (e->caller->thunk)
4151 return true;
4152 return false;
4153}
4154
4155/* Expected frequency of executions within the function. */
4156
4157sreal
4158cgraph_edge::sreal_frequency ()
4159{
4160 return count.to_sreal_scale (caller->inlined_to
4161 ? caller->inlined_to->count
4162 : caller->count);
4163}
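
In other words, the expected frequency is the edge count scaled by the count of the function body the call site lives in (the inlined-to root when the caller is an inline clone). A back-of-the-envelope version using plain doubles, purely for illustration; the real code uses profile_count::to_sreal_scale, and edge_frequency is a made-up name:

#include <cstdio>

static double
edge_frequency (double edge_count, double enclosing_function_count)
{
  if (enclosing_function_count <= 0)
    return 0.0;
  return edge_count / enclosing_function_count;
}

int
main ()
{
  /* A call executed 5000 times inside a function entered 1000 times
     runs about 5 times per invocation.  */
  printf ("%.1f\n", edge_frequency (5000, 1000)); /* 5.0 */
  return 0;
}
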
4164
4165
4166/* During LTO stream-in this can be used to check whether a call can possibly
4167 be internal to the current translation unit. */
4168
4169bool
4170cgraph_edge::possibly_call_in_translation_unit_p (void)
4171{
4172 gcc_checking_assert (in_lto_p && caller->prevailing_p ())((void)(!(global_options.x_in_lto_p && caller->prevailing_p
()) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 4172, __FUNCTION__), 0 : 0))
;
4173
4174 /* With incremental linking we may end up getting the function body later. */
4175 if (flag_incremental_linkglobal_options.x_flag_incremental_link == INCREMENTAL_LINK_LTO)
4176 return true;
4177
4178 /* We could be smarter here and avoid streaming in indirect calls we can't
4179 track, but that would require arranging to stream the indirect call
4180 summary first. */
4181 if (!callee)
4182 return true;
4183
4184 /* If callee is local to the original translation unit, it will be
4185 defined. */
4186 if (!TREE_PUBLIC (callee->decl)((callee->decl)->base.public_flag) && !DECL_EXTERNAL (callee->decl)((contains_struct_check ((callee->decl), (TS_DECL_COMMON),
"/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 4186, __FUNCTION__))->decl_common.decl_flag_1)
)
4187 return true;
4188
4189 /* Otherwise we need to look up the prevailing symbol (the symbol table is not
4190 merged yet) and see if it is a definition. In fact we may also resolve aliases,
4191 but that is probably not too important. */
4192 symtab_node *node = callee;
4193 for (int n = 10; node->previous_sharing_asm_name && n ; n--)
4194 node = node->previous_sharing_asm_name;
4195 if (node->previous_sharing_asm_name)
4196 node = symtab_node::get_for_asmname (DECL_ASSEMBLER_NAME (callee->decl)decl_assembler_name (callee->decl));
4197 gcc_assert (TREE_PUBLIC (node->decl) || DECL_EXTERNAL (node->decl))((void)(!(((node->decl)->base.public_flag) || ((contains_struct_check
((node->decl), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 4197, __FUNCTION__))->decl_common.decl_flag_1)) ? fancy_abort
("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 4197, __FUNCTION__), 0 : 0))
;
4198 return node->get_availability () >= AVAIL_INTERPOSABLE;
4199}
4200
4201/* Return num_speculative_targets of this edge. */
4202
4203int
4204cgraph_edge::num_speculative_call_targets_p (void)
4205{
4206 return indirect_info ? indirect_info->num_speculative_call_targets : 0;
4207}
4208
4209/* Check if the function calls a comdat-local function. This is used to
4210 recompute the calls_comdat_local flag after function transformations. */
4211bool
4212cgraph_node::check_calls_comdat_local_p ()
4213{
4214 for (cgraph_edge *e = callees; e; e = e->next_callee)
4215 if (e->inline_failed
4216 ? e->callee->comdat_local_p ()
4217 : e->callee->check_calls_comdat_local_p ())
4218 return true;
4219 return false;
4220}
4221
4222/* Return true if this node represents a former, i.e. an expanded, thunk. */
4223
4224bool
4225cgraph_node::former_thunk_p (void)
4226{
4227 if (thunk)
4228 return false;
4229 thunk_info *i = thunk_info::get (this);
4230 if (!i)
4231 return false;
4232 gcc_checking_assert (i->fixed_offset || i->virtual_offset_p((void)(!(i->fixed_offset || i->virtual_offset_p || i->
indirect_offset) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 4233, __FUNCTION__), 0 : 0))
4233 || i->indirect_offset)((void)(!(i->fixed_offset || i->virtual_offset_p || i->
indirect_offset) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 4233, __FUNCTION__), 0 : 0))
;
4234 return true;
4235}
4236
4237/* A stashed copy of "symtab" for use by selftest::symbol_table_test.
4238 This needs to be a global so that it can be a GC root, and thus
4239 prevent the stashed copy from being garbage-collected if the GC runs
4240 during a symbol_table_test. */
4241
4242symbol_table *saved_symtab;
4243
4244#if CHECKING_P1
4245
4246namespace selftest {
4247
4248/* class selftest::symbol_table_test. */
4249
4250/* Constructor. Store the old value of symtab, and create a new one. */
4251
4252symbol_table_test::symbol_table_test ()
4253{
4254 gcc_assert (saved_symtab == NULL)((void)(!(saved_symtab == __null) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 4254, __FUNCTION__), 0 : 0))
;
4255 saved_symtab = symtab;
4256 symtab = new (ggc_alloc<symbol_table> ()) symbol_table ();
4257}
4258
4259/* Destructor. Restore the old value of symtab. */
4260
4261symbol_table_test::~symbol_table_test ()
4262{
4263 gcc_assert (saved_symtab != NULL)((void)(!(saved_symtab != __null) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 4263, __FUNCTION__), 0 : 0))
;
4264 symtab = saved_symtab;
4265 saved_symtab = NULL__null;
4266}
4267
4268/* Verify that symbol_table_test works. */
4269
4270static void
4271test_symbol_table_test ()
4272{
4273 /* Simulate running two selftests involving symbol tables. */
4274 for (int i = 0; i < 2; i++)
4275 {
4276 symbol_table_test stt;
4277 tree test_decl = build_decl (UNKNOWN_LOCATION((location_t) 0), FUNCTION_DECL,
4278 get_identifier ("test_decl")(__builtin_constant_p ("test_decl") ? get_identifier_with_length
(("test_decl"), strlen ("test_decl")) : get_identifier ("test_decl"
))
,
4279 build_function_type_list (void_type_nodeglobal_trees[TI_VOID_TYPE],
4280 NULL_TREE(tree) __null));
4281 cgraph_node *node = cgraph_node::get_create (test_decl);
4282 gcc_assert (node)((void)(!(node) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 4282, __FUNCTION__), 0 : 0))
;
4283
4284 /* Verify that the node has order 0 on both iterations,
4285 and thus that nodes have predictable dump names in selftests. */
4286 ASSERT_EQ (node->order, 0)do { const char *desc_ = "ASSERT_EQ (" "(node->order)" ", "
"(0)" ")"; if (((node->order)) == ((0))) ::selftest::pass
((((::selftest::location ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 4286, __FUNCTION__)))), desc_); else ::selftest::fail ((((::
selftest::location ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 4286, __FUNCTION__)))), desc_); } while (0)
;
4287 ASSERT_STREQ (node->dump_name (), "test_decl/0")do { ::selftest::assert_streq ((::selftest::location ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.cc"
, 4287, __FUNCTION__)), "node->dump_name ()", "\"test_decl/0\""
, (node->dump_name ()), ("test_decl/0")); } while (0)
;
4288 }
4289}
4290
4291/* Run all of the selftests within this file. */
4292
4293void
4294cgraph_cc_tests ()
4295{
4296 test_symbol_table_test ();
4297}
4298
4299} // namespace selftest
4300
4301#endif /* CHECKING_P */
4302
4303#include "gt-cgraph.h"

/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.h

1/* Gimple IR definitions.
2
3 Copyright (C) 2007-2023 Free Software Foundation, Inc.
4 Contributed by Aldy Hernandez <aldyh@redhat.com>
5
6This file is part of GCC.
7
8GCC is free software; you can redistribute it and/or modify it under
9the terms of the GNU General Public License as published by the Free
10Software Foundation; either version 3, or (at your option) any later
11version.
12
13GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14WARRANTY; without even the implied warranty of MERCHANTABILITY or
15FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16for more details.
17
18You should have received a copy of the GNU General Public License
19along with GCC; see the file COPYING3. If not see
20<http://www.gnu.org/licenses/>. */
21
22#ifndef GCC_GIMPLE_H
23#define GCC_GIMPLE_H
24
25#include "tree-ssa-alias.h"
26#include "gimple-expr.h"
27
28typedef gimple *gimple_seq_node;
29
30enum gimple_code {
31#define DEFGSCODE(SYM, STRING, STRUCT) SYM,
32#include "gimple.def"
33#undef DEFGSCODE
34 LAST_AND_UNUSED_GIMPLE_CODE
35};
36
37extern const char *const gimple_code_name[];
38extern const unsigned char gimple_rhs_class_table[];
39
40/* Strip the outermost pointer, from tr1/type_traits. */
41template<typename T> struct remove_pointer { typedef T type; };
42template<typename T> struct remove_pointer<T *> { typedef T type; };
43
44/* Error out if a gimple tuple is addressed incorrectly. */
45#if defined ENABLE_GIMPLE_CHECKING
46#define gcc_gimple_checking_assert(EXPR) gcc_assert (EXPR)
47extern void gimple_check_failed (const gimple *, const char *, int, \
48 const char *, enum gimple_code, \
49 enum tree_code) ATTRIBUTE_NORETURN \
50 ATTRIBUTE_COLD;
51
52#define GIMPLE_CHECK(GS, CODE) \
53 do { \
54 const gimple *__gs = (GS); \
55 if (gimple_code (__gs) != (CODE)) \
56 gimple_check_failed (__gs, __FILE__, __LINE__, __FUNCTION__, \
57 (CODE), ERROR_MARK); \
58 } while (0)
59template <typename T>
60inline T
61GIMPLE_CHECK2(const gimple *gs,
62#if __GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
63 const char *file = __builtin_FILE (),
64 int line = __builtin_LINE (),
65 const char *fun = __builtin_FUNCTION ())
66#else
67 const char *file = __FILE__,
68 int line = __LINE__,
69 const char *fun = NULL)
70#endif
71{
72 T ret = dyn_cast <T> (gs);
73 if (!ret)
74 gimple_check_failed (gs, file, line, fun,
75 remove_pointer<T>::type::code_, ERROR_MARK);
76 return ret;
77}
78template <typename T>
79inline T
80GIMPLE_CHECK2(gimple *gs,
81#if __GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
82 const char *file = __builtin_FILE (),
83 int line = __builtin_LINE (),
84 const char *fun = __builtin_FUNCTION ())
85#else
86 const char *file = __FILE__,
87 int line = __LINE__,
88 const char *fun = NULL)
89#endif
90{
91 T ret = dyn_cast <T> (gs);
92 if (!ret)
93 gimple_check_failed (gs, file, line, fun,
94 remove_pointer<T>::type::code_, ERROR_MARK);
95 return ret;
96}
97#else /* not ENABLE_GIMPLE_CHECKING */
98#define gcc_gimple_checking_assert(EXPR) ((void)(0 && (EXPR)))
99#define GIMPLE_CHECK(GS, CODE) (void)0
100template <typename T>
101inline T
102GIMPLE_CHECK2(gimple *gs)
103{
104 return as_a <T> (gs);
105}
106template <typename T>
107inline T
108GIMPLE_CHECK2(const gimple *gs)
109{
110 return as_a <T> (gs);
111}
112#endif
113
114/* Class of GIMPLE expressions suitable for the RHS of assignments. See
115 get_gimple_rhs_class. */
116enum gimple_rhs_class
117{
118 GIMPLE_INVALID_RHS, /* The expression cannot be used on the RHS. */
119 GIMPLE_TERNARY_RHS, /* The expression is a ternary operation. */
120 GIMPLE_BINARY_RHS, /* The expression is a binary operation. */
121 GIMPLE_UNARY_RHS, /* The expression is a unary operation. */
122 GIMPLE_SINGLE_RHS /* The expression is a single object (an SSA
123 name, a _DECL, a _REF, etc.). */
124};
125
126/* Specific flags for individual GIMPLE statements. These flags are
127 always stored in gimple.subcode and they may only be
128 defined for statement codes that do not use subcodes.
129
130 Values for the masks can overlap as long as the overlapping values
131 are never used in the same statement class.
132
133 The maximum mask value that can be defined is 1 << 15 (i.e., each
134 statement code can hold up to 16 bitflags).
135
136 Keep this list sorted. */
137enum gf_mask {
138 GF_ASM_INPUT = 1 << 0,
139 GF_ASM_VOLATILE = 1 << 1,
140 GF_ASM_INLINE = 1 << 2,
141 GF_CALL_FROM_THUNK = 1 << 0,
142 GF_CALL_RETURN_SLOT_OPT = 1 << 1,
143 GF_CALL_TAILCALL = 1 << 2,
144 GF_CALL_VA_ARG_PACK = 1 << 3,
145 GF_CALL_NOTHROW = 1 << 4,
146 GF_CALL_ALLOCA_FOR_VAR = 1 << 5,
147 GF_CALL_INTERNAL = 1 << 6,
148 GF_CALL_CTRL_ALTERING = 1 << 7,
149 GF_CALL_MUST_TAIL_CALL = 1 << 9,
150 GF_CALL_BY_DESCRIPTOR = 1 << 10,
151 GF_CALL_NOCF_CHECK = 1 << 11,
152 GF_CALL_FROM_NEW_OR_DELETE = 1 << 12,
153 GF_OMP_PARALLEL_COMBINED = 1 << 0,
154 GF_OMP_TASK_TASKLOOP = 1 << 0,
155 GF_OMP_TASK_TASKWAIT = 1 << 1,
156 GF_OMP_FOR_KIND_MASK = (1 << 3) - 1,
157 GF_OMP_FOR_KIND_FOR = 0,
158 GF_OMP_FOR_KIND_DISTRIBUTE = 1,
159 GF_OMP_FOR_KIND_TASKLOOP = 2,
160 GF_OMP_FOR_KIND_OACC_LOOP = 4,
161 GF_OMP_FOR_KIND_SIMD = 5,
162 GF_OMP_FOR_COMBINED = 1 << 3,
163 GF_OMP_FOR_COMBINED_INTO = 1 << 4,
164 GF_OMP_TARGET_KIND_MASK = (1 << 5) - 1,
165 GF_OMP_TARGET_KIND_REGION = 0,
166 GF_OMP_TARGET_KIND_DATA = 1,
167 GF_OMP_TARGET_KIND_UPDATE = 2,
168 GF_OMP_TARGET_KIND_ENTER_DATA = 3,
169 GF_OMP_TARGET_KIND_EXIT_DATA = 4,
170 GF_OMP_TARGET_KIND_OACC_PARALLEL = 5,
171 GF_OMP_TARGET_KIND_OACC_KERNELS = 6,
172 GF_OMP_TARGET_KIND_OACC_SERIAL = 7,
173 GF_OMP_TARGET_KIND_OACC_DATA = 8,
174 GF_OMP_TARGET_KIND_OACC_UPDATE = 9,
175 GF_OMP_TARGET_KIND_OACC_ENTER_DATA = 10,
176 GF_OMP_TARGET_KIND_OACC_EXIT_DATA = 11,
177 GF_OMP_TARGET_KIND_OACC_DECLARE = 12,
178 GF_OMP_TARGET_KIND_OACC_HOST_DATA = 13,
179 /* A 'GF_OMP_TARGET_KIND_OACC_PARALLEL' representing an OpenACC 'kernels'
180 decomposed part, parallelized. */
181 GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED = 14,
182 /* A 'GF_OMP_TARGET_KIND_OACC_PARALLEL' representing an OpenACC 'kernels'
183 decomposed part, "gang-single". */
184 GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE = 15,
185 /* A 'GF_OMP_TARGET_KIND_OACC_DATA' representing an OpenACC 'kernels'
186 decomposed parts' 'data' construct. */
187 GF_OMP_TARGET_KIND_OACC_DATA_KERNELS = 16,
188 GF_OMP_TEAMS_HOST = 1 << 0,
189
190 /* True on an GIMPLE_OMP_RETURN statement if the return does not require
191 a thread synchronization via some sort of barrier. The exact barrier
192 that would otherwise be emitted is dependent on the OMP statement with
193 which this return is associated. */
194 GF_OMP_RETURN_NOWAIT = 1 << 0,
195
196 GF_OMP_SECTION_LAST = 1 << 0,
197 GF_OMP_ORDERED_STANDALONE = 1 << 0,
198 GF_OMP_ATOMIC_MEMORY_ORDER = (1 << 6) - 1,
199 GF_OMP_ATOMIC_NEED_VALUE = 1 << 6,
200 GF_OMP_ATOMIC_WEAK = 1 << 7,
201 GF_PREDICT_TAKEN = 1 << 15
202};
203
204/* This subcode tells apart different kinds of stmts that are not used
205 for codegen, but rather to retain debug information. */
206enum gimple_debug_subcode {
207 GIMPLE_DEBUG_BIND = 0,
208 GIMPLE_DEBUG_SOURCE_BIND = 1,
209 GIMPLE_DEBUG_BEGIN_STMT = 2,
210 GIMPLE_DEBUG_INLINE_ENTRY = 3
211};
212
213/* Masks for selecting a pass local flag (PLF) to work on. These
214 masks are used by gimple_set_plf and gimple_plf. */
215enum plf_mask {
216 GF_PLF_1 = 1 << 0,
217 GF_PLF_2 = 1 << 1
218};
219
220/* Data structure definitions for GIMPLE tuples. NOTE: word markers
221 are for 64 bit hosts. */
222
223struct GTY((desc ("gimple_statement_structure (&%h)"), tag ("GSS_BASE"),
224 chain_next ("%h.next"), variable_size))
225 gimple
226{
227 /* [ WORD 1 ]
228 Main identifying code for a tuple. */
229 ENUM_BITFIELD(gimple_code) code : 8;
230
231 /* Nonzero if a warning should not be emitted on this tuple. */
232 unsigned int no_warning : 1;
233
234 /* Nonzero if this tuple has been visited. Passes are responsible
235 for clearing this bit before using it. */
236 unsigned int visited : 1;
237
238 /* Nonzero if this tuple represents a non-temporal move. */
239 unsigned int nontemporal_move : 1;
240
241 /* Pass local flags. These flags are free for any pass to use as
242 they see fit. Passes should not assume that these flags contain
243 any useful value when the pass starts. Any initial state that
244 the pass requires should be set on entry to the pass. See
245 gimple_set_plf and gimple_plf for usage. */
246 unsigned int plf : 2;
247
248 /* Nonzero if this statement has been modified and needs to have its
249 operands rescanned. */
250 unsigned modified : 1;
251
252 /* Nonzero if this statement contains volatile operands. */
253 unsigned has_volatile_ops : 1;
254
255 /* Padding to get subcode to 16 bit alignment. */
256 unsigned pad : 1;
257
258 /* The SUBCODE field can be used for tuple-specific flags for tuples
259 that do not require subcodes. Note that SUBCODE should be at
260 least as wide as tree codes, as several tuples store tree codes
261 in there. */
262 unsigned int subcode : 16;
263
264 /* UID of this statement. This is used by passes that want to
265 assign IDs to statements. It must be assigned and used by each
266 pass. By default it should be assumed to contain garbage. */
267 unsigned uid;
268
269 /* [ WORD 2 ]
270 Locus information for debug info. */
271 location_t location;
272
273 /* Number of operands in this tuple. */
274 unsigned num_ops;
275
276 /* [ WORD 3 ]
277 Basic block holding this statement. */
278 basic_block bb;
279
280 /* [ WORD 4-5 ]
281 Linked lists of gimple statements. The next pointers form
282 a NULL terminated list, the prev pointers are a cyclic list.
283 A gimple statement is hence also a double-ended list of
284 statements, with the pointer itself being the first element,
285 and the prev pointer being the last. */
286 gimple *next;
287 gimple *GTY((skip)) prev;
288};
289
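/* Illustrative sketch only (not part of gimple.h): the NEXT/PREV links
   described above form a NULL-terminated forward chain whose head also
   gives O(1) access to the last statement through PREV.  Real code
   normally goes through gimple_seq_first / gimple_seq_last declared
   further down in this header; sketch_count_stmts is a hypothetical
   helper added here purely for illustration.  */
static inline unsigned
sketch_count_stmts (gimple *seq_head)
{
  unsigned n = 0;
  for (gimple *g = seq_head; g; g = g->next)
    n++;
  return n;
}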
290
291/* Base structure for tuples with operands. */
292
293/* This gimple subclass has no tag value. */
294struct GTY(())
295 gimple_statement_with_ops_base : public gimple
296{
297 /* [ WORD 1-6 ] : base class */
298
299 /* [ WORD 7 ]
300 SSA operand vectors. NOTE: It should be possible to
301 amalgamate these vectors with the operand vector OP. However,
302 the SSA operand vectors are organized differently and contain
303 more information (like immediate use chaining). */
304 struct use_optype_d GTY((skip (""))) *use_ops;
305};
306
307
308/* Statements that take register operands. */
309
310struct GTY((tag("GSS_WITH_OPS")))
311 gimple_statement_with_ops : public gimple_statement_with_ops_base
312{
313 /* [ WORD 1-7 ] : base class */
314
315 /* [ WORD 8 ]
316 Operand vector. NOTE! This must always be the last field
317 of this structure. In particular, this means that this
318 structure cannot be embedded inside another one. */
319 tree GTY((length ("%h.num_ops"))) op[1];
320};
321
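/* Illustrative sketch only (not part of gimple.h): OP is a trailing
   array whose length is the inherited NUM_OPS field (see the GTY
   "length" annotation above).  The real accessors are gimple_op /
   gimple_set_op, which also cope with the other tuple layouts via
   gimple_ops_offset_; sketch_first_op is a hypothetical helper.  */
static inline tree
sketch_first_op (const gimple_statement_with_ops *stmt)
{
  return stmt->num_ops ? stmt->op[0] : NULL_TREE;
}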
322
323/* Base for statements that take both memory and register operands. */
324
325struct GTY((tag("GSS_WITH_MEM_OPS_BASE")))
326 gimple_statement_with_memory_ops_base : public gimple_statement_with_ops_base
327{
328 /* [ WORD 1-7 ] : base class */
329
330 /* [ WORD 8-9 ]
331 Virtual operands for this statement. The GC will pick them
332 up via the ssa_names array. */
333 tree GTY((skip (""))) vdef;
334 tree GTY((skip (""))) vuse;
335};
336
337
338/* Statements that take both memory and register operands. */
339
340struct GTY((tag("GSS_WITH_MEM_OPS")))
341 gimple_statement_with_memory_ops :
342 public gimple_statement_with_memory_ops_base
343{
344 /* [ WORD 1-9 ] : base class */
345
346 /* [ WORD 10 ]
347 Operand vector. NOTE! This must always be the last field
348 of this structure. In particular, this means that this
349 structure cannot be embedded inside another one. */
350 tree GTY((length ("%h.num_ops"))) op[1];
351};
352
353
354/* Call statements that take both memory and register operands. */
355
356struct GTY((tag("GSS_CALL")))
357 gcall : public gimple_statement_with_memory_ops_base
358{
359 /* [ WORD 1-9 ] : base class */
360
361 /* [ WORD 10-13 ] */
362 struct pt_solution call_used;
363 struct pt_solution call_clobbered;
364
365 /* [ WORD 14 ] */
366 union GTY ((desc ("%1.subcode & GF_CALL_INTERNAL"))) {
367 tree GTY ((tag ("0"))) fntype;
368 enum internal_fn GTY ((tag ("GF_CALL_INTERNAL"))) internal_fn;
369 } u;
370
371 /* [ WORD 15 ]
372 Operand vector. NOTE! This must always be the last field
373 of this structure. In particular, this means that this
374 structure cannot be embedded inside another one. */
375 tree GTY((length ("%h.num_ops"))) op[1];
376
377 static const enum gimple_code code_ = GIMPLE_CALL;
378};
379
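/* Illustrative sketch only (not part of gimple.h): the GF_CALL_INTERNAL
   bit in the inherited SUBCODE selects which member of the union U
   above is live, matching the GTY "desc" annotation.  The real
   accessors are gimple_call_internal_p and gimple_call_fntype;
   sketch_call_is_internal is a hypothetical helper.  */
static inline bool
sketch_call_is_internal (const gcall *call)
{
  return (call->subcode & GF_CALL_INTERNAL) != 0;
}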
380
381/* OMP statements. */
382
383struct GTY((tag("GSS_OMP")))
384 gimple_statement_omp : public gimple
385{
386 /* [ WORD 1-6 ] : base class */
387
388 /* [ WORD 7 ] */
389 gimple_seq body;
390};
391
392
393/* GIMPLE_BIND */
394
395struct GTY((tag("GSS_BIND")))
396 gbind : public gimple
397{
398 /* [ WORD 1-6 ] : base class */
399
400 /* [ WORD 7 ]
401 Variables declared in this scope. */
402 tree vars;
403
404 /* [ WORD 8 ]
405 This is different than the BLOCK field in gimple,
406 which is analogous to TREE_BLOCK (i.e., the lexical block holding
407 this statement). This field is the equivalent of BIND_EXPR_BLOCK
408 in tree land (i.e., the lexical scope defined by this bind). See
409 gimple-low.cc. */
410 tree block;
411
412 /* [ WORD 9 ] */
413 gimple_seq body;
414};
415
416
417/* GIMPLE_CATCH */
418
419struct GTY((tag("GSS_CATCH")))
420 gcatch : public gimple
421{
422 /* [ WORD 1-6 ] : base class */
423
424 /* [ WORD 7 ] */
425 tree types;
426
427 /* [ WORD 8 ] */
428 gimple_seq handler;
429};
430
431
432/* GIMPLE_EH_FILTER */
433
434struct GTY((tag("GSS_EH_FILTER")))
435 geh_filter : public gimple
436{
437 /* [ WORD 1-6 ] : base class */
438
439 /* [ WORD 7 ]
440 Filter types. */
441 tree types;
442
443 /* [ WORD 8 ]
444 Failure actions. */
445 gimple_seq failure;
446};
447
448/* GIMPLE_EH_ELSE */
449
450struct GTY((tag("GSS_EH_ELSE")))
451 geh_else : public gimple
452{
453 /* [ WORD 1-6 ] : base class */
454
455 /* [ WORD 7,8 ] */
456 gimple_seq n_body, e_body;
457};
458
459/* GIMPLE_EH_MUST_NOT_THROW */
460
461struct GTY((tag("GSS_EH_MNT")))
462 geh_mnt : public gimple
463{
464 /* [ WORD 1-6 ] : base class */
465
466 /* [ WORD 7 ] Abort function decl. */
467 tree fndecl;
468};
469
470/* GIMPLE_PHI */
471
472struct GTY((tag("GSS_PHI")))
473 gphi : public gimple
474{
475 /* [ WORD 1-6 ] : base class */
476
477 /* [ WORD 7 ] */
478 unsigned capacity;
479 unsigned nargs;
480
481 /* [ WORD 8 ] */
482 tree result;
483
484 /* [ WORD 9 ] */
485 struct phi_arg_d GTY ((length ("%h.nargs"))) args[1];
486};
487
488
489/* GIMPLE_RESX, GIMPLE_EH_DISPATCH */
490
491struct GTY((tag("GSS_EH_CTRL")))
492 gimple_statement_eh_ctrl : public gimple
493{
494 /* [ WORD 1-6 ] : base class */
495
496 /* [ WORD 7 ]
497 Exception region number. */
498 int region;
499};
500
501struct GTY((tag("GSS_EH_CTRL")))
502 gresx : public gimple_statement_eh_ctrl
503{
504 /* No extra fields; adds invariant:
505 stmt->code == GIMPLE_RESX. */
506};
507
508struct GTY((tag("GSS_EH_CTRL")))
509 geh_dispatch : public gimple_statement_eh_ctrl
510{
511 /* No extra fields; adds invariant:
512 stmt->code == GIMPLE_EH_DISPATCH. */
513};
514
515
516/* GIMPLE_TRY */
517
518struct GTY((tag("GSS_TRY")))
519 gtry : public gimple
520{
521 /* [ WORD 1-6 ] : base class */
522
523 /* [ WORD 7 ]
524 Expression to evaluate. */
525 gimple_seq eval;
526
527 /* [ WORD 8 ]
528 Cleanup expression. */
529 gimple_seq cleanup;
530};
531
532/* Kind of GIMPLE_TRY statements. */
533enum gimple_try_flags
534{
535 /* A try/catch. */
536 GIMPLE_TRY_CATCH = 1 << 0,
537
538 /* A try/finally. */
539 GIMPLE_TRY_FINALLY = 1 << 1,
540 GIMPLE_TRY_KIND = GIMPLE_TRY_CATCH | GIMPLE_TRY_FINALLY,
541
542 /* Analogous to TRY_CATCH_IS_CLEANUP. */
543 GIMPLE_TRY_CATCH_IS_CLEANUP = 1 << 2
544};
545
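/* Illustrative sketch only (not part of gimple.h): the try kind is
   kept in the statement's SUBCODE and extracted with the
   GIMPLE_TRY_KIND mask; the real accessor is gimple_try_kind.
   sketch_try_is_finally is a hypothetical helper.  */
static inline bool
sketch_try_is_finally (const gtry *stmt)
{
  return (stmt->subcode & GIMPLE_TRY_KIND) == GIMPLE_TRY_FINALLY;
}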
546/* GIMPLE_WITH_CLEANUP_EXPR */
547
548struct GTY((tag("GSS_WCE")))
549 gimple_statement_wce : public gimple
550{
551 /* [ WORD 1-6 ] : base class */
552
553 /* Subcode: CLEANUP_EH_ONLY. True if the cleanup should only be
554 executed if an exception is thrown, not on normal exit of its
555 scope. This flag is analogous to the CLEANUP_EH_ONLY flag
556 in TARGET_EXPRs. */
557
558 /* [ WORD 7 ]
559 Cleanup expression. */
560 gimple_seq cleanup;
561};
562
563
564/* GIMPLE_ASM */
565
566struct GTY((tag("GSS_ASM")))
567 gasm : public gimple_statement_with_memory_ops_base
568{
569 /* [ WORD 1-9 ] : base class */
570
571 /* [ WORD 10 ]
572 __asm__ statement. */
573 const char *string;
574
575 /* [ WORD 11 ]
576 Number of inputs, outputs, clobbers, labels. */
577 unsigned char ni;
578 unsigned char no;
579 unsigned char nc;
580 unsigned char nl;
581
582 /* [ WORD 12 ]
583 Operand vector. NOTE! This must always be the last field
584 of this structure. In particular, this means that this
585 structure cannot be embedded inside another one. */
586 tree GTY((length ("%h.num_ops"))) op[1];
587};
588
589/* GIMPLE_OMP_CRITICAL */
590
591struct GTY((tag("GSS_OMP_CRITICAL")))
592 gomp_critical : public gimple_statement_omp
593{
594 /* [ WORD 1-7 ] : base class */
595
596 /* [ WORD 8 ] */
597 tree clauses;
598
599 /* [ WORD 9 ]
600 Critical section name. */
601 tree name;
602};
603
604
605struct GTY(()) gimple_omp_for_iter {
606 /* Condition code. */
607 enum tree_code cond;
608
609 /* Index variable. */
610 tree index;
611
612 /* Initial value. */
613 tree initial;
614
615 /* Final value. */
616 tree final;
617
618 /* Increment. */
619 tree incr;
620};
621
622/* GIMPLE_OMP_FOR */
623
624struct GTY((tag("GSS_OMP_FOR")))
625 gomp_for : public gimple_statement_omp
626{
627 /* [ WORD 1-7 ] : base class */
628
629 /* [ WORD 8 ] */
630 tree clauses;
631
632 /* [ WORD 9 ]
633 Number of elements in iter array. */
634 size_t collapse;
635
636 /* [ WORD 10 ] */
637 struct gimple_omp_for_iter * GTY((length ("%h.collapse"))) iter;
638
639 /* [ WORD 11 ]
640 Pre-body evaluated before the loop body begins. */
641 gimple_seq pre_body;
642};
643
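/* Illustrative sketch only (not part of gimple.h): ITER holds COLLAPSE
   entries, one per collapsed loop dimension.  Real code uses
   gimple_omp_for_collapse, gimple_omp_for_index and friends;
   sketch_omp_for_first_index is a hypothetical helper.  */
static inline tree
sketch_omp_for_first_index (const gomp_for *loop)
{
  return loop->collapse ? loop->iter[0].index : NULL_TREE;
}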
644
645/* GIMPLE_OMP_PARALLEL, GIMPLE_OMP_TARGET, GIMPLE_OMP_TASK, GIMPLE_OMP_TEAMS */
646
647struct GTY((tag("GSS_OMP_PARALLEL_LAYOUT")))
648 gimple_statement_omp_parallel_layout : public gimple_statement_omp
649{
650 /* [ WORD 1-7 ] : base class */
651
652 /* [ WORD 8 ]
653 Clauses. */
654 tree clauses;
655
656 /* [ WORD 9 ]
657 Child function holding the body of the parallel region. */
658 tree child_fn;
659
660 /* [ WORD 10 ]
661 Shared data argument. */
662 tree data_arg;
663};
664
665/* GIMPLE_OMP_PARALLEL or GIMPLE_TASK */
666struct GTY((tag("GSS_OMP_PARALLEL_LAYOUT")))
667 gimple_statement_omp_taskreg : public gimple_statement_omp_parallel_layout
668{
669 /* No extra fields; adds invariant:
670 stmt->code == GIMPLE_OMP_PARALLEL
671 || stmt->code == GIMPLE_OMP_TASK
672 || stmt->code == GIMPLE_OMP_TEAMS. */
673};
674
675/* GIMPLE_OMP_PARALLEL */
676struct GTY((tag("GSS_OMP_PARALLEL_LAYOUT")))
677 gomp_parallel : public gimple_statement_omp_taskreg
678{
679 /* No extra fields; adds invariant:
680 stmt->code == GIMPLE_OMP_PARALLEL. */
681};
682
683/* GIMPLE_OMP_TARGET */
684struct GTY((tag("GSS_OMP_PARALLEL_LAYOUT")))
685 gomp_target : public gimple_statement_omp_parallel_layout
686{
687 /* No extra fields; adds invariant:
688 stmt->code == GIMPLE_OMP_TARGET. */
689};
690
691/* GIMPLE_OMP_TASK */
692
693struct GTY((tag("GSS_OMP_TASK")))
694 gomp_task : public gimple_statement_omp_taskreg
695{
696 /* [ WORD 1-10 ] : base class */
697
698 /* [ WORD 11 ]
699 Child function holding firstprivate initialization if needed. */
700 tree copy_fn;
701
702 /* [ WORD 12-13 ]
703 Size and alignment in bytes of the argument data block. */
704 tree arg_size;
705 tree arg_align;
706};
707
708
709/* GIMPLE_OMP_SECTION */
710/* Uses struct gimple_statement_omp. */
711
712
713/* GIMPLE_OMP_SECTIONS */
714
715struct GTY((tag("GSS_OMP_SECTIONS")))
716 gomp_sections : public gimple_statement_omp
717{
718 /* [ WORD 1-7 ] : base class */
719
720 /* [ WORD 8 ] */
721 tree clauses;
722
723 /* [ WORD 9 ]
724 The control variable used for deciding which of the sections to
725 execute. */
726 tree control;
727};
728
729/* GIMPLE_OMP_CONTINUE.
730
731 Note: This does not inherit from gimple_statement_omp, because we
732 do not need the body field. */
733
734struct GTY((tag("GSS_OMP_CONTINUE")))
735 gomp_continue : public gimple
736{
737 /* [ WORD 1-6 ] : base class */
738
739 /* [ WORD 7 ] */
740 tree control_def;
741
742 /* [ WORD 8 ] */
743 tree control_use;
744};
745
746/* GIMPLE_OMP_SINGLE, GIMPLE_OMP_ORDERED, GIMPLE_OMP_TASKGROUP,
747 GIMPLE_OMP_SCAN, GIMPLE_OMP_MASKED, GIMPLE_OMP_SCOPE. */
748
749struct GTY((tag("GSS_OMP_SINGLE_LAYOUT")))
750 gimple_statement_omp_single_layout : public gimple_statement_omp
751{
752 /* [ WORD 1-7 ] : base class */
753
754 /* [ WORD 8 ] */
755 tree clauses;
756};
757
758struct GTY((tag("GSS_OMP_SINGLE_LAYOUT")))
759 gomp_single : public gimple_statement_omp_single_layout
760{
761 /* No extra fields; adds invariant:
762 stmt->code == GIMPLE_OMP_SINGLE. */
763};
764
765struct GTY((tag("GSS_OMP_PARALLEL_LAYOUT")))
766 gomp_teams : public gimple_statement_omp_taskreg
767{
768 /* No extra fields; adds invariant:
769 stmt->code == GIMPLE_OMP_TEAMS. */
770};
771
772struct GTY((tag("GSS_OMP_SINGLE_LAYOUT")))
773 gomp_ordered : public gimple_statement_omp_single_layout
774{
775 /* No extra fields; adds invariant:
776 stmt->code == GIMPLE_OMP_ORDERED. */
777};
778
779struct GTY((tag("GSS_OMP_SINGLE_LAYOUT")))
780 gomp_scan : public gimple_statement_omp_single_layout
781{
782 /* No extra fields; adds invariant:
783 stmt->code == GIMPLE_OMP_SCAN. */
784};
785
786
787/* GIMPLE_OMP_ATOMIC_LOAD.
788 Note: This is based on gimple, not g_s_omp, because g_s_omp
789 contains a sequence, which we don't need here. */
790
791struct GTY((tag("GSS_OMP_ATOMIC_LOAD")))
792 gomp_atomic_load : public gimple
793{
794 /* [ WORD 1-6 ] : base class */
795
796 /* [ WORD 7-8 ] */
797 tree rhs, lhs;
798};
799
800/* GIMPLE_OMP_ATOMIC_STORE.
801 See note on GIMPLE_OMP_ATOMIC_LOAD. */
802
803struct GTY((tag("GSS_OMP_ATOMIC_STORE_LAYOUT")))
804 gimple_statement_omp_atomic_store_layout : public gimple
805{
806 /* [ WORD 1-6 ] : base class */
807
808 /* [ WORD 7 ] */
809 tree val;
810};
811
812struct GTY((tag("GSS_OMP_ATOMIC_STORE_LAYOUT")))
813 gomp_atomic_store :
814 public gimple_statement_omp_atomic_store_layout
815{
816 /* No extra fields; adds invariant:
817 stmt->code == GIMPLE_OMP_ATOMIC_STORE. */
818};
819
820struct GTY((tag("GSS_OMP_ATOMIC_STORE_LAYOUT")))
821 gimple_statement_omp_return :
822 public gimple_statement_omp_atomic_store_layout
823{
824 /* No extra fields; adds invariant:
825 stmt->code == GIMPLE_OMP_RETURN. */
826};
827
828/* Assumptions. */
829
830struct GTY((tag("GSS_ASSUME")))
831 gimple_statement_assume : public gimple
832{
833 /* [ WORD 1-6 ] : base class */
834
835 /* [ WORD 7 ] */
836 tree guard;
837
838 /* [ WORD 8 ] */
839 gimple_seq body;
840};
841
842/* GIMPLE_TRANSACTION. */
843
844/* Bits to be stored in the GIMPLE_TRANSACTION subcode. */
845
846/* The __transaction_atomic was declared [[outer]] or it is
847 __transaction_relaxed. */
848#define GTMA_IS_OUTER (1u << 0)
849#define GTMA_IS_RELAXED (1u << 1)
850#define GTMA_DECLARATION_MASK (GTMA_IS_OUTER | GTMA_IS_RELAXED)
851
852/* The transaction is seen to not have an abort. */
853#define GTMA_HAVE_ABORT (1u << 2)
854/* The transaction is seen to have loads or stores. */
855#define GTMA_HAVE_LOAD (1u << 3)
856#define GTMA_HAVE_STORE (1u << 4)
857/* The transaction MAY enter serial irrevocable mode in its dynamic scope. */
858#define GTMA_MAY_ENTER_IRREVOCABLE (1u << 5)
859/* The transaction WILL enter serial irrevocable mode.
860 An irrevocable block post-dominates the entire transaction, such
861 that all invocations of the transaction will go serial-irrevocable.
862 In such case, we don't bother instrumenting the transaction, and
863 tell the runtime that it should begin the transaction in
864 serial-irrevocable mode. */
865#define GTMA_DOES_GO_IRREVOCABLE (1u << 6)
866/* The transaction contains no instrumentation code whatsoever, most
867 likely because it is guaranteed to go irrevocable upon entry. */
868#define GTMA_HAS_NO_INSTRUMENTATION (1u << 7)
869
870struct GTY((tag("GSS_TRANSACTION")))
871 gtransaction : public gimple_statement_with_memory_ops_base
872{
873 /* [ WORD 1-9 ] : base class */
874
875 /* [ WORD 10 ] */
876 gimple_seq body;
877
878 /* [ WORD 11-13 ] */
879 tree label_norm;
880 tree label_uninst;
881 tree label_over;
882};
883
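/* Illustrative sketch only (not part of gimple.h): the GTMA_* bits
   above live in the SUBCODE of a GIMPLE_TRANSACTION statement; the
   real accessors are gimple_transaction_subcode and
   gimple_transaction_set_subcode.  sketch_transaction_is_outer is a
   hypothetical helper.  */
static inline bool
sketch_transaction_is_outer (const gtransaction *txn)
{
  return (txn->subcode & GTMA_IS_OUTER) != 0;
}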
884#define DEFGSSTRUCT(SYM, STRUCT, HAS_TREE_OP) SYM,
885enum gimple_statement_structure_enum {
886#include "gsstruct.def"
887 LAST_GSS_ENUM
888};
889#undef DEFGSSTRUCT
890
891/* A statement with the invariant that
892 stmt->code == GIMPLE_COND
893 i.e. a conditional jump statement. */
894
895struct GTY((tag("GSS_WITH_OPS")))
896 gcond : public gimple_statement_with_ops
897{
898 /* no additional fields; this uses the layout for GSS_WITH_OPS. */
899 static const enum gimple_code code_ = GIMPLE_COND;
900};
901
902/* A statement with the invariant that
903 stmt->code == GIMPLE_DEBUG
904 i.e. a debug statement. */
905
906struct GTY((tag("GSS_WITH_OPS")))
907 gdebug : public gimple_statement_with_ops
908{
909 /* no additional fields; this uses the layout for GSS_WITH_OPS. */
910};
911
912/* A statement with the invariant that
913 stmt->code == GIMPLE_GOTO
914 i.e. a goto statement. */
915
916struct GTY((tag("GSS_WITH_OPS")))
917 ggoto : public gimple_statement_with_ops
918{
919 /* no additional fields; this uses the layout for GSS_WITH_OPS. */
920};
921
922/* A statement with the invariant that
923 stmt->code == GIMPLE_LABEL
924 i.e. a label statement. */
925
926struct GTY((tag("GSS_WITH_OPS")))
927 glabel : public gimple_statement_with_ops
928{
929 /* no additional fields; this uses the layout for GSS_WITH_OPS. */
930};
931
932/* A statement with the invariant that
933 stmt->code == GIMPLE_SWITCH
934 i.e. a switch statement. */
935
936struct GTY((tag("GSS_WITH_OPS")))
937 gswitch : public gimple_statement_with_ops
938{
939 /* no additional fields; this uses the layout for GSS_WITH_OPS. */
940};
941
942/* A statement with the invariant that
943 stmt->code == GIMPLE_ASSIGN
944 i.e. an assignment statement. */
945
946struct GTY((tag("GSS_WITH_MEM_OPS")))
947 gassign : public gimple_statement_with_memory_ops
948{
949 static const enum gimple_code code_ = GIMPLE_ASSIGN;
950 /* no additional fields; this uses the layout for GSS_WITH_MEM_OPS. */
951};
952
953/* A statement with the invariant that
954 stmt->code == GIMPLE_RETURN
955 i.e. a return statement. */
956
957struct GTY((tag("GSS_WITH_MEM_OPS")))
958 greturn : public gimple_statement_with_memory_ops
959{
960 /* no additional fields; this uses the layout for GSS_WITH_MEM_OPS. */
961};
962
963template <>
964template <>
965inline bool
966is_a_helper <gasm *>::test (gimple *gs)
967{
968 return gs->code == GIMPLE_ASM;
969}
970
971template <>
972template <>
973inline bool
974is_a_helper <gassign *>::test (gimple *gs)
975{
976 return gs->code == GIMPLE_ASSIGN;
977}
978
979template <>
980template <>
981inline bool
982is_a_helper <const gassign *>::test (const gimple *gs)
983{
984 return gs->code == GIMPLE_ASSIGN;
985}
986
987template <>
988template <>
989inline bool
990is_a_helper <gbind *>::test (gimple *gs)
991{
992 return gs->code == GIMPLE_BIND;
993}
994
995template <>
996template <>
997inline bool
998is_a_helper <gcall *>::test (gimple *gs)
999{
1000 return gs->code == GIMPLE_CALL;
1001}
1002
1003template <>
1004template <>
1005inline bool
1006is_a_helper <gcatch *>::test (gimple *gs)
1007{
1008 return gs->code == GIMPLE_CATCH;
1009}
1010
1011template <>
1012template <>
1013inline bool
1014is_a_helper <gcond *>::test (gimple *gs)
1015{
1016 return gs->code == GIMPLE_COND;
1017}
1018
1019template <>
1020template <>
1021inline bool
1022is_a_helper <const gcond *>::test (const gimple *gs)
1023{
1024 return gs->code == GIMPLE_COND;
1025}
1026
1027template <>
1028template <>
1029inline bool
1030is_a_helper <gdebug *>::test (gimple *gs)
1031{
1032 return gs->code == GIMPLE_DEBUG;
1033}
1034
1035template <>
1036template <>
1037inline bool
1038is_a_helper <const gdebug *>::test (const gimple *gs)
1039{
1040 return gs->code == GIMPLE_DEBUG;
1041}
1042
1043template <>
1044template <>
1045inline bool
1046is_a_helper <ggoto *>::test (gimple *gs)
1047{
1048 return gs->code == GIMPLE_GOTO;
1049}
1050
1051template <>
1052template <>
1053inline bool
1054is_a_helper <const ggoto *>::test (const gimple *gs)
1055{
1056 return gs->code == GIMPLE_GOTO;
1057}
1058
1059template <>
1060template <>
1061inline bool
1062is_a_helper <glabel *>::test (gimple *gs)
1063{
1064 return gs->code == GIMPLE_LABEL;
1065}
1066
1067template <>
1068template <>
1069inline bool
1070is_a_helper <const glabel *>::test (const gimple *gs)
1071{
1072 return gs->code == GIMPLE_LABEL;
1073}
1074
1075template <>
1076template <>
1077inline bool
1078is_a_helper <gresx *>::test (gimple *gs)
1079{
1080 return gs->code == GIMPLE_RESX;
1081}
1082
1083template <>
1084template <>
1085inline bool
1086is_a_helper <geh_dispatch *>::test (gimple *gs)
1087{
1088 return gs->code == GIMPLE_EH_DISPATCH;
1089}
1090
1091template <>
1092template <>
1093inline bool
1094is_a_helper <geh_else *>::test (gimple *gs)
1095{
1096 return gs->code == GIMPLE_EH_ELSE;
1097}
1098
1099template <>
1100template <>
1101inline bool
1102is_a_helper <const geh_else *>::test (const gimple *gs)
1103{
1104 return gs->code == GIMPLE_EH_ELSE;
1105}
1106
1107template <>
1108template <>
1109inline bool
1110is_a_helper <geh_filter *>::test (gimple *gs)
1111{
1112 return gs->code == GIMPLE_EH_FILTER;
1113}
1114
1115template <>
1116template <>
1117inline bool
1118is_a_helper <geh_mnt *>::test (gimple *gs)
1119{
1120 return gs->code == GIMPLE_EH_MUST_NOT_THROW;
1121}
1122
1123template <>
1124template <>
1125inline bool
1126is_a_helper <const geh_mnt *>::test (const gimple *gs)
1127{
1128 return gs->code == GIMPLE_EH_MUST_NOT_THROW;
1129}
1130
1131template <>
1132template <>
1133inline bool
1134is_a_helper <gomp_atomic_load *>::test (gimple *gs)
1135{
1136 return gs->code == GIMPLE_OMP_ATOMIC_LOAD;
1137}
1138
1139template <>
1140template <>
1141inline bool
1142is_a_helper <gomp_atomic_store *>::test (gimple *gs)
1143{
1144 return gs->code == GIMPLE_OMP_ATOMIC_STORE;
1145}
1146
1147template <>
1148template <>
1149inline bool
1150is_a_helper <gimple_statement_omp_return *>::test (gimple *gs)
1151{
1152 return gs->code == GIMPLE_OMP_RETURN;
1153}
1154
1155template <>
1156template <>
1157inline bool
1158is_a_helper <gomp_continue *>::test (gimple *gs)
1159{
1160 return gs->code == GIMPLE_OMP_CONTINUE;
1161}
1162
1163template <>
1164template <>
1165inline bool
1166is_a_helper <gomp_critical *>::test (gimple *gs)
1167{
1168 return gs->code == GIMPLE_OMP_CRITICAL;
1169}
1170
1171template <>
1172template <>
1173inline bool
1174is_a_helper <gomp_ordered *>::test (gimple *gs)
1175{
1176 return gs->code == GIMPLE_OMP_ORDERED;
1177}
1178
1179template <>
1180template <>
1181inline bool
1182is_a_helper <gomp_scan *>::test (gimple *gs)
1183{
1184 return gs->code == GIMPLE_OMP_SCAN;
1185}
1186
1187template <>
1188template <>
1189inline bool
1190is_a_helper <gomp_for *>::test (gimple *gs)
1191{
1192 return gs->code == GIMPLE_OMP_FOR;
1193}
1194
1195template <>
1196template <>
1197inline bool
1198is_a_helper <gimple_statement_omp_taskreg *>::test (gimple *gs)
1199{
1200 return (gs->code == GIMPLE_OMP_PARALLEL
1201 || gs->code == GIMPLE_OMP_TASK
1202 || gs->code == GIMPLE_OMP_TEAMS);
1203}
1204
1205template <>
1206template <>
1207inline bool
1208is_a_helper <gomp_parallel *>::test (gimple *gs)
1209{
1210 return gs->code == GIMPLE_OMP_PARALLEL;
1211}
1212
1213template <>
1214template <>
1215inline bool
1216is_a_helper <gomp_target *>::test (gimple *gs)
1217{
1218 return gs->code == GIMPLE_OMP_TARGET;
1219}
1220
1221template <>
1222template <>
1223inline bool
1224is_a_helper <gomp_sections *>::test (gimple *gs)
1225{
1226 return gs->code == GIMPLE_OMP_SECTIONS;
1227}
1228
1229template <>
1230template <>
1231inline bool
1232is_a_helper <gomp_single *>::test (gimple *gs)
1233{
1234 return gs->code == GIMPLE_OMP_SINGLE;
1235}
1236
1237template <>
1238template <>
1239inline bool
1240is_a_helper <gomp_teams *>::test (gimple *gs)
1241{
1242 return gs->code == GIMPLE_OMP_TEAMS;
1243}
1244
1245template <>
1246template <>
1247inline bool
1248is_a_helper <gomp_task *>::test (gimple *gs)
1249{
1250 return gs->code == GIMPLE_OMP_TASK;
1251}
1252
1253template <>
1254template <>
1255inline bool
1256is_a_helper <gphi *>::test (gimple *gs)
1257{
1258 return gs->code == GIMPLE_PHI;
1259}
1260
1261template <>
1262template <>
1263inline bool
1264is_a_helper <greturn *>::test (gimple *gs)
1265{
1266 return gs->code == GIMPLE_RETURN;
1267}
1268
1269template <>
1270template <>
1271inline bool
1272is_a_helper <gswitch *>::test (gimple *gs)
1273{
1274 return gs->code == GIMPLE_SWITCH;
1275}
1276
1277template <>
1278template <>
1279inline bool
1280is_a_helper <const gswitch *>::test (const gimple *gs)
1281{
1282 return gs->code == GIMPLE_SWITCH;
1283}
1284
1285template <>
1286template <>
1287inline bool
1288is_a_helper <gimple_statement_assume *>::test (gimple *gs)
1289{
1290 return gs->code == GIMPLE_ASSUME;
1291}
1292
1293template <>
1294template <>
1295inline bool
1296is_a_helper <gtransaction *>::test (gimple *gs)
1297{
1298 return gs->code == GIMPLE_TRANSACTION;
1299}
1300
1301template <>
1302template <>
1303inline bool
1304is_a_helper <gtry *>::test (gimple *gs)
1305{
1306 return gs->code == GIMPLE_TRY;
1307}
1308
1309template <>
1310template <>
1311inline bool
1312is_a_helper <const gtry *>::test (const gimple *gs)
1313{
1314 return gs->code == GIMPLE_TRY;
1315}
1316
1317template <>
1318template <>
1319inline bool
1320is_a_helper <gimple_statement_wce *>::test (gimple *gs)
1321{
1322 return gs->code == GIMPLE_WITH_CLEANUP_EXPR;
1323}
1324
1325template <>
1326template <>
1327inline bool
1328is_a_helper <const gasm *>::test (const gimple *gs)
1329{
1330 return gs->code == GIMPLE_ASM;
1331}
1332
1333template <>
1334template <>
1335inline bool
1336is_a_helper <const gbind *>::test (const gimple *gs)
1337{
1338 return gs->code == GIMPLE_BIND;
1339}
1340
1341template <>
1342template <>
1343inline bool
1344is_a_helper <const gcall *>::test (const gimple *gs)
1345{
1346 return gs->code == GIMPLE_CALL;
1347}
1348
1349template <>
1350template <>
1351inline bool
1352is_a_helper <const gcatch *>::test (const gimple *gs)
1353{
1354 return gs->code == GIMPLE_CATCH;
1355}
1356
1357template <>
1358template <>
1359inline bool
1360is_a_helper <const gresx *>::test (const gimple *gs)
1361{
1362 return gs->code == GIMPLE_RESX;
1363}
1364
1365template <>
1366template <>
1367inline bool
1368is_a_helper <const geh_dispatch *>::test (const gimple *gs)
1369{
1370 return gs->code == GIMPLE_EH_DISPATCH;
1371}
1372
1373template <>
1374template <>
1375inline bool
1376is_a_helper <const geh_filter *>::test (const gimple *gs)
1377{
1378 return gs->code == GIMPLE_EH_FILTER;
1379}
1380
1381template <>
1382template <>
1383inline bool
1384is_a_helper <const gomp_atomic_load *>::test (const gimple *gs)
1385{
1386 return gs->code == GIMPLE_OMP_ATOMIC_LOAD;
1387}
1388
1389template <>
1390template <>
1391inline bool
1392is_a_helper <const gomp_atomic_store *>::test (const gimple *gs)
1393{
1394 return gs->code == GIMPLE_OMP_ATOMIC_STORE;
1395}
1396
1397template <>
1398template <>
1399inline bool
1400is_a_helper <const gimple_statement_omp_return *>::test (const gimple *gs)
1401{
1402 return gs->code == GIMPLE_OMP_RETURN;
1403}
1404
1405template <>
1406template <>
1407inline bool
1408is_a_helper <const gomp_continue *>::test (const gimple *gs)
1409{
1410 return gs->code == GIMPLE_OMP_CONTINUE;
1411}
1412
1413template <>
1414template <>
1415inline bool
1416is_a_helper <const gomp_critical *>::test (const gimple *gs)
1417{
1418 return gs->code == GIMPLE_OMP_CRITICAL;
1419}
1420
1421template <>
1422template <>
1423inline bool
1424is_a_helper <const gomp_ordered *>::test (const gimple *gs)
1425{
1426 return gs->code == GIMPLE_OMP_ORDERED;
1427}
1428
1429template <>
1430template <>
1431inline bool
1432is_a_helper <const gomp_scan *>::test (const gimple *gs)
1433{
1434 return gs->code == GIMPLE_OMP_SCAN;
1435}
1436
1437template <>
1438template <>
1439inline bool
1440is_a_helper <const gomp_for *>::test (const gimple *gs)
1441{
1442 return gs->code == GIMPLE_OMP_FOR;
1443}
1444
1445template <>
1446template <>
1447inline bool
1448is_a_helper <const gimple_statement_omp_taskreg *>::test (const gimple *gs)
1449{
1450 return (gs->code == GIMPLE_OMP_PARALLEL
1451 || gs->code == GIMPLE_OMP_TASK
1452 || gs->code == GIMPLE_OMP_TEAMS);
1453}
1454
1455template <>
1456template <>
1457inline bool
1458is_a_helper <const gomp_parallel *>::test (const gimple *gs)
1459{
1460 return gs->code == GIMPLE_OMP_PARALLEL;
1461}
1462
1463template <>
1464template <>
1465inline bool
1466is_a_helper <const gomp_target *>::test (const gimple *gs)
1467{
1468 return gs->code == GIMPLE_OMP_TARGET;
1469}
1470
1471template <>
1472template <>
1473inline bool
1474is_a_helper <const gomp_sections *>::test (const gimple *gs)
1475{
1476 return gs->code == GIMPLE_OMP_SECTIONS;
1477}
1478
1479template <>
1480template <>
1481inline bool
1482is_a_helper <const gomp_single *>::test (const gimple *gs)
1483{
1484 return gs->code == GIMPLE_OMP_SINGLE;
1485}
1486
1487template <>
1488template <>
1489inline bool
1490is_a_helper <const gomp_teams *>::test (const gimple *gs)
1491{
1492 return gs->code == GIMPLE_OMP_TEAMS;
1493}
1494
1495template <>
1496template <>
1497inline bool
1498is_a_helper <const gomp_task *>::test (const gimple *gs)
1499{
1500 return gs->code == GIMPLE_OMP_TASK;
1501}
1502
1503template <>
1504template <>
1505inline bool
1506is_a_helper <const gphi *>::test (const gimple *gs)
1507{
1508 return gs->code == GIMPLE_PHI;
1509}
1510
1511template <>
1512template <>
1513inline bool
1514is_a_helper <const greturn *>::test (const gimple *gs)
1515{
1516 return gs->code == GIMPLE_RETURN;
1517}
1518
1519template <>
1520template <>
1521inline bool
1522is_a_helper <const gimple_statement_assume *>::test (const gimple *gs)
1523{
1524 return gs->code == GIMPLE_ASSUME;
1525}
1526
1527template <>
1528template <>
1529inline bool
1530is_a_helper <const gtransaction *>::test (const gimple *gs)
1531{
1532 return gs->code == GIMPLE_TRANSACTION;
1533}
1534
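/* Illustrative sketch only (not part of gimple.h): the is_a_helper
   specializations above are what make is_a / dyn_cast / as_a work on
   gimple statements, so a pass can branch on the statement kind
   without inspecting gs->code by hand.  sketch_lhs_if_assign is a
   hypothetical helper; real code would call gimple_assign_lhs, which
   is defined later in this header.  */
static inline tree
sketch_lhs_if_assign (gimple *gs)
{
  if (gassign *assign = dyn_cast <gassign *> (gs))
    return assign->op[0];	/* the LHS slot of an assignment */
  return NULL_TREE;
}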
1535/* Offset in bytes to the location of the operand vector.
1536 Zero if there is no operand vector for this tuple structure. */
1537extern size_t const gimple_ops_offset_[];
1538
1539/* Map GIMPLE codes to GSS codes. */
1540extern enum gimple_statement_structure_enum const gss_for_code_[];
1541
1542/* This variable holds the currently expanded gimple statement for purposes
1543 of communicating the profile info to the builtin expanders. */
1544extern gimple *currently_expanding_gimple_stmt;
1545
1546size_t gimple_size (enum gimple_code code, unsigned num_ops = 0);
1547void gimple_init (gimple *g, enum gimple_code code, unsigned num_ops);
1548gimple *gimple_alloc (enum gimple_code, unsigned CXX_MEM_STAT_INFO);
1549greturn *gimple_build_return (tree);
1550void gimple_call_reset_alias_info (gcall *);
1551gcall *gimple_build_call_vec (tree, const vec<tree> &);
1552gcall *gimple_build_call (tree, unsigned, ...);
1553gcall *gimple_build_call_valist (tree, unsigned, va_list);
1554gcall *gimple_build_call_internal (enum internal_fn, unsigned, ...);
1555gcall *gimple_build_call_internal_vec (enum internal_fn, const vec<tree> &);
1556gcall *gimple_build_call_from_tree (tree, tree);
1557gassign *gimple_build_assign (tree, tree CXX_MEM_STAT_INFO);
1558gassign *gimple_build_assign (tree, enum tree_code,
1559 tree, tree, tree CXX_MEM_STAT_INFO);
1560gassign *gimple_build_assign (tree, enum tree_code,
1561 tree, tree CXX_MEM_STAT_INFO);
1562gassign *gimple_build_assign (tree, enum tree_code, tree CXX_MEM_STAT_INFO);
1563gcond *gimple_build_cond (enum tree_code, tree, tree, tree, tree);
1564gcond *gimple_build_cond_from_tree (tree, tree, tree);
1565void gimple_cond_set_condition_from_tree (gcond *, tree);
1566glabel *gimple_build_label (tree label);
1567ggoto *gimple_build_goto (tree dest);
1568gimple *gimple_build_nop (void);
1569gbind *gimple_build_bind (tree, gimple_seq, tree);
1570gasm *gimple_build_asm_vec (const char *, vec<tree, va_gc> *,
1571 vec<tree, va_gc> *, vec<tree, va_gc> *,
1572 vec<tree, va_gc> *);
1573gcatch *gimple_build_catch (tree, gimple_seq);
1574geh_filter *gimple_build_eh_filter (tree, gimple_seq);
1575geh_mnt *gimple_build_eh_must_not_throw (tree);
1576geh_else *gimple_build_eh_else (gimple_seq, gimple_seq);
1577gtry *gimple_build_try (gimple_seq, gimple_seq,
1578 enum gimple_try_flags);
1579gimple *gimple_build_wce (gimple_seq);
1580gresx *gimple_build_resx (int);
1581gswitch *gimple_build_switch_nlabels (unsigned, tree, tree);
1582gswitch *gimple_build_switch (tree, tree, const vec<tree> &);
1583geh_dispatch *gimple_build_eh_dispatch (int);
1584gdebug *gimple_build_debug_bind (tree, tree, gimple * CXX_MEM_STAT_INFO);
1585gdebug *gimple_build_debug_source_bind (tree, tree, gimple * CXX_MEM_STAT_INFO);
1586gdebug *gimple_build_debug_begin_stmt (tree, location_t CXX_MEM_STAT_INFO);
1587gdebug *gimple_build_debug_inline_entry (tree, location_t CXX_MEM_STAT_INFO);
1588gomp_critical *gimple_build_omp_critical (gimple_seq, tree, tree);
1589gomp_for *gimple_build_omp_for (gimple_seq, int, tree, size_t, gimple_seq);
1590gomp_parallel *gimple_build_omp_parallel (gimple_seq, tree, tree, tree);
1591gomp_task *gimple_build_omp_task (gimple_seq, tree, tree, tree, tree,
1592 tree, tree);
1593gimple *gimple_build_omp_section (gimple_seq);
1594gimple *gimple_build_omp_scope (gimple_seq, tree);
1595gimple *gimple_build_omp_master (gimple_seq);
1596gimple *gimple_build_omp_masked (gimple_seq, tree);
1597gimple *gimple_build_omp_taskgroup (gimple_seq, tree);
1598gomp_continue *gimple_build_omp_continue (tree, tree);
1599gomp_ordered *gimple_build_omp_ordered (gimple_seq, tree);
1600gimple *gimple_build_omp_return (bool);
1601gomp_scan *gimple_build_omp_scan (gimple_seq, tree);
1602gomp_sections *gimple_build_omp_sections (gimple_seq, tree);
1603gimple *gimple_build_omp_sections_switch (void);
1604gomp_single *gimple_build_omp_single (gimple_seq, tree);
1605gomp_target *gimple_build_omp_target (gimple_seq, int, tree);
1606gomp_teams *gimple_build_omp_teams (gimple_seq, tree);
1607gomp_atomic_load *gimple_build_omp_atomic_load (tree, tree,
1608 enum omp_memory_order);
1609gomp_atomic_store *gimple_build_omp_atomic_store (tree, enum omp_memory_order);
1610gimple *gimple_build_assume (tree, gimple_seq);
1611gtransaction *gimple_build_transaction (gimple_seq);
1612extern void gimple_seq_add_stmt (gimple_seq *, gimple *);
1613extern void gimple_seq_add_stmt_without_update (gimple_seq *, gimple *);
1614void gimple_seq_add_seq (gimple_seq *, gimple_seq);
1615void gimple_seq_add_seq_without_update (gimple_seq *, gimple_seq);
1616extern void annotate_all_with_location_after (gimple_seq, gimple_stmt_iterator,
1617 location_t);
1618extern void annotate_all_with_location (gimple_seq, location_t);
1619bool empty_body_p (gimple_seq);
1620gimple_seq gimple_seq_copy (gimple_seq);
1621bool gimple_call_same_target_p (const gimple *, const gimple *);
1622int gimple_call_flags (const gimple *);
1623int gimple_call_arg_flags (const gcall *, unsigned);
1624int gimple_call_retslot_flags (const gcall *);
1625int gimple_call_static_chain_flags (const gcall *);
1626int gimple_call_return_flags (const gcall *);
1627bool gimple_call_nonnull_result_p (gcall *);
1628tree gimple_call_nonnull_arg (gcall *);
1629bool gimple_assign_copy_p (gimple *);
1630bool gimple_assign_ssa_name_copy_p (gimple *);
1631bool gimple_assign_unary_nop_p (gimple *);
1632void gimple_set_bb (gimple *, basic_block);
1633void gimple_assign_set_rhs_from_tree (gimple_stmt_iterator *, tree);
1634void gimple_assign_set_rhs_with_ops (gimple_stmt_iterator *, enum tree_code,
1635 tree, tree, tree);
1636tree gimple_get_lhs (const gimple *);
1637void gimple_set_lhs (gimple *, tree);
1638gimple *gimple_copy (gimple *);
1639void gimple_move_vops (gimple *, gimple *);
1640bool gimple_has_side_effects (const gimple *);
1641bool gimple_could_trap_p_1 (const gimple *, bool, bool);
1642bool gimple_could_trap_p (const gimple *);
1643bool gimple_assign_rhs_could_trap_p (gimple *);
1644extern void dump_gimple_statistics (void);
1645unsigned get_gimple_rhs_num_ops (enum tree_code);
1646gcall *gimple_call_copy_skip_args (gcall *, bitmap);
1647extern bool gimple_compare_field_offset (tree, tree);
1648extern tree gimple_unsigned_type (tree);
1649extern tree gimple_signed_type (tree);
1650extern alias_set_type gimple_get_alias_set (tree);
1651extern bool gimple_ior_addresses_taken (bitmap, gimple *);
1652extern bool gimple_builtin_call_types_compatible_p (const gimple *, tree);
1653extern combined_fn gimple_call_combined_fn (const gimple *);
1654extern bool gimple_call_operator_delete_p (const gcall *);
1655extern bool gimple_call_builtin_p (const gimple *);
1656extern bool gimple_call_builtin_p (const gimple *, enum built_in_class);
1657extern bool gimple_call_builtin_p (const gimple *, enum built_in_function);
1658extern bool gimple_asm_clobbers_memory_p (const gasm *);
1659extern void dump_decl_set (FILE *, bitmap);
1660extern bool nonfreeing_call_p (gimple *);
1661extern bool nonbarrier_call_p (gimple *);
1662extern bool infer_nonnull_range (gimple *, tree);
1663extern bool infer_nonnull_range_by_dereference (gimple *, tree);
1664extern bool infer_nonnull_range_by_attribute (gimple *, tree);
1665extern void sort_case_labels (vec<tree> &);
1666extern void preprocess_case_label_vec_for_gimple (vec<tree> &, tree, tree *);
1667extern void gimple_seq_set_location (gimple_seq, location_t);
1668extern void gimple_seq_discard (gimple_seq);
1669extern void maybe_remove_unused_call_args (struct function *, gimple *);
1670extern bool gimple_inexpensive_call_p (gcall *);
1671extern bool stmt_can_terminate_bb_p (gimple *);
1672extern location_t gimple_or_expr_nonartificial_location (gimple *, tree);
1673gcall *gimple_build_builtin_unreachable (location_t);
1674
1675/* Return the disposition for a warning (or all warnings by default)
1676 for a statement. */
1677extern bool warning_suppressed_p (const gimple *, opt_code = all_warnings)
1678 ATTRIBUTE_NONNULL (1);
1679/* Set the disposition for a warning (or all warnings by default)
1680 at a location to enabled by default. */
1681extern void suppress_warning (gimple *, opt_code = all_warnings,
1682 bool = true) ATTRIBUTE_NONNULL (1);
1683
1684/* Copy the warning disposition mapping from one statement to another. */
1685extern void copy_warning (gimple *, const gimple *)
1686 ATTRIBUTE_NONNULL (1) ATTRIBUTE_NONNULL (2);
1687/* Copy the warning disposition mapping from an expression to a statement. */
1688extern void copy_warning (gimple *, const_tree)
1689 ATTRIBUTE_NONNULL (1) ATTRIBUTE_NONNULL (2);
1690/* Copy the warning disposition mapping from a statement to an expression. */
1691extern void copy_warning (tree, const gimple *)
1692 ATTRIBUTE_NONNULL (1) ATTRIBUTE_NONNULL (2);
1693
1694/* Formal (expression) temporary table handling: multiple occurrences of
1695 the same scalar expression are evaluated into the same temporary. */
1696
1697typedef struct gimple_temp_hash_elt
1698{
1699 tree val; /* Key */
1700 tree temp; /* Value */
1701} elt_t;
1702
1703/* Get the number of the next statement uid to be allocated. */
1704inline unsigned int
1705gimple_stmt_max_uid (struct function *fn)
1706{
1707 return fn->last_stmt_uid;
1708}
1709
1710/* Set the number of the next statement uid to be allocated. */
1711inline void
1712set_gimple_stmt_max_uid (struct function *fn, unsigned int maxid)
1713{
1714 fn->last_stmt_uid = maxid;
1715}
1716
1717/* Allocate the next statement uid: return the current counter and advance it. */
1718inline unsigned int
1719inc_gimple_stmt_max_uid (struct function *fn)
1720{
1721 return fn->last_stmt_uid++;
1722}
1723
1724/* Return the first node in GIMPLE sequence S. */
1725
1726inline gimple_seq_node
1727gimple_seq_first (gimple_seq s)
1728{
1729 return s;
1730}
1731
1732
1733/* Return the first statement in GIMPLE sequence S. */
1734
1735inline gimple *
1736gimple_seq_first_stmt (gimple_seq s)
1737{
1738 gimple_seq_node n = gimple_seq_first (s);
1739 return n;
1740}
1741
1742/* Return the first statement in GIMPLE sequence S as a gbind *,
1743 verifying that it has code GIMPLE_BIND in a checked build. */
1744
1745inline gbind *
1746gimple_seq_first_stmt_as_a_bind (gimple_seq s)
1747{
1748 gimple_seq_node n = gimple_seq_first (s);
1749 return as_a <gbind *> (n);
1750}
1751
1752
1753/* Return the last node in GIMPLE sequence S. */
1754
1755inline gimple_seq_node
1756gimple_seq_last (gimple_seq s)
1757{
1758 return s ? s->prev : NULL;
1759}
1760
1761
1762/* Return the last statement in GIMPLE sequence S. */
1763
1764inline gimple *
1765gimple_seq_last_stmt (gimple_seq s)
1766{
1767 gimple_seq_node n = gimple_seq_last (s);
1768 return n;
1769}
1770
1771
1772/* Set the last node in GIMPLE sequence *PS to LAST. */
1773
1774inline void
1775gimple_seq_set_last (gimple_seq *ps, gimple_seq_node last)
1776{
1777 (*ps)->prev = last;
1778}
1779
1780
1781/* Set the first node in GIMPLE sequence *PS to FIRST. */
1782
1783inline void
1784gimple_seq_set_first (gimple_seq *ps, gimple_seq_node first)
1785{
1786 *ps = first;
1787}
1788
1789
1790/* Return true if GIMPLE sequence S is empty. */
1791
1792inline bool
1793gimple_seq_empty_p (gimple_seq s)
1794{
1795 return s == NULL;
1796}
1797
1798/* Allocate a new sequence and initialize its first element with STMT. */
1799
1800inline gimple_seq
1801gimple_seq_alloc_with_stmt (gimple *stmt)
1802{
1803 gimple_seq seq = NULL;
1804 gimple_seq_add_stmt (&seq, stmt);
1805 return seq;
1806}
1807
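/* Illustrative sketch only (not part of gimple.h): building a small
   sequence with the helpers above; gimple_build_nop and
   gimple_seq_add_stmt are declared earlier in this header.
   sketch_build_two_nops is a hypothetical helper.  */
static inline gimple_seq
sketch_build_two_nops (void)
{
  gimple_seq seq = gimple_seq_alloc_with_stmt (gimple_build_nop ());
  gimple_seq_add_stmt (&seq, gimple_build_nop ());
  return seq;
}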
1808
1809/* Returns the sequence of statements in BB. */
1810
1811inline gimple_seq
1812bb_seq (const_basic_block bb)
1813{
1814 return (!(bb->flags & BB_RTL)) ? bb->il.gimple.seq : NULL;
1815}
1816
1817inline gimple_seq *
1818bb_seq_addr (basic_block bb)
1819{
1820 return (!(bb->flags & BB_RTL)) ? &bb->il.gimple.seq : NULL;
1821}
1822
1823/* Sets the sequence of statements in BB to SEQ. */
1824
1825inline void
1826set_bb_seq (basic_block bb, gimple_seq seq)
1827{
1828 gcc_checking_assert (!(bb->flags & BB_RTL));
1829 bb->il.gimple.seq = seq;
1830}
1831
1832
1833/* Return the code for GIMPLE statement G. */
1834
1835inline enum gimple_code
1836gimple_code (const gimple *g)
1837{
1838 return g->code;
1839}
1840
1841
1842/* Return the GSS code used by a GIMPLE code. */
1843
1844inline enum gimple_statement_structure_enum
1845gss_for_code (enum gimple_code code)
1846{
1847 gcc_gimple_checking_assert ((unsigned int)code < LAST_AND_UNUSED_GIMPLE_CODE);
1848 return gss_for_code_[code];
1849}
1850
1851
1852/* Return which GSS code is used by GS. */
1853
1854inline enum gimple_statement_structure_enum
1855gimple_statement_structure (gimple *gs)
1856{
1857 return gss_for_code (gimple_code (gs));
1858}
1859
1860
1861/* Return true if statement G has sub-statements. This is only true for
1862 High GIMPLE statements. */
1863
1864inline bool
1865gimple_has_substatements (gimple *g)
1866{
1867 switch (gimple_code (g))
1868 {
1869 case GIMPLE_ASSUME:
1870 case GIMPLE_BIND:
1871 case GIMPLE_CATCH:
1872 case GIMPLE_EH_FILTER:
1873 case GIMPLE_EH_ELSE:
1874 case GIMPLE_TRY:
1875 case GIMPLE_OMP_FOR:
1876 case GIMPLE_OMP_MASTER:
1877 case GIMPLE_OMP_MASKED: