Bug Summary

File:build/gcc/ipa-modref.c
Warning:line 4066, column 5
Called C++ object pointer is null

Annotated Source Code

Press '?' to see keyboard shortcuts

clang -cc1 -cc1 -triple x86_64-unknown-linux-gnu -analyze -disable-free -disable-llvm-verifier -discard-value-names -main-file-name ipa-modref.c -analyzer-store=region -analyzer-opt-analyze-nested-blocks -analyzer-checker=core -analyzer-checker=apiModeling -analyzer-checker=unix -analyzer-checker=deadcode -analyzer-checker=cplusplus -analyzer-checker=security.insecureAPI.UncheckedReturn -analyzer-checker=security.insecureAPI.getpw -analyzer-checker=security.insecureAPI.gets -analyzer-checker=security.insecureAPI.mktemp -analyzer-checker=security.insecureAPI.mkstemp -analyzer-checker=security.insecureAPI.vfork -analyzer-checker=nullability.NullPassedToNonnull -analyzer-checker=nullability.NullReturnedFromNonnull -analyzer-output plist -w -setup-static-analyzer -analyzer-config-compatibility-mode=true -mrelocation-model static -mframe-pointer=none -fmath-errno -fno-rounding-math -mconstructor-aliases -munwind-tables -target-cpu x86-64 -tune-cpu generic -debugger-tuning=gdb -fcoverage-compilation-dir=/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/objdir/gcc -resource-dir /usr/lib64/clang/13.0.0 -D IN_GCC -D HAVE_CONFIG_H -I . -I . -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/. 
-I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../include -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libcpp/include -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libcody -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libdecnumber -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libdecnumber/bid -I ../libdecnumber -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libbacktrace -internal-isystem /usr/bin/../lib64/gcc/x86_64-suse-linux/11/../../../../include/c++/11 -internal-isystem /usr/bin/../lib64/gcc/x86_64-suse-linux/11/../../../../include/c++/11/x86_64-suse-linux -internal-isystem /usr/bin/../lib64/gcc/x86_64-suse-linux/11/../../../../include/c++/11/backward -internal-isystem /usr/lib64/clang/13.0.0/include -internal-isystem /usr/local/include -internal-isystem /usr/bin/../lib64/gcc/x86_64-suse-linux/11/../../../../x86_64-suse-linux/include -internal-externc-isystem /include -internal-externc-isystem /usr/include -O2 -Wno-narrowing -Wwrite-strings -Wno-error=format-diag -Wno-long-long -Wno-variadic-macros -Wno-overlength-strings -fdeprecated-macro -fdebug-compilation-dir=/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/objdir/gcc -ferror-limit 19 -fno-rtti -fgnuc-version=4.2.1 -vectorize-loops -vectorize-slp -analyzer-output=plist-html -analyzer-config silence-checkers=core.NullDereference -faddrsig -D__GCC_HAVE_DWARF2_CFI_ASM=1 -o /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/objdir/clang-static-analyzer/2021-11-20-133755-20252-1/report-cbafU7.plist -x c++ /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c

/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c

1/* Search for references that a functions loads or stores.
2 Copyright (C) 2020-2021 Free Software Foundation, Inc.
3 Contributed by David Cepelik and Jan Hubicka
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it under
8the terms of the GNU General Public License as published by the Free
9Software Foundation; either version 3, or (at your option) any later
10version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING3. If not see
19<http://www.gnu.org/licenses/>. */
20
21/* Mod/ref pass records summary about loads and stores performed by the
22 function. This is later used by alias analysis to disambiguate memory
23 accesses across function calls.
24
25 This file contains a tree pass and an IPA pass. Both perform the same
26 analysis, however the tree pass is executed during early and late optimization
27 passes to propagate info downwards in the compilation order. The IPA pass
28 propagates across the callgraph and is able to handle recursion and works on
29 the whole program during link-time analysis.
30
31 LTO mode differs from the local mode by not recording alias sets but types
32 that are translated to alias sets later. This is necessary in order to stream
33 the information because the alias sets are rebuilt at stream-in time and may
34 not correspond to ones seen during analysis. For this reason part of the
35 analysis is duplicated.
36
37 The following information is computed
38 1) load/store access tree described in ipa-modref-tree.h
39 This is used by tree-ssa-alias to disambiguate load/stores
40 2) EAF flags used by points-to analysis (in tree-ssa-structalias).
41 and defined in tree-core.h.
42 and stored to optimization_summaries.
43
44 There are multiple summaries computed and used during the propagation:
45 - summaries holds summaries from analysis to IPA propagation
46 time.
47 - summaries_lto is same as summaries but holds them in a format
48 that can be streamed (as described above).
49 - fnspec_summary holds fnspec strings for call. This is
50 necessary because gimple_call_fnspec performs additional
51 analysis except for looking callee fndecl.
52 - escape_summary holds escape points for given call edge.
53 That is a vector recording what function parameters
54 may escape to a function call (and with what parameter index). */
55
56#include "config.h"
57#include "system.h"
58#include "coretypes.h"
59#include "backend.h"
60#include "tree.h"
61#include "gimple.h"
62#include "alloc-pool.h"
63#include "tree-pass.h"
64#include "gimple-iterator.h"
65#include "tree-dfa.h"
66#include "cgraph.h"
67#include "ipa-utils.h"
68#include "symbol-summary.h"
69#include "gimple-pretty-print.h"
70#include "gimple-walk.h"
71#include "print-tree.h"
72#include "tree-streamer.h"
73#include "alias.h"
74#include "calls.h"
75#include "ipa-modref-tree.h"
76#include "ipa-modref.h"
77#include "value-range.h"
78#include "ipa-prop.h"
79#include "ipa-fnsummary.h"
80#include "attr-fnspec.h"
81#include "symtab-clones.h"
82#include "gimple-ssa.h"
83#include "tree-phinodes.h"
84#include "tree-ssa-operands.h"
85#include "ssa-iterators.h"
86#include "stringpool.h"
87#include "tree-ssanames.h"
88#include "attribs.h"
89#include "tree-cfg.h"
90#include "tree-eh.h"
91
92
93namespace {
94
95/* We record fnspec specifiers for call edges since they depends on actual
96 gimple statements. */
97
98class fnspec_summary
99{
100public:
101 char *fnspec;
102
103 fnspec_summary ()
104 : fnspec (NULLnullptr)
105 {
106 }
107
108 ~fnspec_summary ()
109 {
110 free (fnspec);
111 }
112};
113
114/* Summary holding fnspec string for a given call. */
115
116class fnspec_summaries_t : public call_summary <fnspec_summary *>
117{
118public:
119 fnspec_summaries_t (symbol_table *symtab)
120 : call_summary <fnspec_summary *> (symtab) {}
121 /* Hook that is called by summary when an edge is duplicated. */
122 virtual void duplicate (cgraph_edge *,
123 cgraph_edge *,
124 fnspec_summary *src,
125 fnspec_summary *dst)
126 {
127 dst->fnspec = xstrdup (src->fnspec);
128 }
129};
130
131static fnspec_summaries_t *fnspec_summaries = NULLnullptr;
132
133/* Escape summary holds a vector of param indexes that escape to
134 a given call. */
135struct escape_entry
136{
137 /* Parameter that escapes at a given call. */
138 int parm_index;
139 /* Argument it escapes to. */
140 unsigned int arg;
141 /* Minimal flags known about the argument. */
142 eaf_flags_t min_flags;
143 /* Does it escape directly or indirectly? */
144 bool direct;
145};
146
147/* Dump EAF flags. */
148
149static void
150dump_eaf_flags (FILE *out, int flags, bool newline = true)
151{
152 if (flags & EAF_UNUSED(1 << 1))
153 fprintf (out, " unused");
154 if (flags & EAF_NO_DIRECT_CLOBBER(1 << 2))
155 fprintf (out, " no_direct_clobber");
156 if (flags & EAF_NO_INDIRECT_CLOBBER(1 << 3))
157 fprintf (out, " no_indirect_clobber");
158 if (flags & EAF_NO_DIRECT_ESCAPE(1 << 4))
159 fprintf (out, " no_direct_escape");
160 if (flags & EAF_NO_INDIRECT_ESCAPE(1 << 5))
161 fprintf (out, " no_indirect_escape");
162 if (flags & EAF_NOT_RETURNED_DIRECTLY(1 << 6))
163 fprintf (out, " not_returned_directly");
164 if (flags & EAF_NOT_RETURNED_INDIRECTLY(1 << 7))
165 fprintf (out, " not_returned_indirectly");
166 if (flags & EAF_NO_DIRECT_READ(1 << 8))
167 fprintf (out, " no_direct_read");
168 if (flags & EAF_NO_INDIRECT_READ(1 << 9))
169 fprintf (out, " no_indirect_read");
170 if (newline)
171 fprintf (out, "\n");
172}
173
174struct escape_summary
175{
176 auto_vec <escape_entry> esc;
177 void dump (FILE *out)
178 {
179 for (unsigned int i = 0; i < esc.length (); i++)
180 {
181 fprintf (out, " parm %i arg %i %s min:",
182 esc[i].parm_index,
183 esc[i].arg,
184 esc[i].direct ? "(direct)" : "(indirect)");
185 dump_eaf_flags (out, esc[i].min_flags, false);
186 }
187 fprintf (out, "\n");
188 }
189};
190
191class escape_summaries_t : public call_summary <escape_summary *>
192{
193public:
194 escape_summaries_t (symbol_table *symtab)
195 : call_summary <escape_summary *> (symtab) {}
196 /* Hook that is called by summary when an edge is duplicated. */
197 virtual void duplicate (cgraph_edge *,
198 cgraph_edge *,
199 escape_summary *src,
200 escape_summary *dst)
201 {
202 dst->esc = src->esc.copy ();
203 }
204};
205
206static escape_summaries_t *escape_summaries = NULLnullptr;
207
208} /* ANON namespace: GTY annotated summaries can not be anonymous. */
209
210
211/* Class (from which there is one global instance) that holds modref summaries
212 for all analyzed functions. */
213
214class GTY((user)) modref_summaries
215 : public fast_function_summary <modref_summary *, va_gc>
216{
217public:
218 modref_summaries (symbol_table *symtab)
219 : fast_function_summary <modref_summary *, va_gc> (symtab) {}
220 virtual void insert (cgraph_node *, modref_summary *state);
221 virtual void duplicate (cgraph_node *src_node,
222 cgraph_node *dst_node,
223 modref_summary *src_data,
224 modref_summary *dst_data);
225 static modref_summaries *create_ggc (symbol_table *symtab)
226 {
227 return new (ggc_alloc_no_dtor<modref_summaries> ())
228 modref_summaries (symtab);
229 }
230};
231
232class modref_summary_lto;
233
234/* Class (from which there is one global instance) that holds modref summaries
235 for all analyzed functions. */
236
237class GTY((user)) modref_summaries_lto
238 : public fast_function_summary <modref_summary_lto *, va_gc>
239{
240public:
241 modref_summaries_lto (symbol_table *symtab)
242 : fast_function_summary <modref_summary_lto *, va_gc> (symtab),
243 propagated (false) {}
244 virtual void insert (cgraph_node *, modref_summary_lto *state);
245 virtual void duplicate (cgraph_node *src_node,
246 cgraph_node *dst_node,
247 modref_summary_lto *src_data,
248 modref_summary_lto *dst_data);
249 static modref_summaries_lto *create_ggc (symbol_table *symtab)
250 {
251 return new (ggc_alloc_no_dtor<modref_summaries_lto> ())
252 modref_summaries_lto (symtab);
253 }
254 bool propagated;
255};
256
257/* Global variable holding all modref summaries
258 (from analysis to IPA propagation time). */
259
260static GTY(()) fast_function_summary <modref_summary *, va_gc>
261 *summaries;
262
263/* Global variable holding all modref optimization summaries
264 (from IPA propagation time or used by local optimization pass). */
265
266static GTY(()) fast_function_summary <modref_summary *, va_gc>
267 *optimization_summaries;
268
269/* LTO summaries hold info from analysis to LTO streaming or from LTO
270 stream-in through propagation to LTO stream-out. */
271
272static GTY(()) fast_function_summary <modref_summary_lto *, va_gc>
273 *summaries_lto;
274
275/* Summary for a single function which this pass produces. */
276
277modref_summary::modref_summary ()
278 : loads (NULLnullptr), stores (NULLnullptr), retslot_flags (0), static_chain_flags (0),
279 writes_errno (false), side_effects (false), nondeterministic (false),
280 calls_interposable (false), global_memory_read (false),
281 global_memory_written (false), try_dse (false)
282{
283}
284
285modref_summary::~modref_summary ()
286{
287 if (loads)
288 ggc_delete (loads);
289 if (stores)
290 ggc_delete (stores);
291}
292
293/* Remove all flags from EAF_FLAGS that are implied by ECF_FLAGS and not
294 useful to track. If returns_void is true moreover clear
295 EAF_NOT_RETURNED. */
296static int
297remove_useless_eaf_flags (int eaf_flags, int ecf_flags, bool returns_void)
298{
299 if (ecf_flags & (ECF_CONST(1 << 0) | ECF_NOVOPS(1 << 9)))
300 eaf_flags &= ~implicit_const_eaf_flags;
301 else if (ecf_flags & ECF_PURE(1 << 1))
302 eaf_flags &= ~implicit_pure_eaf_flags;
303 else if ((ecf_flags & ECF_NORETURN(1 << 3)) || returns_void)
304 eaf_flags &= ~(EAF_NOT_RETURNED_DIRECTLY(1 << 6) | EAF_NOT_RETURNED_INDIRECTLY(1 << 7));
305 return eaf_flags;
306}
307
308/* Return true if FLAGS holds some useful information. */
309
310static bool
311eaf_flags_useful_p (vec <eaf_flags_t> &flags, int ecf_flags)
312{
313 for (unsigned i = 0; i < flags.length (); i++)
314 if (remove_useless_eaf_flags (flags[i], ecf_flags, false))
315 return true;
316 return false;
317}
318
319/* Return true if summary is potentially useful for optimization.
320 If CHECK_FLAGS is false assume that arg_flags are useful. */
321
322bool
323modref_summary::useful_p (int ecf_flags, bool check_flags)
324{
325 if (arg_flags.length () && !check_flags)
326 return true;
327 if (check_flags && eaf_flags_useful_p (arg_flags, ecf_flags))
328 return true;
329 arg_flags.release ();
330 if (check_flags && remove_useless_eaf_flags (retslot_flags, ecf_flags, false))
331 return true;
332 if (check_flags
333 && remove_useless_eaf_flags (static_chain_flags, ecf_flags, false))
334 return true;
335 if (ecf_flags & (ECF_CONST(1 << 0) | ECF_NOVOPS(1 << 9)))
336 return ((!side_effects || !nondeterministic)
337 && (ecf_flags & ECF_LOOPING_CONST_OR_PURE(1 << 2)));
338 if (loads && !loads->every_base)
339 return true;
340 else
341 kills.release ();
342 if (ecf_flags & ECF_PURE(1 << 1))
343 return ((!side_effects || !nondeterministic)
344 && (ecf_flags & ECF_LOOPING_CONST_OR_PURE(1 << 2)));
345 return stores && !stores->every_base;
346}
347
348/* Single function summary used for LTO. */
349
350typedef modref_tree <tree> modref_records_lto;
351struct GTY(()) modref_summary_lto
352{
353 /* Load and stores in functions using types rather then alias sets.
354
355 This is necessary to make the information streamable for LTO but is also
356 more verbose and thus more likely to hit the limits. */
357 modref_records_lto *loads;
358 modref_records_lto *stores;
359 auto_vec<modref_access_node> GTY((skip)) kills;
360 auto_vec<eaf_flags_t> GTY((skip)) arg_flags;
361 eaf_flags_t retslot_flags;
362 eaf_flags_t static_chain_flags;
363 unsigned writes_errno : 1;
364 unsigned side_effects : 1;
365 unsigned nondeterministic : 1;
366 unsigned calls_interposable : 1;
367
368 modref_summary_lto ();
369 ~modref_summary_lto ();
370 void dump (FILE *);
371 bool useful_p (int ecf_flags, bool check_flags = true);
372};
373
374/* Summary for a single function which this pass produces. */
375
376modref_summary_lto::modref_summary_lto ()
377 : loads (NULLnullptr), stores (NULLnullptr), retslot_flags (0), static_chain_flags (0),
378 writes_errno (false), side_effects (false), nondeterministic (false),
379 calls_interposable (false)
380{
381}
382
383modref_summary_lto::~modref_summary_lto ()
384{
385 if (loads)
386 ggc_delete (loads);
387 if (stores)
388 ggc_delete (stores);
389}
390
391
392/* Return true if lto summary is potentially useful for optimization.
393 If CHECK_FLAGS is false assume that arg_flags are useful. */
394
395bool
396modref_summary_lto::useful_p (int ecf_flags, bool check_flags)
397{
398 if (arg_flags.length () && !check_flags)
399 return true;
400 if (check_flags && eaf_flags_useful_p (arg_flags, ecf_flags))
401 return true;
402 arg_flags.release ();
403 if (check_flags && remove_useless_eaf_flags (retslot_flags, ecf_flags, false))
404 return true;
405 if (check_flags
406 && remove_useless_eaf_flags (static_chain_flags, ecf_flags, false))
407 return true;
408 if (ecf_flags & (ECF_CONST(1 << 0) | ECF_NOVOPS(1 << 9)))
409 return ((!side_effects || !nondeterministic)
410 && (ecf_flags & ECF_LOOPING_CONST_OR_PURE(1 << 2)));
411 if (loads && !loads->every_base)
412 return true;
413 else
414 kills.release ();
415 if (ecf_flags & ECF_PURE(1 << 1))
416 return ((!side_effects || !nondeterministic)
417 && (ecf_flags & ECF_LOOPING_CONST_OR_PURE(1 << 2)));
418 return stores && !stores->every_base;
419}
420
421/* Dump records TT to OUT. */
422
423static void
424dump_records (modref_records *tt, FILE *out)
425{
426 fprintf (out, " Limits: %i bases, %i refs\n",
427 (int)tt->max_bases, (int)tt->max_refs);
428 if (tt->every_base)
429 {
430 fprintf (out, " Every base\n");
431 return;
432 }
433 size_t i;
434 modref_base_node <alias_set_type> *n;
435 FOR_EACH_VEC_SAFE_ELT (tt->bases, i, n)for (i = 0; vec_safe_iterate ((tt->bases), (i), &(n));
++(i))
436 {
437 fprintf (out, " Base %i: alias set %i\n", (int)i, n->base);
438 if (n->every_ref)
439 {
440 fprintf (out, " Every ref\n");
441 continue;
442 }
443 size_t j;
444 modref_ref_node <alias_set_type> *r;
445 FOR_EACH_VEC_SAFE_ELT (n->refs, j, r)for (j = 0; vec_safe_iterate ((n->refs), (j), &(r)); ++
(j))
446 {
447 fprintf (out, " Ref %i: alias set %i\n", (int)j, r->ref);
448 if (r->every_access)
449 {
450 fprintf (out, " Every access\n");
451 continue;
452 }
453 size_t k;
454 modref_access_node *a;
455 FOR_EACH_VEC_SAFE_ELT (r->accesses, k, a)for (k = 0; vec_safe_iterate ((r->accesses), (k), &(a)
); ++(k))
456 {
457 fprintf (out, " access:");
458 a->dump (out);
459 }
460 }
461 }
462}
463
464/* Dump records TT to OUT. */
465
466static void
467dump_lto_records (modref_records_lto *tt, FILE *out)
468{
469 fprintf (out, " Limits: %i bases, %i refs\n",
470 (int)tt->max_bases, (int)tt->max_refs);
471 if (tt->every_base)
472 {
473 fprintf (out, " Every base\n");
474 return;
475 }
476 size_t i;
477 modref_base_node <tree> *n;
478 FOR_EACH_VEC_SAFE_ELT (tt->bases, i, n)for (i = 0; vec_safe_iterate ((tt->bases), (i), &(n));
++(i))
479 {
480 fprintf (out, " Base %i:", (int)i);
481 print_generic_expr (dump_file, n->base);
482 fprintf (out, " (alias set %i)\n",
483 n->base ? get_alias_set (n->base) : 0);
484 if (n->every_ref)
485 {
486 fprintf (out, " Every ref\n");
487 continue;
488 }
489 size_t j;
490 modref_ref_node <tree> *r;
491 FOR_EACH_VEC_SAFE_ELT (n->refs, j, r)for (j = 0; vec_safe_iterate ((n->refs), (j), &(r)); ++
(j))
492 {
493 fprintf (out, " Ref %i:", (int)j);
494 print_generic_expr (dump_file, r->ref);
495 fprintf (out, " (alias set %i)\n",
496 r->ref ? get_alias_set (r->ref) : 0);
497 if (r->every_access)
498 {
499 fprintf (out, " Every access\n");
500 continue;
501 }
502 size_t k;
503 modref_access_node *a;
504 FOR_EACH_VEC_SAFE_ELT (r->accesses, k, a)for (k = 0; vec_safe_iterate ((r->accesses), (k), &(a)
); ++(k))
505 {
506 fprintf (out, " access:");
507 a->dump (out);
508 }
509 }
510 }
511}
512
513/* Dump all escape points of NODE to OUT. */
514
515static void
516dump_modref_edge_summaries (FILE *out, cgraph_node *node, int depth)
517{
518 int i = 0;
519 if (!escape_summaries)
520 return;
521 for (cgraph_edge *e = node->indirect_calls; e; e = e->next_callee)
522 {
523 class escape_summary *sum = escape_summaries->get (e);
524 if (sum)
525 {
526 fprintf (out, "%*sIndirect call %i in %s escapes:",
527 depth, "", i, node->dump_name ());
528 sum->dump (out);
529 }
530 i++;
531 }
532 for (cgraph_edge *e = node->callees; e; e = e->next_callee)
533 {
534 if (!e->inline_failed)
535 dump_modref_edge_summaries (out, e->callee, depth + 1);
536 class escape_summary *sum = escape_summaries->get (e);
537 if (sum)
538 {
539 fprintf (out, "%*sCall %s->%s escapes:", depth, "",
540 node->dump_name (), e->callee->dump_name ());
541 sum->dump (out);
542 }
543 class fnspec_summary *fsum = fnspec_summaries->get (e);
544 if (fsum)
545 {
546 fprintf (out, "%*sCall %s->%s fnspec: %s\n", depth, "",
547 node->dump_name (), e->callee->dump_name (),
548 fsum->fnspec);
549 }
550 }
551}
552
553/* Remove all call edge summaries associated with NODE. */
554
555static void
556remove_modref_edge_summaries (cgraph_node *node)
557{
558 if (!escape_summaries)
559 return;
560 for (cgraph_edge *e = node->indirect_calls; e; e = e->next_callee)
561 escape_summaries->remove (e);
562 for (cgraph_edge *e = node->callees; e; e = e->next_callee)
563 {
564 if (!e->inline_failed)
565 remove_modref_edge_summaries (e->callee);
566 escape_summaries->remove (e);
567 fnspec_summaries->remove (e);
568 }
569}
570
571/* Dump summary. */
572
573void
574modref_summary::dump (FILE *out)
575{
576 if (loads)
577 {
578 fprintf (out, " loads:\n");
579 dump_records (loads, out);
580 }
581 if (stores)
582 {
583 fprintf (out, " stores:\n");
584 dump_records (stores, out);
585 }
586 if (kills.length ())
587 {
588 fprintf (out, " kills:\n");
589 for (auto kill : kills)
590 {
591 fprintf (out, " ");
592 kill.dump (out);
593 }
594 }
595 if (writes_errno)
596 fprintf (out, " Writes errno\n");
597 if (side_effects)
598 fprintf (out, " Side effects\n");
599 if (nondeterministic)
600 fprintf (out, " Nondeterministic\n");
601 if (calls_interposable)
602 fprintf (out, " Calls interposable\n");
603 if (global_memory_read)
604 fprintf (out, " Global memory read\n");
605 if (global_memory_written)
606 fprintf (out, " Global memory written\n");
607 if (try_dse)
608 fprintf (out, " Try dse\n");
609 if (arg_flags.length ())
610 {
611 for (unsigned int i = 0; i < arg_flags.length (); i++)
612 if (arg_flags[i])
613 {
614 fprintf (out, " parm %i flags:", i);
615 dump_eaf_flags (out, arg_flags[i]);
616 }
617 }
618 if (retslot_flags)
619 {
620 fprintf (out, " Retslot flags:");
621 dump_eaf_flags (out, retslot_flags);
622 }
623 if (static_chain_flags)
624 {
625 fprintf (out, " Static chain flags:");
626 dump_eaf_flags (out, static_chain_flags);
627 }
628}
629
630/* Dump summary. */
631
632void
633modref_summary_lto::dump (FILE *out)
634{
635 fprintf (out, " loads:\n");
636 dump_lto_records (loads, out);
637 fprintf (out, " stores:\n");
638 dump_lto_records (stores, out);
639 if (kills.length ())
640 {
641 fprintf (out, " kills:\n");
642 for (auto kill : kills)
643 {
644 fprintf (out, " ");
645 kill.dump (out);
646 }
647 }
648 if (writes_errno)
649 fprintf (out, " Writes errno\n");
650 if (side_effects)
651 fprintf (out, " Side effects\n");
652 if (nondeterministic)
653 fprintf (out, " Nondeterministic\n");
654 if (calls_interposable)
655 fprintf (out, " Calls interposable\n");
656 if (arg_flags.length ())
657 {
658 for (unsigned int i = 0; i < arg_flags.length (); i++)
659 if (arg_flags[i])
660 {
661 fprintf (out, " parm %i flags:", i);
662 dump_eaf_flags (out, arg_flags[i]);
663 }
664 }
665 if (retslot_flags)
666 {
667 fprintf (out, " Retslot flags:");
668 dump_eaf_flags (out, retslot_flags);
669 }
670 if (static_chain_flags)
671 {
672 fprintf (out, " Static chain flags:");
673 dump_eaf_flags (out, static_chain_flags);
674 }
675}
676
677/* Called after summary is produced and before it is used by local analysis.
678 Can be called multiple times in case summary needs to update signature.
679 FUN is decl of function summary is attached to. */
680void
681modref_summary::finalize (tree fun)
682{
683 global_memory_read = !loads || loads->global_access_p ();
684 global_memory_written = !stores || stores->global_access_p ();
685
686 /* We can do DSE if we know function has no side effects and
687 we can analyse all stores. Disable dse if there are too many
688 stores to try. */
689 if (side_effects || global_memory_written || writes_errno)
690 try_dse = false;
691 else
692 {
693 try_dse = true;
694 size_t i, j, k;
695 int num_tests = 0, max_tests
696 = opt_for_fn (fun, param_modref_max_tests)(opts_for_fn (fun)->x_param_modref_max_tests);
697 modref_base_node <alias_set_type> *base_node;
698 modref_ref_node <alias_set_type> *ref_node;
699 modref_access_node *access_node;
700 FOR_EACH_VEC_SAFE_ELT (stores->bases, i, base_node)for (i = 0; vec_safe_iterate ((stores->bases), (i), &(
base_node)); ++(i))
701 {
702 if (base_node->every_ref)
703 {
704 try_dse = false;
705 break;
706 }
707 FOR_EACH_VEC_SAFE_ELT (base_node->refs, j, ref_node)for (j = 0; vec_safe_iterate ((base_node->refs), (j), &
(ref_node)); ++(j))
708 {
709 if (base_node->every_ref)
710 {
711 try_dse = false;
712 break;
713 }
714 FOR_EACH_VEC_SAFE_ELT (ref_node->accesses, k, access_node)for (k = 0; vec_safe_iterate ((ref_node->accesses), (k), &
(access_node)); ++(k))
715 if (num_tests++ > max_tests
716 || !access_node->parm_offset_known)
717 {
718 try_dse = false;
719 break;
720 }
721 if (!try_dse)
722 break;
723 }
724 if (!try_dse)
725 break;
726 }
727 }
728}
729
730/* Get function summary for FUNC if it exists, return NULL otherwise. */
731
732modref_summary *
733get_modref_function_summary (cgraph_node *func)
734{
735 /* Avoid creation of the summary too early (e.g. when front-end calls us). */
736 if (!optimization_summaries)
737 return NULLnullptr;
738
739 /* A single function body may be represented by multiple symbols with
740 different visibility. For example, if FUNC is an interposable alias,
741 we don't want to return anything, even if we have summary for the target
742 function. */
743 enum availability avail;
744 func = func->function_or_virtual_thunk_symbol
745 (&avail, current_function_decl ?
746 cgraph_node::get (current_function_decl) : NULLnullptr);
747 if (avail <= AVAIL_INTERPOSABLE)
748 return NULLnullptr;
749
750 modref_summary *r = optimization_summaries->get (func);
751 return r;
752}
753
754/* Get function summary for CALL if it exists, return NULL otherwise.
755 If non-null set interposed to indicate whether function may not
756 bind to current def. In this case sometimes loads from function
757 needs to be ignored. */
758
759modref_summary *
760get_modref_function_summary (gcall *call, bool *interposed)
761{
762 tree callee = gimple_call_fndecl (call);
763 if (!callee)
764 return NULLnullptr;
765 struct cgraph_node *node = cgraph_node::get (callee);
766 if (!node)
767 return NULLnullptr;
768 modref_summary *r = get_modref_function_summary (node);
769 if (interposed && r)
770 *interposed = r->calls_interposable
771 || !node->binds_to_current_def_p ();
772 return r;
773}
774
775
776namespace {
777
778/* Construct modref_access_node from REF. */
779static modref_access_node
780get_access (ao_ref *ref)
781{
782 tree base;
783
784 base = ao_ref_base (ref);
785 modref_access_node a = {ref->offset, ref->size, ref->max_size,
786 0, MODREF_UNKNOWN_PARM, false, 0};
787 if (TREE_CODE (base)((enum tree_code) (base)->base.code) == MEM_REF || TREE_CODE (base)((enum tree_code) (base)->base.code) == TARGET_MEM_REF)
788 {
789 tree memref = base;
790 base = TREE_OPERAND (base, 0)(*((const_cast<tree*> (tree_operand_check ((base), (0),
"/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 790, __FUNCTION__)))))
;
791
792 if (TREE_CODE (base)((enum tree_code) (base)->base.code) == SSA_NAME
793 && SSA_NAME_IS_DEFAULT_DEF (base)(tree_check ((base), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 793, __FUNCTION__, (SSA_NAME)))->base.default_def_flag
794 && TREE_CODE (SSA_NAME_VAR (base))((enum tree_code) (((tree_check ((base), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 794, __FUNCTION__, (SSA_NAME)))->ssa_name.var == (tree) nullptr
|| ((enum tree_code) ((base)->ssa_name.var)->base.code
) == IDENTIFIER_NODE ? (tree) nullptr : (base)->ssa_name.var
))->base.code)
== PARM_DECL)
795 {
796 a.parm_index = 0;
797 if (cfun(cfun + 0)->static_chain_decl
798 && base == ssa_default_def (cfun(cfun + 0), cfun(cfun + 0)->static_chain_decl))
799 a.parm_index = MODREF_STATIC_CHAIN_PARM;
800 else
801 for (tree t = DECL_ARGUMENTS (current_function_decl)((tree_check ((current_function_decl), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 801, __FUNCTION__, (FUNCTION_DECL)))->function_decl.arguments
)
;
802 t != SSA_NAME_VAR (base)((tree_check ((base), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 802, __FUNCTION__, (SSA_NAME)))->ssa_name.var == (tree) nullptr
|| ((enum tree_code) ((base)->ssa_name.var)->base.code
) == IDENTIFIER_NODE ? (tree) nullptr : (base)->ssa_name.var
)
; t = DECL_CHAIN (t)(((contains_struct_check (((contains_struct_check ((t), (TS_DECL_MINIMAL
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 802, __FUNCTION__))), (TS_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 802, __FUNCTION__))->common.chain))
)
803 a.parm_index++;
804 }
805 else
806 a.parm_index = MODREF_UNKNOWN_PARM;
807
808 if (a.parm_index != MODREF_UNKNOWN_PARM
809 && TREE_CODE (memref)((enum tree_code) (memref)->base.code) == MEM_REF)
810 {
811 a.parm_offset_known
812 = wi::to_poly_wide (TREE_OPERAND(*((const_cast<tree*> (tree_operand_check ((memref), (1
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 813, __FUNCTION__)))))
813 (memref, 1)(*((const_cast<tree*> (tree_operand_check ((memref), (1
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 813, __FUNCTION__)))))
).to_shwi (&a.parm_offset);
814 }
815 else
816 a.parm_offset_known = false;
817 }
818 else
819 a.parm_index = MODREF_UNKNOWN_PARM;
820 return a;
821}
822
823/* Record access into the modref_records data structure. */
824
825static void
826record_access (modref_records *tt, ao_ref *ref, modref_access_node &a)
827{
828 alias_set_type base_set = !flag_strict_aliasingglobal_options.x_flag_strict_aliasing ? 0
829 : ao_ref_base_alias_set (ref);
830 alias_set_type ref_set = !flag_strict_aliasingglobal_options.x_flag_strict_aliasing ? 0
831 : (ao_ref_alias_set (ref));
832 if (dump_file)
833 {
834 fprintf (dump_file, " - Recording base_set=%i ref_set=%i ",
835 base_set, ref_set);
836 a.dump (dump_file);
837 }
838 tt->insert (base_set, ref_set, a, false);
839}
840
841/* IPA version of record_access_tree. */
842
843static void
844record_access_lto (modref_records_lto *tt, ao_ref *ref, modref_access_node &a)
845{
846 /* get_alias_set sometimes use different type to compute the alias set
847 than TREE_TYPE (base). Do same adjustments. */
848 tree base_type = NULL_TREE(tree) nullptr, ref_type = NULL_TREE(tree) nullptr;
849 if (flag_strict_aliasingglobal_options.x_flag_strict_aliasing)
850 {
851 tree base;
852
853 base = ref->ref;
854 while (handled_component_p (base))
855 base = TREE_OPERAND (base, 0)(*((const_cast<tree*> (tree_operand_check ((base), (0),
"/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 855, __FUNCTION__)))))
;
856
857 base_type = reference_alias_ptr_type_1 (&base);
858
859 if (!base_type)
860 base_type = TREE_TYPE (base)((contains_struct_check ((base), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 860, __FUNCTION__))->typed.type)
;
861 else
862 base_type = TYPE_REF_CAN_ALIAS_ALL (base_type)((tree_check2 ((base_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 862, __FUNCTION__, (POINTER_TYPE), (REFERENCE_TYPE)))->base
.static_flag)
863 ? NULL_TREE(tree) nullptr : TREE_TYPE (base_type)((contains_struct_check ((base_type), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 863, __FUNCTION__))->typed.type)
;
864
865 tree ref_expr = ref->ref;
866 ref_type = reference_alias_ptr_type_1 (&ref_expr);
867
868 if (!ref_type)
869 ref_type = TREE_TYPE (ref_expr)((contains_struct_check ((ref_expr), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 869, __FUNCTION__))->typed.type)
;
870 else
871 ref_type = TYPE_REF_CAN_ALIAS_ALL (ref_type)((tree_check2 ((ref_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 871, __FUNCTION__, (POINTER_TYPE), (REFERENCE_TYPE)))->base
.static_flag)
872 ? NULL_TREE(tree) nullptr : TREE_TYPE (ref_type)((contains_struct_check ((ref_type), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 872, __FUNCTION__))->typed.type)
;
873
874 /* Sanity check that we are in sync with what get_alias_set does. */
875 gcc_checking_assert ((!base_type && !ao_ref_base_alias_set (ref))((void)(!((!base_type && !ao_ref_base_alias_set (ref)
) || get_alias_set (base_type) == ao_ref_base_alias_set (ref)
) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 877, __FUNCTION__), 0 : 0))
876 || get_alias_set (base_type)((void)(!((!base_type && !ao_ref_base_alias_set (ref)
) || get_alias_set (base_type) == ao_ref_base_alias_set (ref)
) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 877, __FUNCTION__), 0 : 0))
877 == ao_ref_base_alias_set (ref))((void)(!((!base_type && !ao_ref_base_alias_set (ref)
) || get_alias_set (base_type) == ao_ref_base_alias_set (ref)
) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 877, __FUNCTION__), 0 : 0))
;
878 gcc_checking_assert ((!ref_type && !ao_ref_alias_set (ref))((void)(!((!ref_type && !ao_ref_alias_set (ref)) || get_alias_set
(ref_type) == ao_ref_alias_set (ref)) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 880, __FUNCTION__), 0 : 0))
879 || get_alias_set (ref_type)((void)(!((!ref_type && !ao_ref_alias_set (ref)) || get_alias_set
(ref_type) == ao_ref_alias_set (ref)) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 880, __FUNCTION__), 0 : 0))
880 == ao_ref_alias_set (ref))((void)(!((!ref_type && !ao_ref_alias_set (ref)) || get_alias_set
(ref_type) == ao_ref_alias_set (ref)) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 880, __FUNCTION__), 0 : 0))
;
881
882 /* Do not bother to record types that have no meaningful alias set.
883 Also skip variably modified types since these go to local streams. */
884 if (base_type && (!get_alias_set (base_type)
885 || variably_modified_type_p (base_type, NULL_TREE(tree) nullptr)))
886 base_type = NULL_TREE(tree) nullptr;
887 if (ref_type && (!get_alias_set (ref_type)
888 || variably_modified_type_p (ref_type, NULL_TREE(tree) nullptr)))
889 ref_type = NULL_TREE(tree) nullptr;
890 }
891 if (dump_file)
892 {
893 fprintf (dump_file, " - Recording base type:");
894 print_generic_expr (dump_file, base_type);
895 fprintf (dump_file, " (alias set %i) ref type:",
896 base_type ? get_alias_set (base_type) : 0);
897 print_generic_expr (dump_file, ref_type);
898 fprintf (dump_file, " (alias set %i) ",
899 ref_type ? get_alias_set (ref_type) : 0);
900 a.dump (dump_file);
901 }
902
903 tt->insert (base_type, ref_type, a, false);
904}
905
906/* Returns true if and only if we should store the access to EXPR.
907 Some accesses, e.g. loads from automatic variables, are not interesting. */
908
909static bool
910record_access_p (tree expr)
911{
912 if (refs_local_or_readonly_memory_p (expr))
913 {
914 if (dump_file)
915 fprintf (dump_file, " - Read-only or local, ignoring.\n");
916 return false;
917 }
918 return true;
919}
920
921/* Return true if ECF flags says that nondeterminsm can be ignored. */
922
923static bool
924ignore_nondeterminism_p (tree caller, int flags)
925{
926 if ((flags & (ECF_CONST(1 << 0) | ECF_PURE(1 << 1)))
927 && !(flags & ECF_LOOPING_CONST_OR_PURE(1 << 2)))
928 return true;
929 if ((flags & (ECF_NORETURN(1 << 3) | ECF_NOTHROW(1 << 6))) == (ECF_NORETURN(1 << 3) | ECF_NOTHROW(1 << 6))
930 || (!opt_for_fn (caller, flag_exceptions)(opts_for_fn (caller)->x_flag_exceptions) && (flags & ECF_NORETURN(1 << 3))))
931 return true;
932 return false;
933}
934
935/* Return true if ECF flags says that return value can be ignored. */
936
937static bool
938ignore_retval_p (tree caller, int flags)
939{
940 if ((flags & (ECF_NORETURN(1 << 3) | ECF_NOTHROW(1 << 6))) == (ECF_NORETURN(1 << 3) | ECF_NOTHROW(1 << 6))
941 || (!opt_for_fn (caller, flag_exceptions)(opts_for_fn (caller)->x_flag_exceptions) && (flags & ECF_NORETURN(1 << 3))))
942 return true;
943 return false;
944}
945
946/* Return true if ECF flags says that stores can be ignored. */
947
948static bool
949ignore_stores_p (tree caller, int flags)
950{
951 if (flags & (ECF_PURE(1 << 1) | ECF_CONST(1 << 0) | ECF_NOVOPS(1 << 9)))
952 return true;
953 if ((flags & (ECF_NORETURN(1 << 3) | ECF_NOTHROW(1 << 6))) == (ECF_NORETURN(1 << 3) | ECF_NOTHROW(1 << 6))
954 || (!opt_for_fn (caller, flag_exceptions)(opts_for_fn (caller)->x_flag_exceptions) && (flags & ECF_NORETURN(1 << 3))))
955 return true;
956 return false;
957}
958
959/* Determine parm_map for argument OP. */
960
961modref_parm_map
962parm_map_for_arg (tree op)
963{
964 bool offset_known;
965 poly_int64 offset;
966 struct modref_parm_map parm_map;
967
968 parm_map.parm_offset_known = false;
969 parm_map.parm_offset = 0;
970
971 offset_known = unadjusted_ptr_and_unit_offset (op, &op, &offset);
972 if (TREE_CODE (op)((enum tree_code) (op)->base.code) == SSA_NAME
973 && SSA_NAME_IS_DEFAULT_DEF (op)(tree_check ((op), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 973, __FUNCTION__, (SSA_NAME)))->base.default_def_flag
974 && TREE_CODE (SSA_NAME_VAR (op))((enum tree_code) (((tree_check ((op), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 974, __FUNCTION__, (SSA_NAME)))->ssa_name.var == (tree) nullptr
|| ((enum tree_code) ((op)->ssa_name.var)->base.code) ==
IDENTIFIER_NODE ? (tree) nullptr : (op)->ssa_name.var))->
base.code)
== PARM_DECL)
975 {
976 int index = 0;
977 for (tree t = DECL_ARGUMENTS (current_function_decl)((tree_check ((current_function_decl), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 977, __FUNCTION__, (FUNCTION_DECL)))->function_decl.arguments
)
;
978 t != SSA_NAME_VAR (op)((tree_check ((op), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 978, __FUNCTION__, (SSA_NAME)))->ssa_name.var == (tree) nullptr
|| ((enum tree_code) ((op)->ssa_name.var)->base.code) ==
IDENTIFIER_NODE ? (tree) nullptr : (op)->ssa_name.var)
; t = DECL_CHAIN (t)(((contains_struct_check (((contains_struct_check ((t), (TS_DECL_MINIMAL
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 978, __FUNCTION__))), (TS_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 978, __FUNCTION__))->common.chain))
)
979 {
980 if (!t)
981 {
982 index = MODREF_UNKNOWN_PARM;
983 break;
984 }
985 index++;
986 }
987 parm_map.parm_index = index;
988 parm_map.parm_offset_known = offset_known;
989 parm_map.parm_offset = offset;
990 }
991 else if (points_to_local_or_readonly_memory_p (op))
992 parm_map.parm_index = MODREF_LOCAL_MEMORY_PARM;
993 else
994 parm_map.parm_index = MODREF_UNKNOWN_PARM;
995 return parm_map;
996}
997
998/* Merge side effects of call STMT to function with CALLEE_SUMMARY
999 int CUR_SUMMARY. Return true if something changed.
1000 If IGNORE_STORES is true, do not merge stores.
1001 If RECORD_ADJUSTMENTS is true cap number of adjustments to
1002 a given access to make dataflow finite. */
1003
1004bool
1005merge_call_side_effects (modref_summary *cur_summary,
1006 gimple *stmt, modref_summary *callee_summary,
1007 bool ignore_stores, cgraph_node *callee_node,
1008 bool record_adjustments, bool always_executed)
1009{
1010 auto_vec <modref_parm_map, 32> parm_map;
1011 modref_parm_map chain_map;
1012 bool changed = false;
1013 int flags = gimple_call_flags (stmt);
1014
1015 if ((flags & (ECF_CONST(1 << 0) | ECF_NOVOPS(1 << 9)))
1016 && !(flags & ECF_LOOPING_CONST_OR_PURE(1 << 2)))
1017 return changed;
1018
1019 if (!(flags & (ECF_CONST(1 << 0) | ECF_NOVOPS(1 << 9) | ECF_PURE(1 << 1)))
1020 || (flags & ECF_LOOPING_CONST_OR_PURE(1 << 2)))
1021 {
1022 if (!cur_summary->side_effects && callee_summary->side_effects)
1023 {
1024 if (dump_file)
1025 fprintf (dump_file, " - merging side effects.\n");
1026 cur_summary->side_effects = true;
1027 changed = true;
1028 }
1029 if (!cur_summary->nondeterministic && callee_summary->nondeterministic
1030 && !ignore_nondeterminism_p (current_function_decl, flags))
1031 {
1032 if (dump_file)
1033 fprintf (dump_file, " - merging nondeterministic.\n");
1034 cur_summary->nondeterministic = true;
1035 changed = true;
1036 }
1037 }
1038
1039 if (flags & (ECF_CONST(1 << 0) | ECF_NOVOPS(1 << 9)))
1040 return changed;
1041
1042 if (!cur_summary->calls_interposable && callee_summary->calls_interposable)
1043 {
1044 if (dump_file)
1045 fprintf (dump_file, " - merging calls interposable.\n");
1046 cur_summary->calls_interposable = true;
1047 changed = true;
1048 }
1049
1050 /* We can not safely optimize based on summary of callee if it does
1051 not always bind to current def: it is possible that memory load
1052 was optimized out earlier which may not happen in the interposed
1053 variant. */
1054 if (!callee_node->binds_to_current_def_p ()
1055 && !cur_summary->calls_interposable)
1056 {
1057 if (dump_file)
1058 fprintf (dump_file, " - May be interposed.\n");
1059 cur_summary->calls_interposable = true;
1060 changed = true;
1061 }
1062
1063 if (dump_file)
1064 fprintf (dump_file, " - Merging side effects of %s with parm map:",
1065 callee_node->dump_name ());
1066
1067 parm_map.safe_grow_cleared (gimple_call_num_args (stmt), true);
1068 for (unsigned i = 0; i < gimple_call_num_args (stmt); i++)
1069 {
1070 parm_map[i] = parm_map_for_arg (gimple_call_arg (stmt, i));
1071 if (dump_file)
1072 {
1073 fprintf (dump_file, " %i", parm_map[i].parm_index);
1074 if (parm_map[i].parm_offset_known)
1075 {
1076 fprintf (dump_file, " offset:");
1077 print_dec ((poly_int64_pod)parm_map[i].parm_offset,
1078 dump_file, SIGNED);
1079 }
1080 }
1081 }
1082 if (gimple_call_chain (stmt))
1083 {
1084 chain_map = parm_map_for_arg (gimple_call_chain (stmt));
1085 if (dump_file)
1086 {
1087 fprintf (dump_file, "static chain %i", chain_map.parm_index);
1088 if (chain_map.parm_offset_known)
1089 {
1090 fprintf (dump_file, " offset:");
1091 print_dec ((poly_int64_pod)chain_map.parm_offset,
1092 dump_file, SIGNED);
1093 }
1094 }
1095 }
1096 if (dump_file)
1097 fprintf (dump_file, "\n");
1098
1099 if (always_executed
1100 && callee_summary->kills.length ()
1101 && (!cfun(cfun + 0)->can_throw_non_call_exceptions
1102 || !stmt_could_throw_p (cfun(cfun + 0), stmt)))
1103 {
1104 /* Watch for self recursive updates. */
1105 auto_vec<modref_access_node, 32> saved_kills;
1106
1107 saved_kills.reserve_exact (callee_summary->kills.length ());
1108 saved_kills.splice (callee_summary->kills);
1109 for (auto kill : saved_kills)
1110 {
1111 if (kill.parm_index >= (int)parm_map.length ())
1112 continue;
1113 modref_parm_map &m
1114 = kill.parm_index == MODREF_STATIC_CHAIN_PARM
1115 ? chain_map
1116 : parm_map[kill.parm_index];
1117 if (m.parm_index == MODREF_LOCAL_MEMORY_PARM
1118 || m.parm_index == MODREF_UNKNOWN_PARM
1119 || m.parm_index == MODREF_RETSLOT_PARM
1120 || !m.parm_offset_known)
1121 continue;
1122 modref_access_node n = kill;
1123 n.parm_index = m.parm_index;
1124 n.parm_offset += m.parm_offset;
1125 if (modref_access_node::insert_kill (cur_summary->kills, n,
1126 record_adjustments))
1127 changed = true;
1128 }
1129 }
1130
1131 /* Merge with callee's summary. */
1132 changed |= cur_summary->loads->merge (callee_summary->loads, &parm_map,
1133 &chain_map, record_adjustments);
1134 if (!ignore_stores)
1135 {
1136 changed |= cur_summary->stores->merge (callee_summary->stores,
1137 &parm_map, &chain_map,
1138 record_adjustments);
1139 if (!cur_summary->writes_errno
1140 && callee_summary->writes_errno)
1141 {
1142 cur_summary->writes_errno = true;
1143 changed = true;
1144 }
1145 }
1146 return changed;
1147}
1148
1149/* Return access mode for argument I of call STMT with FNSPEC. */
1150
1151static modref_access_node
1152get_access_for_fnspec (gcall *call, attr_fnspec &fnspec,
1153 unsigned int i, modref_parm_map &map)
1154{
1155 tree size = NULL_TREE(tree) nullptr;
1156 unsigned int size_arg;
1157
1158 if (!fnspec.arg_specified_p (i))
1159 ;
1160 else if (fnspec.arg_max_access_size_given_by_arg_p (i, &size_arg))
1161 size = gimple_call_arg (call, size_arg);
1162 else if (fnspec.arg_access_size_given_by_type_p (i))
1163 {
1164 tree callee = gimple_call_fndecl (call);
1165 tree t = TYPE_ARG_TYPES (TREE_TYPE (callee))((tree_check2 ((((contains_struct_check ((callee), (TS_TYPED)
, "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 1165, __FUNCTION__))->typed.type)), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 1165, __FUNCTION__, (FUNCTION_TYPE), (METHOD_TYPE)))->type_non_common
.values)
;
1166
1167 for (unsigned int p = 0; p < i; p++)
1168 t = TREE_CHAIN (t)((contains_struct_check ((t), (TS_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 1168, __FUNCTION__))->common.chain)
;
1169 size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_VALUE (t)))((tree_class_check ((((contains_struct_check ((((tree_check (
(t), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 1169, __FUNCTION__, (TREE_LIST)))->list.value)), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 1169, __FUNCTION__))->typed.type)), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 1169, __FUNCTION__))->type_common.size_unit)
;
1170 }
1171 modref_access_node a = {0, -1, -1,
1172 map.parm_offset, map.parm_index,
1173 map.parm_offset_known, 0};
1174 poly_int64 size_hwi;
1175 if (size
1176 && poly_int_tree_p (size, &size_hwi)
1177 && coeffs_in_range_p (size_hwi, 0,
1178 HOST_WIDE_INT_MAX(~((long) (1UL << (64 - 1)))) / BITS_PER_UNIT(8)))
1179 {
1180 a.size = -1;
1181 a.max_size = size_hwi << LOG2_BITS_PER_UNIT3;
1182 }
1183 return a;
1184}
1185
1186/* Collapse loads and return true if something changed. */
1187
1188static bool
1189collapse_loads (modref_summary *cur_summary,
1190 modref_summary_lto *cur_summary_lto)
1191{
1192 bool changed = false;
1193
1194 if (cur_summary && !cur_summary->loads->every_base)
1195 {
1196 cur_summary->loads->collapse ();
1197 changed = true;
1198 }
1199 if (cur_summary_lto
1200 && !cur_summary_lto->loads->every_base)
1201 {
1202 cur_summary_lto->loads->collapse ();
1203 changed = true;
1204 }
1205 return changed;
1206}
1207
1208/* Collapse loads and return true if something changed. */
1209
1210static bool
1211collapse_stores (modref_summary *cur_summary,
1212 modref_summary_lto *cur_summary_lto)
1213{
1214 bool changed = false;
1215
1216 if (cur_summary && !cur_summary->stores->every_base)
1217 {
1218 cur_summary->stores->collapse ();
1219 changed = true;
1220 }
1221 if (cur_summary_lto
1222 && !cur_summary_lto->stores->every_base)
1223 {
1224 cur_summary_lto->stores->collapse ();
1225 changed = true;
1226 }
1227 return changed;
1228}
1229
1230
1231/* Apply side effects of call STMT to CUR_SUMMARY using FNSPEC.
1232 If IGNORE_STORES is true ignore them.
1233 Return false if no useful summary can be produced. */
1234
1235static bool
1236process_fnspec (modref_summary *cur_summary,
1237 modref_summary_lto *cur_summary_lto,
1238 gcall *call, bool ignore_stores)
1239{
1240 attr_fnspec fnspec = gimple_call_fnspec (call);
1241 int flags = gimple_call_flags (call);
1242
1243 if (!(flags & (ECF_CONST(1 << 0) | ECF_NOVOPS(1 << 9) | ECF_PURE(1 << 1)))
1244 || (flags & ECF_LOOPING_CONST_OR_PURE(1 << 2))
1245 || (cfun(cfun + 0)->can_throw_non_call_exceptions
1246 && stmt_could_throw_p (cfun(cfun + 0), call)))
1247 {
1248 if (cur_summary)
1249 {
1250 cur_summary->side_effects = true;
1251 if (!ignore_nondeterminism_p (current_function_decl, flags))
1252 cur_summary->nondeterministic = true;
1253 }
1254 if (cur_summary_lto)
1255 {
1256 cur_summary_lto->side_effects = true;
1257 if (!ignore_nondeterminism_p (current_function_decl, flags))
1258 cur_summary_lto->nondeterministic = true;
1259 }
1260 }
1261 if (flags & (ECF_CONST(1 << 0) | ECF_NOVOPS(1 << 9)))
1262 return true;
1263 if (!fnspec.known_p ())
1264 {
1265 if (dump_file && gimple_call_builtin_p (call, BUILT_IN_NORMAL))
1266 fprintf (dump_file, " Builtin with no fnspec: %s\n",
1267 IDENTIFIER_POINTER (DECL_NAME (gimple_call_fndecl (call)))((const char *) (tree_check ((((contains_struct_check ((gimple_call_fndecl
(call)), (TS_DECL_MINIMAL), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 1267, __FUNCTION__))->decl_minimal.name)), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 1267, __FUNCTION__, (IDENTIFIER_NODE)))->identifier.id.str
)
);
1268 if (ignore_stores)
1269 {
1270 collapse_loads (cur_summary, cur_summary_lto);
1271 return true;
1272 }
1273 return false;
1274 }
1275 if (fnspec.global_memory_read_p ())
1276 collapse_loads (cur_summary, cur_summary_lto);
1277 else
1278 {
1279 for (unsigned int i = 0; i < gimple_call_num_args (call); i++)
1280 if (!POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, i)))(((enum tree_code) (((contains_struct_check ((gimple_call_arg
(call, i)), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 1280, __FUNCTION__))->typed.type))->base.code) == POINTER_TYPE
|| ((enum tree_code) (((contains_struct_check ((gimple_call_arg
(call, i)), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 1280, __FUNCTION__))->typed.type))->base.code) == REFERENCE_TYPE
)
)
1281 ;
1282 else if (!fnspec.arg_specified_p (i)
1283 || fnspec.arg_maybe_read_p (i))
1284 {
1285 modref_parm_map map = parm_map_for_arg
1286 (gimple_call_arg (call, i));
1287
1288 if (map.parm_index == MODREF_LOCAL_MEMORY_PARM)
1289 continue;
1290 if (map.parm_index == MODREF_UNKNOWN_PARM)
1291 {
1292 collapse_loads (cur_summary, cur_summary_lto);
1293 break;
1294 }
1295 if (cur_summary)
1296 cur_summary->loads->insert (0, 0,
1297 get_access_for_fnspec (call,
1298 fnspec, i,
1299 map),
1300 false);
1301 if (cur_summary_lto)
1302 cur_summary_lto->loads->insert (0, 0,
1303 get_access_for_fnspec (call,
1304 fnspec, i,
1305 map),
1306 false);
1307 }
1308 }
1309 if (ignore_stores)
1310 return true;
1311 if (fnspec.global_memory_written_p ())
1312 collapse_stores (cur_summary, cur_summary_lto);
1313 else
1314 {
1315 for (unsigned int i = 0; i < gimple_call_num_args (call); i++)
1316 if (!POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, i)))(((enum tree_code) (((contains_struct_check ((gimple_call_arg
(call, i)), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 1316, __FUNCTION__))->typed.type))->base.code) == POINTER_TYPE
|| ((enum tree_code) (((contains_struct_check ((gimple_call_arg
(call, i)), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 1316, __FUNCTION__))->typed.type))->base.code) == REFERENCE_TYPE
)
)
1317 ;
1318 else if (!fnspec.arg_specified_p (i)
1319 || fnspec.arg_maybe_written_p (i))
1320 {
1321 modref_parm_map map = parm_map_for_arg
1322 (gimple_call_arg (call, i));
1323
1324 if (map.parm_index == MODREF_LOCAL_MEMORY_PARM)
1325 continue;
1326 if (map.parm_index == MODREF_UNKNOWN_PARM)
1327 {
1328 collapse_stores (cur_summary, cur_summary_lto);
1329 break;
1330 }
1331 if (cur_summary)
1332 cur_summary->stores->insert (0, 0,
1333 get_access_for_fnspec (call,
1334 fnspec, i,
1335 map),
1336 false);
1337 if (cur_summary_lto)
1338 cur_summary_lto->stores->insert (0, 0,
1339 get_access_for_fnspec (call,
1340 fnspec, i,
1341 map),
1342 false);
1343 }
1344 if (fnspec.errno_maybe_written_p () && flag_errno_mathglobal_options.x_flag_errno_math)
1345 {
1346 if (cur_summary)
1347 cur_summary->writes_errno = true;
1348 if (cur_summary_lto)
1349 cur_summary_lto->writes_errno = true;
1350 }
1351 }
1352 return true;
1353}
1354
1355/* Analyze function call STMT in function F.
1356 Remember recursive calls in RECURSIVE_CALLS. */
1357
1358static bool
1359analyze_call (modref_summary *cur_summary, modref_summary_lto *cur_summary_lto,
1360 gcall *stmt, vec <gimple *> *recursive_calls,
1361 bool always_executed)
1362{
1363 /* Check flags on the function call. In certain cases, analysis can be
1364 simplified. */
1365 int flags = gimple_call_flags (stmt);
1366 if ((flags & (ECF_CONST(1 << 0) | ECF_NOVOPS(1 << 9)))
1367 && !(flags & ECF_LOOPING_CONST_OR_PURE(1 << 2)))
1368 {
1369 if (dump_file)
1370 fprintf (dump_file,
1371 " - ECF_CONST | ECF_NOVOPS, ignoring all stores and all loads "
1372 "except for args.\n");
1373 return true;
1374 }
1375
1376 /* Pure functions do not affect global memory. Stores by functions which are
1377 noreturn and do not throw can safely be ignored. */
1378 bool ignore_stores = ignore_stores_p (current_function_decl, flags);
1379
1380 /* Next, we try to get the callee's function declaration. The goal is to
1381 merge their summary with ours. */
1382 tree callee = gimple_call_fndecl (stmt);
1383
1384 /* Check if this is an indirect call. */
1385 if (!callee)
1386 {
1387 if (dump_file)
1388 fprintf (dump_file, gimple_call_internal_p (stmt)
1389 ? " - Internal call" : " - Indirect call.\n");
1390 return process_fnspec (cur_summary, cur_summary_lto, stmt, ignore_stores);
1391 }
1392 /* We only need to handle internal calls in IPA mode. */
1393 gcc_checking_assert (!cur_summary_lto)((void)(!(!cur_summary_lto) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 1393, __FUNCTION__), 0 : 0))
;
1394
1395 struct cgraph_node *callee_node = cgraph_node::get_create (callee);
1396
1397 /* If this is a recursive call, the target summary is the same as ours, so
1398 there's nothing to do. */
1399 if (recursive_call_p (current_function_decl, callee))
1400 {
1401 recursive_calls->safe_push (stmt);
1402 if (cur_summary)
1403 cur_summary->side_effects = true;
1404 if (cur_summary_lto)
1405 cur_summary_lto->side_effects = true;
1406 if (dump_file)
1407 fprintf (dump_file, " - Skipping recursive call.\n");
1408 return true;
1409 }
1410
1411 gcc_assert (callee_node != NULL)((void)(!(callee_node != nullptr) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 1411, __FUNCTION__), 0 : 0))
;
1412
1413 /* Get the function symbol and its availability. */
1414 enum availability avail;
1415 callee_node = callee_node->function_symbol (&avail);
1416 bool looping;
1417 if (builtin_safe_for_const_function_p (&looping, callee))
1418 {
1419 if (looping)
1420 {
1421 if (cur_summary)
1422 cur_summary->side_effects = true;
1423 if (cur_summary_lto)
1424 cur_summary_lto->side_effects = true;
1425 }
1426 if (dump_file)
1427 fprintf (dump_file, " - Bulitin is safe for const.\n");
1428 return true;
1429 }
1430 if (avail <= AVAIL_INTERPOSABLE)
1431 {
1432 if (dump_file)
1433 fprintf (dump_file, " - Function availability <= AVAIL_INTERPOSABLE.\n");
1434 return process_fnspec (cur_summary, cur_summary_lto, stmt, ignore_stores);
1435 }
1436
1437 /* Get callee's modref summary. As above, if there's no summary, we either
1438 have to give up or, if stores are ignored, we can just purge loads. */
1439 modref_summary *callee_summary = optimization_summaries->get (callee_node);
1440 if (!callee_summary)
1441 {
1442 if (dump_file)
1443 fprintf (dump_file, " - No modref summary available for callee.\n");
1444 return process_fnspec (cur_summary, cur_summary_lto, stmt, ignore_stores);
1445 }
1446
1447 merge_call_side_effects (cur_summary, stmt, callee_summary, ignore_stores,
1448 callee_node, false, always_executed);
1449
1450 return true;
1451}
1452
/* Support analysis in non-lto and lto mode in parallel: bundle of state
   passed through walk_stmt_load_store_ops to the analyze_load/analyze_store
   callbacks.  */

struct summary_ptrs
{
  /* Summary being built for this-compilation-unit optimization;
     may be NULL.  */
  struct modref_summary *nolto;
  /* Summary being built for LTO streaming; may be NULL.  */
  struct modref_summary_lto *lto;
  /* True when the statement being analyzed is executed on every path
     through the function (enables recording kills).  */
  bool always_executed;
};
1461
1462/* Helper for analyze_stmt. */
1463
1464static bool
1465analyze_load (gimple *, tree, tree op, void *data)
1466{
1467 modref_summary *summary = ((summary_ptrs *)data)->nolto;
1468 modref_summary_lto *summary_lto = ((summary_ptrs *)data)->lto;
1469
1470 if (dump_file)
1471 {
1472 fprintf (dump_file, " - Analyzing load: ");
1473 print_generic_expr (dump_file, op);
1474 fprintf (dump_file, "\n");
1475 }
1476
1477 if (TREE_THIS_VOLATILE (op)((op)->base.volatile_flag)
1478 || (cfun(cfun + 0)->can_throw_non_call_exceptions
1479 && tree_could_throw_p (op)))
1480 {
1481 if (dump_file)
1482 fprintf (dump_file, " (volatile or can throw; marking side effects) ");
1483 if (summary)
1484 summary->side_effects = summary->nondeterministic = true;
1485 if (summary_lto)
1486 summary_lto->side_effects = summary_lto->nondeterministic = true;
1487 }
1488
1489 if (!record_access_p (op))
1490 return false;
1491
1492 ao_ref r;
1493 ao_ref_init (&r, op);
1494 modref_access_node a = get_access (&r);
1495
1496 if (summary)
1497 record_access (summary->loads, &r, a);
1498 if (summary_lto)
1499 record_access_lto (summary_lto->loads, &r, a);
1500 return false;
1501}
1502
1503/* Helper for analyze_stmt. */
1504
1505static bool
1506analyze_store (gimple *stmt, tree, tree op, void *data)
1507{
1508 modref_summary *summary = ((summary_ptrs *)data)->nolto;
1509 modref_summary_lto *summary_lto = ((summary_ptrs *)data)->lto;
1510
1511 if (dump_file)
1512 {
1513 fprintf (dump_file, " - Analyzing store: ");
1514 print_generic_expr (dump_file, op);
1515 fprintf (dump_file, "\n");
1516 }
1517
1518 if (TREE_THIS_VOLATILE (op)((op)->base.volatile_flag)
1519 || (cfun(cfun + 0)->can_throw_non_call_exceptions
1520 && tree_could_throw_p (op)))
1521 {
1522 if (dump_file)
1523 fprintf (dump_file, " (volatile or can throw; marking side effects) ");
1524 if (summary)
1525 summary->side_effects = summary->nondeterministic = true;
1526 if (summary_lto)
1527 summary_lto->side_effects = summary_lto->nondeterministic = true;
1528 }
1529
1530 if (!record_access_p (op))
1531 return false;
1532
1533 ao_ref r;
1534 ao_ref_init (&r, op);
1535 modref_access_node a = get_access (&r);
1536
1537 if (summary)
1538 record_access (summary->stores, &r, a);
1539 if (summary_lto)
1540 record_access_lto (summary_lto->stores, &r, a);
1541 if (((summary_ptrs *)data)->always_executed
1542 && a.useful_for_kill_p ()
1543 && (!cfun(cfun + 0)->can_throw_non_call_exceptions
1544 || !stmt_could_throw_p (cfun(cfun + 0), stmt)))
1545 {
1546 if (dump_file)
1547 fprintf (dump_file, " - Recording kill\n");
1548 if (summary)
1549 modref_access_node::insert_kill (summary->kills, a, false);
1550 if (summary_lto)
1551 modref_access_node::insert_kill (summary_lto->kills, a, false);
1552 }
1553 return false;
1554}
1555
1556/* Analyze statement STMT of function F.
1557 If IPA is true do not merge in side effects of calls. */
1558
1559static bool
1560analyze_stmt (modref_summary *summary, modref_summary_lto *summary_lto,
1561 gimple *stmt, bool ipa, vec <gimple *> *recursive_calls,
1562 bool always_executed)
1563{
1564 /* In general we can not ignore clobbers because they are barriers for code
1565 motion, however after inlining it is safe to do because local optimization
1566 passes do not consider clobbers from other functions.
1567 Similar logic is in ipa-pure-const.c. */
1568 if ((ipa || cfun(cfun + 0)->after_inlining) && gimple_clobber_p (stmt))
1569 {
1570 if (always_executed && record_access_p (gimple_assign_lhs (stmt)))
1571 {
1572 ao_ref r;
1573 ao_ref_init (&r, gimple_assign_lhs (stmt));
1574 modref_access_node a = get_access (&r);
1575 if (a.useful_for_kill_p ())
1576 {
1577 if (dump_file)
1578 fprintf (dump_file, " - Recording kill\n");
1579 if (summary)
1580 modref_access_node::insert_kill (summary->kills, a, false);
1581 if (summary_lto)
1582 modref_access_node::insert_kill (summary_lto->kills, a, false);
1583 }
1584 }
1585 return true;
1586 }
1587
1588 struct summary_ptrs sums = {summary, summary_lto, always_executed};
1589
1590 /* Analyze all loads and stores in STMT. */
1591 walk_stmt_load_store_ops (stmt, &sums,
1592 analyze_load, analyze_store);
1593
1594 switch (gimple_code (stmt))
1595 {
1596 case GIMPLE_ASM:
1597 if (gimple_asm_volatile_p (as_a <gasm *> (stmt)))
1598 {
1599 if (summary)
1600 summary->side_effects = summary->nondeterministic = true;
1601 if (summary_lto)
1602 summary_lto->side_effects = summary_lto->nondeterministic = true;
1603 }
1604 if (cfun(cfun + 0)->can_throw_non_call_exceptions
1605 && stmt_could_throw_p (cfun(cfun + 0), stmt))
1606 {
1607 if (summary)
1608 summary->side_effects = true;
1609 if (summary_lto)
1610 summary_lto->side_effects = true;
1611 }
1612 /* If the ASM statement does not read nor write memory, there's nothing
1613 to do. Otherwise just give up. */
1614 if (!gimple_asm_clobbers_memory_p (as_a <gasm *> (stmt)))
1615 return true;
1616 if (dump_file)
1617 fprintf (dump_file, " - Function contains GIMPLE_ASM statement "
1618 "which clobbers memory.\n");
1619 return false;
1620 case GIMPLE_CALL:
1621 if (!ipa || gimple_call_internal_p (stmt))
1622 return analyze_call (summary, summary_lto,
1623 as_a <gcall *> (stmt), recursive_calls,
1624 always_executed);
1625 else
1626 {
1627 attr_fnspec fnspec = gimple_call_fnspec (as_a <gcall *>(stmt));
1628
1629 if (fnspec.known_p ()
1630 && (!fnspec.global_memory_read_p ()
1631 || !fnspec.global_memory_written_p ()))
1632 {
1633 cgraph_edge *e = cgraph_node::get (current_function_decl)->get_edge (stmt);
1634 if (e->callee)
1635 {
1636 fnspec_summaries->get_create (e)->fnspec = xstrdup (fnspec.get_str ());
1637 if (dump_file)
1638 fprintf (dump_file, " Recorded fnspec %s\n", fnspec.get_str ());
1639 }
1640 }
1641 }
1642 return true;
1643 default:
1644 if (cfun(cfun + 0)->can_throw_non_call_exceptions
1645 && stmt_could_throw_p (cfun(cfun + 0), stmt))
1646 {
1647 if (summary)
1648 summary->side_effects = true;
1649 if (summary_lto)
1650 summary_lto->side_effects = true;
1651 }
1652 return true;
1653 }
1654}
1655
1656/* Remove summary of current function because during the function body
1657 scan we determined it is not useful. LTO, NOLTO and IPA determines the
1658 mode of scan. */
1659
1660 static void
1661 remove_summary (bool lto, bool nolto, bool ipa)
1662 {
1663 cgraph_node *fnode = cgraph_node::get (current_function_decl);
 /* Local (non-IPA) analysis only keeps the optimization summary;
    IPA analysis keeps separate non-LTO/LTO summaries plus per-edge
    summaries, so remove whichever were requested.  */
1664 if (!ipa)
1665 optimization_summaries->remove (fnode);
1666 else
1667 {
1668 if (nolto)
1669 summaries->remove (fnode);
1670 if (lto)
1671 summaries_lto->remove (fnode);
1672 remove_modref_edge_summaries (fnode);
1673 }
 /* Note the give-up in the dump so -fdump-ipa-modref users can see why
    no summary was produced.  */
1674 if (dump_file)
1675 fprintf (dump_file,
1676 " - modref done with result: not tracked.\n");
1677}
1678
1679/* Return true if OP accesses memory pointed to by SSA_NAME. */
1680
1681 bool
1682 memory_access_to (tree op, tree ssa_name)
1683 {
1684 tree base = get_base_address (op);
1685 if (!base)
1686 return false;
 /* Only (TARGET_)MEM_REF bases dereference a pointer; any other base
    (a DECL, for example) can not be an access through SSA_NAME.  */
1687 if (TREE_CODE (base)((enum tree_code) (base)->base.code) != MEM_REF && TREE_CODE (base)((enum tree_code) (base)->base.code) != TARGET_MEM_REF)
1688 return false;
 /* Operand 0 of a MEM_REF/TARGET_MEM_REF is the pointer being
    dereferenced; the access is to *SSA_NAME exactly when it matches.  */
1689 return TREE_OPERAND (base, 0)(*((const_cast<tree*> (tree_operand_check ((base), (0),
"/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 1689, __FUNCTION__)))))
== ssa_name;
1690}
1691
1692/* Consider statement val = *arg.
1693 return EAF flags of ARG that can be determined from EAF flags of VAL
1694 (which are known to be FLAGS). If IGNORE_STORES is true we can ignore
1695 all stores to VAL, i.e. when handling noreturn function. */
1696
1697 static int
1698 deref_flags (int flags, bool ignore_stores)
1699 {
1700 /* Dereference is also a direct read but dereferenced value does not
1701 yield any other direct use. */
1702 int ret = EAF_NO_DIRECT_CLOBBER(1 << 2) | EAF_NO_DIRECT_ESCAPE(1 << 4)
1703 | EAF_NOT_RETURNED_DIRECTLY(1 << 6);
1704 /* If argument is unused just account for
1705 the read involved in dereference. */
1706 if (flags & EAF_UNUSED(1 << 1))
1707 ret |= EAF_NO_INDIRECT_READ(1 << 9) | EAF_NO_INDIRECT_CLOBBER(1 << 3)
1708 | EAF_NO_INDIRECT_ESCAPE(1 << 5);
1709 else
1710 {
1711 /* Direct or indirect accesses leads to indirect accesses. */
 /* What was a direct or indirect property of VAL becomes an indirect
    property of ARG: *ARG is VAL, so ARG gets the "no indirect X" flag
    only when VAL had both the direct and indirect variants.  */
1712 if (((flags & EAF_NO_DIRECT_CLOBBER(1 << 2))
1713 && (flags & EAF_NO_INDIRECT_CLOBBER(1 << 3)))
1714 || ignore_stores)
1715 ret |= EAF_NO_INDIRECT_CLOBBER(1 << 3);
1716 if (((flags & EAF_NO_DIRECT_ESCAPE(1 << 4))
1717 && (flags & EAF_NO_INDIRECT_ESCAPE(1 << 5)))
1718 || ignore_stores)
1719 ret |= EAF_NO_INDIRECT_ESCAPE(1 << 5);
1720 if ((flags & EAF_NO_DIRECT_READ(1 << 8))
1721 && (flags & EAF_NO_INDIRECT_READ(1 << 9)))
1722 ret |= EAF_NO_INDIRECT_READ(1 << 9);
1723 if ((flags & EAF_NOT_RETURNED_DIRECTLY(1 << 6))
1724 && (flags & EAF_NOT_RETURNED_INDIRECTLY(1 << 7)))
1725 ret |= EAF_NOT_RETURNED_INDIRECTLY(1 << 7);
1726 }
1727 return ret;
1728}
1729
1730
1731/* Description of an escape point. */
1732
1733 struct escape_point
1734 {
1735 /* Value escapes to this call. */
1736 gcall *call;
1737 /* Argument it escapes to. */
1738 int arg;
1739 /* Flags already known about the argument (this can save us from recording
1740 escape points if local analysis did a good job already). */
1741 eaf_flags_t min_flags;
1742 /* Does the value escape directly or indirectly? */
1743 bool direct;
1744};
1745
 /* Per-SSA-name lattice of EAF flags used by modref_eaf_analysis.
    Flags only ever get cleared by the merge operations (a bitwise
    intersection), so the lattice moves monotonically downwards.  */
1746 class modref_lattice
1747 {
1748 public:
1749 /* EAF flags of the SSA name. */
1750 eaf_flags_t flags;
1751 /* Used during DFS walk to mark names where final value was determined
1752 without need for dataflow. */
1753 bool known;
1754 /* Used during DFS walk to mark open vertices (for cycle detection). */
1755 bool open;
1756 /* Set during DFS walk for names that need dataflow propagation. */
1757 bool do_dataflow;
1758 /* Used during the iterative dataflow. */
1759 bool changed;
1760
1761 /* When doing IPA analysis we can not merge in callee escape points;
1762 Only remember them and do the merging at IPA propagation time. */
1763 vec <escape_point, va_heap, vl_ptr> escape_points;
1764
1765 /* Representation of a graph for dataflow. This graph is built on-demand
1766 using modref_eaf_analysis::analyze_ssa and later solved by
1767 modref_eaf_analysis::propagate.
1768 Each edge represents the fact that flags of current lattice should be
1769 propagated to lattice of SSA_NAME. */
1770 struct propagate_edge
1771 {
1772 int ssa_name;
1773 bool deref;
1774 };
1775 vec <propagate_edge, va_heap, vl_ptr> propagate_to;
1776
 /* Lattices are stored in vectors and kept PODs; init () must be called
    explicitly and release () frees the two heap vectors.  */
1777 void init ();
1778 void release ();
1779 bool merge (const modref_lattice &with);
1780 bool merge (int flags);
1781 bool merge_deref (const modref_lattice &with, bool ignore_stores);
1782 bool merge_direct_load ();
1783 bool merge_direct_store ();
1784 bool add_escape_point (gcall *call, int arg, int min_flags, bool diret);
1785 void dump (FILE *out, int indent = 0) const;
1786};
1787
1788/* Lattices are saved to vectors, so keep them PODs. */
1789 void
1790 modref_lattice::init ()
1791 {
1792 /* All flags we track. */
 /* Start at the lattice top (every "no effect" flag set); analysis only
    ever clears bits from here.  */
1793 int f = EAF_NO_DIRECT_CLOBBER(1 << 2) | EAF_NO_INDIRECT_CLOBBER(1 << 3)
1794 | EAF_NO_DIRECT_ESCAPE(1 << 4) | EAF_NO_INDIRECT_ESCAPE(1 << 5)
1795 | EAF_NO_DIRECT_READ(1 << 8) | EAF_NO_INDIRECT_READ(1 << 9)
1796 | EAF_NOT_RETURNED_DIRECTLY(1 << 6) | EAF_NOT_RETURNED_INDIRECTLY(1 << 7)
1797 | EAF_UNUSED(1 << 1);
1798 flags = f;
1799 /* Check that eaf_flags_t is wide enough to hold all flags. */
1800 gcc_checking_assert (f == flags)((void)(!(f == flags) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 1800, __FUNCTION__), 0 : 0))
;
 /* Mark the vertex open for the DFS cycle detection done by
    analyze_ssa_name.  */
1801 open = true;
1802 known = false;
1803}
1804
1805/* Release memory. */
1806 void
1807 modref_lattice::release ()
1808 {
 /* Lattices are PODs (no destructor runs when their containing vector
    dies), so the two heap vectors must be released explicitly.  */
1809 escape_points.release ();
1810 propagate_to.release ();
1811}
1812
1813/* Dump lattice to OUT; indent with INDENT spaces. */
1814
1815 void
1816 modref_lattice::dump (FILE *out, int indent) const
1817 {
1818 dump_eaf_flags (out, flags);
 /* Escape points exist only in IPA mode; print one line per recorded
    call/argument pair.  */
1819 if (escape_points.length ())
1820 {
1821 fprintf (out, "%*sEscapes:\n", indent, "");
1822 for (unsigned int i = 0; i < escape_points.length (); i++)
1823 {
1824 fprintf (out, "%*s Arg %i (%s) min flags", indent, "",
1825 escape_points[i].arg,
1826 escape_points[i].direct ? "direct" : "indirect");
1827 dump_eaf_flags (out, escape_points[i].min_flags, false);
1828 fprintf (out, " in call ");
1829 print_gimple_stmt (out, escape_points[i].call, 0);
1830 }
1831 }
1832}
1833
1834/* Add escape point CALL, ARG, MIN_FLAGS, DIRECT. Return false if such escape
1835 point exists. */
1836
1837 bool
1838 modref_lattice::add_escape_point (gcall *call, int arg, int min_flags,
1839 bool direct)
1840 {
1841 escape_point *ep;
1842 unsigned int i;
1843
1844 /* If we already determined flags to be bad enough,
1845 we do not need to record. */
1846 if ((flags & min_flags) == flags || (min_flags & EAF_UNUSED(1 << 1)))
1847 return false;
1848
 /* Deduplicate: the same (call, arg, direct) triple only tightens the
    already recorded min_flags.  */
1849 FOR_EACH_VEC_ELT (escape_points, i, ep)for (i = 0; (escape_points).iterate ((i), &(ep)); ++(i))
1850 if (ep->call == call && ep->arg == arg && ep->direct == direct)
1851 {
1852 if ((ep->min_flags & min_flags) == min_flags)
1853 return false;
1854 ep->min_flags &= min_flags;
1855 return true;
1856 }
1857 /* Give up if max escape points is met. */
 /* Past the --param limit, conservatively drop to the lattice bottom
    (merge (0)) instead of growing the vector further.  */
1858 if ((int)escape_points.length () > param_modref_max_escape_pointsglobal_options.x_param_modref_max_escape_points)
1859 {
1860 if (dump_file)
1861 fprintf (dump_file, "--param modref-max-escape-points limit reached\n");
1862 merge (0);
1863 return true;
1864 }
1865 escape_point new_ep = {call, arg, min_flags, direct};
1866 escape_points.safe_push (new_ep);
1867 return true;
1868}
1869
1870/* Merge in flags from F. */
1871 bool
1872 modref_lattice::merge (int f)
1873 {
 /* EAF_UNUSED in F means the merged-in use does not constrain anything;
    keep the current flags untouched.  */
1874 if (f & EAF_UNUSED(1 << 1))
1875 return false;
1876 /* Check that flags seems sane: if function does not read the parameter
1877 it can not access it indirectly. */
1878 gcc_checking_assert (!(f & EAF_NO_DIRECT_READ)((void)(!(!(f & (1 << 8)) || ((f & (1 << 9
)) && (f & (1 << 3)) && (f & (1
 << 5)) && (f & (1 << 7)))) ? fancy_abort
 ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 1882, __FUNCTION__), 0 : 0))
1879 || ((f & EAF_NO_INDIRECT_READ)((void)(!(!(f & (1 << 8)) || ((f & (1 << 9
)) && (f & (1 << 3)) && (f & (1
 << 5)) && (f & (1 << 7)))) ? fancy_abort
 ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 1882, __FUNCTION__), 0 : 0))
1880 && (f & EAF_NO_INDIRECT_CLOBBER)((void)(!(!(f & (1 << 8)) || ((f & (1 << 9
)) && (f & (1 << 3)) && (f & (1
 << 5)) && (f & (1 << 7)))) ? fancy_abort
 ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 1882, __FUNCTION__), 0 : 0))
1881 && (f & EAF_NO_INDIRECT_ESCAPE)((void)(!(!(f & (1 << 8)) || ((f & (1 << 9
)) && (f & (1 << 3)) && (f & (1
 << 5)) && (f & (1 << 7)))) ? fancy_abort
 ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 1882, __FUNCTION__), 0 : 0))
1882 && (f & EAF_NOT_RETURNED_INDIRECTLY)))((void)(!(!(f & (1 << 8)) || ((f & (1 << 9
)) && (f & (1 << 3)) && (f & (1
 << 5)) && (f & (1 << 7)))) ? fancy_abort
 ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 1882, __FUNCTION__), 0 : 0))
;
 /* Merge is an intersection; report a change only when some bit was
    actually dropped.  */
1883 if ((flags & f) != flags)
1884 {
1885 flags &= f;
1886 /* Prune obviously useless flags;
1887 We do not have ECF_FLAGS handy which is not big problem since
1888 we will do final flags cleanup before producing summary.
1889 Merging should be fast so it can work well with dataflow. */
1890 flags = remove_useless_eaf_flags (flags, 0, false);
 /* Once at the bottom the escape points carry no information.  */
1891 if (!flags)
1892 escape_points.release ();
1893 return true;
1894 }
1895 return false;
1896}
1897
1898/* Merge in WITH. Return true if anything changed. */
1899
1900 bool
1901 modref_lattice::merge (const modref_lattice &with)
1902 {
 /* If WITH is not final yet, this lattice will need the iterative
    dataflow to converge.  */
1903 if (!with.known)
1904 do_dataflow = true;
1905
1906 bool changed = merge (with.flags);
1907
 /* At lattice bottom escape points are meaningless; skip copying them.  */
1908 if (!flags)
1909 return changed;
1910 for (unsigned int i = 0; i < with.escape_points.length (); i++)
1911 changed |= add_escape_point (with.escape_points[i].call,
1912 with.escape_points[i].arg,
1913 with.escape_points[i].min_flags,
1914 with.escape_points[i].direct);
1915 return changed;
1916}
1917
1918/* Merge in deref of WITH. If IGNORE_STORES is true do not consider
1919 stores. Return true if anything changed. */
1920
1921 bool
1922 modref_lattice::merge_deref (const modref_lattice &with, bool ignore_stores)
1923 {
1924 if (!with.known)
1925 do_dataflow = true;
1926
 /* WITH describes *this_name, so translate its flags through
    deref_flags before intersecting.  */
1927 bool changed = merge (deref_flags (with.flags, ignore_stores));
1928
1929 if (!flags)
1930 return changed;
1931 for (unsigned int i = 0; i < with.escape_points.length (); i++)
1932 {
1933 int min_flags = with.escape_points[i].min_flags;
1934
 /* A direct escape of the dereferenced value is an indirect escape of
    this name; hence every copied escape point is recorded as
    indirect (direct = false below).  */
1935 if (with.escape_points[i].direct)
1936 min_flags = deref_flags (min_flags, ignore_stores);
1937 else if (ignore_stores)
1938 min_flags |= ignore_stores_eaf_flags;
1939 changed |= add_escape_point (with.escape_points[i].call,
1940 with.escape_points[i].arg,
1941 min_flags,
1942 false);
1943 }
1944 return changed;
1945}
1946
1947/* Merge in flags for direct load. */
1948
1949 bool
1950 modref_lattice::merge_direct_load ()
1951 {
 /* A direct load clears only EAF_NO_DIRECT_READ (plus EAF_UNUSED);
    everything else survives the intersection.  */
1952 return merge (~(EAF_UNUSED(1 << 1) | EAF_NO_DIRECT_READ(1 << 8)));
1953}
1954
1955/* Merge in flags for direct store. */
1956
1957 bool
1958 modref_lattice::merge_direct_store ()
1959 {
 /* A direct store clears only EAF_NO_DIRECT_CLOBBER (plus EAF_UNUSED);
    everything else survives the intersection.  */
1960 return merge (~(EAF_UNUSED(1 << 1) | EAF_NO_DIRECT_CLOBBER(1 << 2)));
1961}
1962
1963/* Analyzer of EAF flags.
1964 This is generally a dataflow problem over the SSA graph, however we only
1965 care about flags of a few selected ssa names (arguments, return slot and
1966 static chain). So we first call analyze_ssa_name on all relevant names
1967 and perform a DFS walk to discover SSA names where flags need to be
1968 determined. For acyclic graphs we try to determine final flags during
1969 this walk. Once a cycle or the recursion depth limit is met we enlist
1970 SSA names for dataflow, which is done by the propagate call.
1971
1972 After propagation the flags can be obtained using get_ssa_name_flags. */
1973
1974 class modref_eaf_analysis
1975 {
1976 public:
1977 /* Mark NAME as relevant for analysis. */
1978 void analyze_ssa_name (tree name);
1979 /* Dataflow solver. */
1980 void propagate ();
1981 /* Return flags computed earlier for NAME. */
1982 int get_ssa_name_flags (tree name)
1983 {
1984 int version = SSA_NAME_VERSION (name)(tree_check ((name), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 1984, __FUNCTION__, (SSA_NAME)))->base.u.version
;
 /* Valid only after the DFS walk (and propagate, if needed) finished.  */
1985 gcc_checking_assert (m_lattice[version].known)((void)(!(m_lattice[version].known) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 1985, __FUNCTION__), 0 : 0))
;
1986 return m_lattice[version].flags;
1987 }
1988 /* In IPA mode this will record all escape points
1989 determined for NAME to PARM_INDEX. Flags are minimal
1990 flags known. */
1991 void record_escape_points (tree name, int parm_index, int flags);
1992 modref_eaf_analysis (bool ipa)
1993 {
1994 m_ipa = ipa;
1995 m_depth = 0;
 /* One lattice slot per SSA name version; cleared so POD lattices start
    zero-initialized until init () is called on them.  */
1996 m_lattice.safe_grow_cleared (num_ssa_names(vec_safe_length ((cfun + 0)->gimple_df->ssa_names)), true);
1997 }
1998 ~modref_eaf_analysis ()
1999 {
2000 gcc_checking_assert (!m_depth)((void)(!(!m_depth) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 2000, __FUNCTION__), 0 : 0))
;
 /* Escape point/edge vectors are heap allocated only in IPA mode or
    when dataflow was needed; release them explicitly (PODs).  */
2001 if (m_ipa || m_names_to_propagate.length ())
2002 for (unsigned int i = 0; i < num_ssa_names(vec_safe_length ((cfun + 0)->gimple_df->ssa_names)); i++)
2003 m_lattice[i].release ();
2004 }
2005 private:
2006 /* If true, we produce analysis for IPA mode. In this case escape points are
2007 collected. */
2008 bool m_ipa;
2009 /* Depth of recursion of analyze_ssa_name. */
2010 int m_depth;
2011 /* Propagation lattice for individual ssa names. */
2012 auto_vec<modref_lattice> m_lattice;
2013 auto_vec<tree> m_deferred_names;
2014 auto_vec<int> m_names_to_propagate;
2015
2016 void merge_with_ssa_name (tree dest, tree src, bool deref);
2017 void merge_call_lhs_flags (gcall *call, int arg, tree name, bool direct,
2018 bool deref);
2019};
2020
2021
2022/* Call statements may return their parameters. Consider argument number
2023 ARG of USE_STMT and determine flags that need to be cleared
2024 in case a pointer possibly indirectly referenced from ARG is returned.
2025 If DIRECT is true consider direct returns and if INDIRECT consider
2026 indirect returns.
2027 LATTICE, DEPTH and ipa are same as in analyze_ssa_name.
2028 ARG is set to -1 for static chain. */
2030 void
2031 modref_eaf_analysis::merge_call_lhs_flags (gcall *call, int arg,
2032 tree name, bool direct,
2033 bool indirect)
2034 {
2035 int index = SSA_NAME_VERSION (name)(tree_check ((name), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 2035, __FUNCTION__, (SSA_NAME)))->base.u.version
;
2036 bool returned_directly = false;
2037
2038 /* If there is no return value, no flags are affected. */
2039 if (!gimple_call_lhs (call))
2040 return;
2041
2042 /* If we know that function returns given argument and it is not ARG
2043 we can still be happy. */
2044 if (arg >= 0)
2045 {
2046 int flags = gimple_call_return_flags (call);
2047 if (flags & ERF_RETURNS_ARG(1 << 2))
2048 {
2049 if ((flags & ERF_RETURN_ARG_MASK(3)) == arg)
2050 returned_directly = true;
2051 else
2052 return;
2053 }
2054 }
2055 /* Make ERF_RETURNS_ARG overwrite EAF_UNUSED. */
2056 if (returned_directly)
2057 {
2058 direct = true;
2059 indirect = false;
2060 }
2061 /* If value is not returned at all, do nothing. */
2062 else if (!direct && !indirect)
2063 return;
2064
2065 /* If return value is SSA name determine its flags. */
 /* The argument inherits flags from the call's LHS: whatever happens to
    the returned value can happen to NAME (directly or via deref).  */
2066 if (TREE_CODE (gimple_call_lhs (call))((enum tree_code) (gimple_call_lhs (call))->base.code) == SSA_NAME)
2067 {
2068 tree lhs = gimple_call_lhs (call);
2069 if (direct)
2070 merge_with_ssa_name (name, lhs, false);
2071 if (indirect)
2072 merge_with_ssa_name (name, lhs, true);
2073 }
2074 /* In the case of memory store we can do nothing. */
 /* LHS is memory: the returned value escapes to a location we do not
    track, so drop to the most conservative flags.  */
2075 else if (!direct)
2076 m_lattice[index].merge (deref_flags (0, false));
2077 else
2078 m_lattice[index].merge (0);
2079}
2080
2081/* CALL_FLAGS are EAF_FLAGS of the argument. Turn them
2082 into flags for caller, update LATTICE of corresponding
2083 argument if needed. */
2084
2085 static int
2086 callee_to_caller_flags (int call_flags, bool ignore_stores,
2087 modref_lattice &lattice)
2088 {
2089 /* call_flags is about callee returning a value
2090 that is not the same as caller returning it. */
2091 call_flags |= EAF_NOT_RETURNED_DIRECTLY(1 << 6)
2092 | EAF_NOT_RETURNED_INDIRECTLY(1 << 7);
2093 if (!ignore_stores && !(call_flags & EAF_UNUSED(1 << 1)))
2094 {
2095 /* If value escapes we are no longer able to track what happens
2096 with it because we can read it from the escaped location
2097 anytime. */
2098 if (!(call_flags & EAF_NO_DIRECT_ESCAPE(1 << 4)))
2099 lattice.merge (0);
2100 else if (!(call_flags & EAF_NO_INDIRECT_ESCAPE(1 << 5)))
2101 lattice.merge (~(EAF_NOT_RETURNED_INDIRECTLY(1 << 7)
2102 | EAF_NO_DIRECT_READ(1 << 8)
2103 | EAF_NO_INDIRECT_READ(1 << 9)
2104 | EAF_NO_INDIRECT_CLOBBER(1 << 3)
2105 | EAF_UNUSED(1 << 1)));
2106 }
2107 else
 /* Stores are irrelevant (e.g. the caller does not return and can not
    observe them), so the "no clobber/escape" flags can be assumed.  */
2108 call_flags |= ignore_stores_eaf_flags;
2109 return call_flags;
2110}
2111
2112/* Analyze EAF flags for SSA name NAME and store result to LATTICE.
2113 LATTICE is an array of modref_lattices.
2114 DEPTH is a recursion depth used to make debug output prettier.
2115 If IPA is true we analyze for IPA propagation (and thus call escape points
2116 are processed later) */
2117
2118 void
2119 modref_eaf_analysis::analyze_ssa_name (tree name)
2120 {
2121 imm_use_iterator ui;
2122 gimple *use_stmt;
2123 int index = SSA_NAME_VERSION (name)(tree_check ((name), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 2123, __FUNCTION__, (SSA_NAME)))->base.u.version
;
2124
2125 /* See if value is already computed. */
2126 if (m_lattice[index].known || m_lattice[index].do_dataflow)
2127 return;
2128 if (m_lattice[index].open)
2129 {
2130 if (dump_file)
2131 fprintf (dump_file,
2132 "%*sCycle in SSA graph\n",
2133 m_depth * 4, "");
2134 return;
2135 }
2136 /* Recursion guard. */
2137 m_lattice[index].init ();
 /* Too deep recursion: defer the name and let propagate () reprocess it
    with the iterative dataflow.  */
2138 if (m_depth == param_modref_max_depthglobal_options.x_param_modref_max_depth)
2139 {
2140 if (dump_file)
2141 fprintf (dump_file,
2142 "%*sMax recursion depth reached; postponing\n",
2143 m_depth * 4, "");
2144 m_deferred_names.safe_push (name);
2145 return;
2146 }
2147
2148 if (dump_file)
2149 {
2150 fprintf (dump_file,
2151 "%*sAnalyzing flags of ssa name: ", m_depth * 4, "");
2152 print_generic_expr (dump_file, name);
2153 fprintf (dump_file, "\n");
2154 }
2155
 /* Walk all immediate uses of NAME; each use kind intersects flags into
    the lattice.  Stop early once the lattice reaches bottom (0).  */
2156 FOR_EACH_IMM_USE_STMT (use_stmt, ui, name)for (struct auto_end_imm_use_stmt_traverse auto_end_imm_use_stmt_traverse
 ((((use_stmt) = first_imm_use_stmt (&(ui), (name))), &
(ui))); !end_imm_use_stmt_p (&(ui)); (void) ((use_stmt) =
 next_imm_use_stmt (&(ui))))
2157 {
2158 if (m_lattice[index].flags == 0)
2159 break;
2160 if (is_gimple_debug (use_stmt))
2161 continue;
2162 if (dump_file)
2163 {
2164 fprintf (dump_file, "%*s Analyzing stmt: ", m_depth * 4, "");
2165 print_gimple_stmt (dump_file, use_stmt, 0);
2166 }
2167 /* If we see a direct non-debug use, clear unused bit.
2168 All dereferences should be accounted below using deref_flags. */
2169 m_lattice[index].merge (~EAF_UNUSED(1 << 1));
2170
2171 /* Gimple return may load the return value.
2172 Returning name counts as an use by tree-ssa-structalias.c */
2173 if (greturn *ret = dyn_cast <greturn *> (use_stmt))
2174 {
2175 /* Returning through return slot is seen as memory write earlier. */
2176 if (DECL_RESULT (current_function_decl)((tree_check ((current_function_decl), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 2176, __FUNCTION__, (FUNCTION_DECL)))->decl_non_common.result
)
2177 && DECL_BY_REFERENCE (DECL_RESULT (current_function_decl))((tree_check3 ((((tree_check ((current_function_decl), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 2177, __FUNCTION__, (FUNCTION_DECL)))->decl_non_common.result
)), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 2177, __FUNCTION__, (VAR_DECL), (PARM_DECL), (RESULT_DECL))
)->decl_common.decl_by_reference_flag)
)
2178 ;
 /* NAME is returned directly: clear both the direct and the
    indirect not-returned flags — memory reachable from NAME is
    reachable from the return value too.  The second term used to
    repeat EAF_NOT_RETURNED_DIRECTLY (a typo, fixed upstream),
    which wrongly kept EAF_NOT_RETURNED_INDIRECTLY set.  */
2179 else if (gimple_return_retval (ret) == name)
2180 m_lattice[index].merge (~(EAF_UNUSED(1 << 1) | EAF_NOT_RETURNED_DIRECTLY(1 << 6)
2181 | EAF_NOT_RETURNED_INDIRECTLY(1 << 7)));
2182 else if (memory_access_to (gimple_return_retval (ret), name))
2183 {
2184 m_lattice[index].merge_direct_load ();
2185 m_lattice[index].merge (~(EAF_UNUSED(1 << 1)
2186 | EAF_NOT_RETURNED_INDIRECTLY(1 << 7)));
2187 }
2188 }
2189 /* Account for LHS store, arg loads and flags from callee function. */
2190 else if (gcall *call = dyn_cast <gcall *> (use_stmt))
2191 {
2192 tree callee = gimple_call_fndecl (call);
2193
2194 /* IPA PTA internally it treats calling a function as "writing" to
2195 the argument space of all functions the function pointer points to
2196 (PR101949). We can not drop EAF_NOCLOBBER only when ipa-pta
2197 is on since that would allow propagation of this from -fno-ipa-pta
2198 to -fipa-pta functions. */
2199 if (gimple_call_fn (use_stmt) == name)
2200 m_lattice[index].merge (~(EAF_NO_DIRECT_CLOBBER(1 << 2) | EAF_UNUSED(1 << 1)));
2201
2202 /* Recursion would require bit of propagation; give up for now. */
2203 if (callee && !m_ipa && recursive_call_p (current_function_decl,
2204 callee))
2205 m_lattice[index].merge (0);
2206 else
2207 {
2208 int ecf_flags = gimple_call_flags (call);
2209 bool ignore_stores = ignore_stores_p (current_function_decl,
2210 ecf_flags);
2211 bool ignore_retval = ignore_retval_p (current_function_decl,
2212 ecf_flags);
2213
2214 /* Handle *name = func (...). */
2215 if (gimple_call_lhs (call)
2216 && memory_access_to (gimple_call_lhs (call), name))
2217 {
2218 m_lattice[index].merge_direct_store ();
2219 /* Return slot optimization passes address of
2220 LHS to callee via hidden parameter and this
2221 may make LHS to escape. See PR 98499. */
2222 if (gimple_call_return_slot_opt_p (call)
2223 && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (call)))((((contains_struct_check ((gimple_call_lhs (call)), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 2223, __FUNCTION__))->typed.type))->base.addressable_flag
)
)
2224 {
2225 int call_flags = gimple_call_retslot_flags (call);
2226 bool isretslot = false;
2227
2228 if (DECL_RESULT (current_function_decl)((tree_check ((current_function_decl), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 2228, __FUNCTION__, (FUNCTION_DECL)))->decl_non_common.result
)
2229 && DECL_BY_REFERENCE((tree_check3 ((((tree_check ((current_function_decl), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 2230, __FUNCTION__, (FUNCTION_DECL)))->decl_non_common.result
)), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 2230, __FUNCTION__, (VAR_DECL), (PARM_DECL), (RESULT_DECL))
)->decl_common.decl_by_reference_flag)
2230 (DECL_RESULT (current_function_decl))((tree_check3 ((((tree_check ((current_function_decl), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 2230, __FUNCTION__, (FUNCTION_DECL)))->decl_non_common.result
)), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 2230, __FUNCTION__, (VAR_DECL), (PARM_DECL), (RESULT_DECL))
)->decl_common.decl_by_reference_flag)
)
2231 isretslot = ssa_default_def
2232 (cfun(cfun + 0),
2233 DECL_RESULT (current_function_decl)((tree_check ((current_function_decl), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 2233, __FUNCTION__, (FUNCTION_DECL)))->decl_non_common.result
)
)
2234 == name;
2235
2236 /* Passing returnslot to return slot is special because
2237 not_returned and escape has same meaning.
2238 However passing arg to return slot is different. If
2239 the callee's return slot is returned it means that
2240 arg is written to itself which is an escape.
2241 Since we do not track the memory it is written to we
2242 need to give up on analyzing it. */
2243 if (!isretslot)
2244 {
2245 if (!(call_flags & (EAF_NOT_RETURNED_DIRECTLY(1 << 6)
2246 | EAF_UNUSED(1 << 1))))
2247 m_lattice[index].merge (0);
2248 else gcc_checking_assert((void)(!(call_flags & ((1 << 7) | (1 << 1)))
? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 2250, __FUNCTION__), 0 : 0))
2249 (call_flags & (EAF_NOT_RETURNED_INDIRECTLY((void)(!(call_flags & ((1 << 7) | (1 << 1)))
? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 2250, __FUNCTION__), 0 : 0))
2250 | EAF_UNUSED))((void)(!(call_flags & ((1 << 7) | (1 << 1)))
? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 2250, __FUNCTION__), 0 : 0))
;
2251 call_flags = callee_to_caller_flags
2252 (call_flags, false,
2253 m_lattice[index]);
2254 }
2255 m_lattice[index].merge (call_flags);
2256 }
2257 }
2258
 /* NAME passed as the static chain of the call.  */
2259 if (gimple_call_chain (call)
2260 && (gimple_call_chain (call) == name))
2261 {
2262 int call_flags = gimple_call_static_chain_flags (call);
2263 if (!ignore_retval && !(call_flags & EAF_UNUSED(1 << 1)))
2264 merge_call_lhs_flags
2265 (call, -1, name,
2266 !(call_flags & EAF_NOT_RETURNED_DIRECTLY(1 << 6)),
2267 !(call_flags & EAF_NOT_RETURNED_INDIRECTLY(1 << 7)));
2268 call_flags = callee_to_caller_flags
2269 (call_flags, ignore_stores,
2270 m_lattice[index]);
2271 if (!(ecf_flags & (ECF_CONST(1 << 0) | ECF_NOVOPS(1 << 9))))
2272 m_lattice[index].merge (call_flags);
2273 }
2274
2275 /* Process internal functions right away. */
2276 bool record_ipa = m_ipa && !gimple_call_internal_p (call);
2277
2278 /* Handle all function parameters. */
2279 for (unsigned i = 0;
2280 i < gimple_call_num_args (call)
2281 && m_lattice[index].flags; i++)
2282 /* Name is directly passed to the callee. */
2283 if (gimple_call_arg (call, i) == name)
2284 {
2285 int call_flags = gimple_call_arg_flags (call, i);
2286 if (!ignore_retval)
2287 merge_call_lhs_flags
2288 (call, i, name,
2289 !(call_flags & (EAF_NOT_RETURNED_DIRECTLY(1 << 6)
2290 | EAF_UNUSED(1 << 1))),
2291 !(call_flags & (EAF_NOT_RETURNED_INDIRECTLY(1 << 7)
2292 | EAF_UNUSED(1 << 1))));
2293 if (!(ecf_flags & (ECF_CONST(1 << 0) | ECF_NOVOPS(1 << 9))))
2294 {
2295 call_flags = callee_to_caller_flags
2296 (call_flags, ignore_stores,
2297 m_lattice[index]);
 /* In IPA mode escape points are recorded and merged at
    propagation time instead of being folded in now.  */
2298 if (!record_ipa)
2299 m_lattice[index].merge (call_flags);
2300 else
2301 m_lattice[index].add_escape_point (call, i,
2302 call_flags, true);
2303 }
2304 }
2305 /* Name is dereferenced and passed to a callee. */
2306 else if (memory_access_to (gimple_call_arg (call, i), name))
2307 {
2308 int call_flags = deref_flags
2309 (gimple_call_arg_flags (call, i), ignore_stores);
2310 if (!ignore_retval && !(call_flags & EAF_UNUSED(1 << 1))
2311 && !(call_flags & EAF_NOT_RETURNED_DIRECTLY(1 << 6))
2312 && !(call_flags & EAF_NOT_RETURNED_INDIRECTLY(1 << 7)))
2313 merge_call_lhs_flags (call, i, name, false, true);
2314 if (ecf_flags & (ECF_CONST(1 << 0) | ECF_NOVOPS(1 << 9)))
2315 m_lattice[index].merge_direct_load ();
2316 else
2317 {
2318 call_flags = callee_to_caller_flags
2319 (call_flags, ignore_stores,
2320 m_lattice[index]);
2321 if (!record_ipa)
2322 m_lattice[index].merge (call_flags);
2323 else
2324 m_lattice[index].add_escape_point (call, i,
2325 call_flags, false);
2326 }
2327 }
2328 }
2329 }
2330 else if (gimple_assign_load_p (use_stmt))
2331 {
2332 gassign *assign = as_a <gassign *> (use_stmt);
2333 /* Memory to memory copy. */
2334 if (gimple_store_p (assign))
2335 {
2336 /* Handle *lhs = *name.
2337
2338 We do not track memory locations, so assume that value
2339 is used arbitrarily. */
2340 if (memory_access_to (gimple_assign_rhs1 (assign), name))
2341 m_lattice[index].merge (deref_flags (0, false));
2342 /* Handle *name = *exp. */
2343 else if (memory_access_to (gimple_assign_lhs (assign), name))
2344 m_lattice[index].merge_direct_store ();
2345 }
2346 /* Handle lhs = *name. */
2347 else if (memory_access_to (gimple_assign_rhs1 (assign), name))
2348 {
2349 tree lhs = gimple_assign_lhs (assign);
2350 merge_with_ssa_name (name, lhs, true);
2351 }
2352 }
2353 else if (gimple_store_p (use_stmt))
2354 {
2355 gassign *assign = dyn_cast <gassign *> (use_stmt);
2356
2357 /* Handle *lhs = name. */
2358 if (assign && gimple_assign_rhs1 (assign) == name)
2359 {
2360 if (dump_file)
2361 fprintf (dump_file, "%*s ssa name saved to memory\n",
2362 m_depth * 4, "");
2363 m_lattice[index].merge (0);
2364 }
2365 /* Handle *name = exp. */
2366 else if (assign
2367 && memory_access_to (gimple_assign_lhs (assign), name))
2368 {
2369 /* In general we can not ignore clobbers because they are
2370 barriers for code motion, however after inlining it is safe to
2371 do because local optimization passes do not consider clobbers
2372 from other functions.
2373 Similar logic is in ipa-pure-const.c. */
2374 if (!cfun(cfun + 0)->after_inlining || !gimple_clobber_p (assign))
2375 m_lattice[index].merge_direct_store ();
2376 }
2377 /* ASM statements etc. */
2378 else if (!assign)
2379 {
2380 if (dump_file)
2381 fprintf (dump_file, "%*s Unhandled store\n", m_depth * 4, "");
2382 m_lattice[index].merge (0);
2383 }
2384 }
2385 else if (gassign *assign = dyn_cast <gassign *> (use_stmt))
2386 {
2387 enum tree_code code = gimple_assign_rhs_code (assign);
2388
2389 /* See if operation is a merge as considered by
2390 tree-ssa-structalias.c:find_func_aliases. */
2391 if (!truth_value_p (code)
2392 && code != POINTER_DIFF_EXPR
2393 && (code != POINTER_PLUS_EXPR
2394 || gimple_assign_rhs1 (assign) == name))
2395 {
2396 tree lhs = gimple_assign_lhs (assign);
2397 merge_with_ssa_name (name, lhs, false);
2398 }
2399 }
2400 else if (gphi *phi = dyn_cast <gphi *> (use_stmt))
2401 {
2402 tree result = gimple_phi_result (phi);
2403 merge_with_ssa_name (name, result, false);
2404 }
2405 /* Conditions are not considered escape points
2406 by tree-ssa-structalias. */
2407 else if (gimple_code (use_stmt) == GIMPLE_COND)
2408 ;
2409 else
2410 {
2411 if (dump_file)
2412 fprintf (dump_file, "%*s Unhandled stmt\n", m_depth * 4, "");
2413 m_lattice[index].merge (0);
2414 }
2415
2416 if (dump_file)
2417 {
2418 fprintf (dump_file, "%*s current flags of ", m_depth * 4, "");
2419 print_generic_expr (dump_file, name);
2420 m_lattice[index].dump (dump_file, m_depth * 4 + 4);
2421 }
2422 }
2423 if (dump_file)
2424 {
2425 fprintf (dump_file, "%*sflags of ssa name ", m_depth * 4, "");
2426 print_generic_expr (dump_file, name);
2427 m_lattice[index].dump (dump_file, m_depth * 4 + 2);
2428 }
 /* Close the DFS vertex; the result is final unless some merge marked it
    as needing the iterative dataflow.  */
2429 m_lattice[index].open = false;
2430 if (!m_lattice[index].do_dataflow)
2431 m_lattice[index].known = true;
2432}
2433
2434/* Propagate info from SRC to DEST. If DEREF is true, assume that SRC
2435 is dereferenced. */
2436
2437 void
2438 modref_eaf_analysis::merge_with_ssa_name (tree dest, tree src, bool deref)
2439 {
2440 int index = SSA_NAME_VERSION (dest)(tree_check ((dest), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 2440, __FUNCTION__, (SSA_NAME)))->base.u.version
;
2441 int src_index = SSA_NAME_VERSION (src)(tree_check ((src), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 2441, __FUNCTION__, (SSA_NAME)))->base.u.version
;
2442
2443 /* Merging lattice with itself is a no-op. */
2444 if (!deref && src == dest)
2445 return;
2446
 /* Recursively analyze SRC first (bounded by param_modref_max_depth),
    then fold its lattice into DEST's.  */
2447 m_depth++;
2448 analyze_ssa_name (src);
2449 m_depth--;
2450 if (deref)
2451 m_lattice[index].merge_deref (m_lattice[src_index], false);
2452 else
2453 m_lattice[index].merge (m_lattice[src_index]);
2454
2455 /* If we failed to produce final solution add an edge to the dataflow
2456 graph. */
2457 if (!m_lattice[src_index].known)
2458 {
2459 modref_lattice::propagate_edge e = {index, deref};
2460
 /* First edge out of SRC also enlists SRC for the dataflow solver.  */
2461 if (!m_lattice[src_index].propagate_to.length ())
2462 m_names_to_propagate.safe_push (src_index);
2463 m_lattice[src_index].propagate_to.safe_push (e);
2464 m_lattice[src_index].changed = true;
2465 m_lattice[src_index].do_dataflow = true;
 /* Fixed misspelled dump message ("propgate" -> "propagate").  */
2466 if (dump_file)
2467 fprintf (dump_file,
2468 "%*sWill propagate from ssa_name %i to %i%s\n",
2469 m_depth * 4 + 4,
2470 "", src_index, index, deref ? " (deref)" : "");
2471 }
2472}
2473
2474/* In the case we deferred some SSA names, reprocess them. In the case some
2475 dataflow edges were introduced, do the actual iterative dataflow. */
2476
2477 void
2478 modref_eaf_analysis::propagate ()
2479 {
2480 int iterations = 0;
2481 size_t i;
2482 int index;
2483 bool changed = true;
2484
/* First finish analysis of the SSA names whose processing was deferred.  */
2485 while (m_deferred_names.length ())
2486 {
2487 tree name = m_deferred_names.pop ();
2488 m_lattice[SSA_NAME_VERSION (name)(tree_check ((name), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 2488, __FUNCTION__, (SSA_NAME)))->base.u.version
].open = false;
2489 if (dump_file)
2490 fprintf (dump_file, "Analyzing deferred SSA name\n");
2491 analyze_ssa_name (name);
2492 }
2493
/* If no dataflow edges were recorded there is nothing to iterate on.  */
2494 if (!m_names_to_propagate.length ())
2495 return;
2496 if (dump_file)
2497 fprintf (dump_file, "Propagating EAF flags\n");
2498
2499 /* Compute reverse postorder. */
2500 auto_vec <int> rpo;
2501 struct stack_entry
2502 {
2503 int name;
2504 unsigned pos;
2505 };
2506 auto_vec <struct stack_entry> stack;
2507 int pos = m_names_to_propagate.length () - 1;
2508
2509 rpo.safe_grow (m_names_to_propagate.length (), true);
2510 stack.reserve_exact (m_names_to_propagate.length ());
2511
2512 /* We reuse known flag for RPO DFS walk bookkeeping. */
2513 if (flag_checkingglobal_options.x_flag_checking)
2514 FOR_EACH_VEC_ELT (m_names_to_propagate, i, index)for (i = 0; (m_names_to_propagate).iterate ((i), &(index)
); ++(i))
2515 gcc_assert (!m_lattice[index].known && m_lattice[index].changed)((void)(!(!m_lattice[index].known && m_lattice[index]
.changed) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 2515, __FUNCTION__), 0 : 0))
;
2516
/* Iterative (explicit-stack) DFS over the propagate_to edges; a name is
   stored into RPO (from the end, via POS) once all its out-edges were
   visited.  */
2517 FOR_EACH_VEC_ELT (m_names_to_propagate, i, index)for (i = 0; (m_names_to_propagate).iterate ((i), &(index)
); ++(i))
2518 {
2519 if (!m_lattice[index].known)
2520 {
2521 stack_entry e = {index, 0};
2522
2523 stack.quick_push (e);
2524 m_lattice[index].known = true;
2525 }
2526 while (stack.length ())
2527 {
2528 bool found = false;
2529 int index1 = stack.last ().name;
2530
2531 while (stack.last ().pos < m_lattice[index1].propagate_to.length ())
2532 {
2533 int index2 = m_lattice[index1]
2534 .propagate_to[stack.last ().pos].ssa_name;
2535
2536 stack.last ().pos++;
2537 if (!m_lattice[index2].known
2538 && m_lattice[index2].propagate_to.length ())
2539 {
2540 stack_entry e = {index2, 0};
2541
2542 stack.quick_push (e);
2543 m_lattice[index2].known = true;
2544 found = true;
2545 break;
2546 }
2547 }
2548 if (!found
2549 && stack.last ().pos == m_lattice[index1].propagate_to.length ())
2550 {
2551 rpo[pos--] = index1;
2552 stack.pop ();
2553 }
2554 }
2555 }
2556
2557 /* Perform iterative dataflow. */
2558 while (changed)
2559 {
2560 changed = false;
2561 iterations++;
2562 if (dump_file)
2563 fprintf (dump_file, " iteration %i\n", iterations);
2564 FOR_EACH_VEC_ELT (rpo, i, index)for (i = 0; (rpo).iterate ((i), &(index)); ++(i))
2565 {
2566 if (m_lattice[index].changed)
2567 {
2568 size_t j;
2569
2570 m_lattice[index].changed = false;
2571 if (dump_file)
2572 fprintf (dump_file, " Visiting ssa name %i\n", index);
2573 for (j = 0; j < m_lattice[index].propagate_to.length (); j++)
2574 {
2575 bool ch;
2576 int target = m_lattice[index].propagate_to[j].ssa_name;
2577 bool deref = m_lattice[index].propagate_to[j].deref;
2578
2579 if (dump_file)
2580 fprintf (dump_file, " Propagating flags of ssa name"
2581 " %i to %i%s\n",
2582 index, target, deref ? " (deref)" : "");
2583 m_lattice[target].known = true;
2584 if (!m_lattice[index].propagate_to[j].deref)
2585 ch = m_lattice[target].merge (m_lattice[index])
2586 else
2587 ch = m_lattice[target].merge_deref (m_lattice[index],
2588 false);
/* Only re-queue TARGET when merging actually changed its lattice.  */
2589 if (!ch)
2590 continue;
2591 if (dump_file)
2592 {
2593 fprintf (dump_file, " New lattice: ");
2594 m_lattice[target].dump (dump_file);
2595 }
2596 changed = true;
2597 m_lattice[target].changed = true;
2598 }
2599 }
2600 }
2601 }
2602 if (dump_file)
2603 fprintf (dump_file, "EAF flags propagated in %i iterations\n", iterations);
2604}
2605
2606/* Record escape points of PARM_INDEX according to LATTICE. */
2607
2608 void
2609 modref_eaf_analysis::record_escape_points (tree name, int parm_index, int flags)
2610 {
2611 modref_lattice &lattice = m_lattice[SSA_NAME_VERSION (name)(tree_check ((name), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 2611, __FUNCTION__, (SSA_NAME)))->base.u.version
];
2612
2613 if (lattice.escape_points.length ())
2614 {
2615 escape_point *ep;
2616 unsigned int ip;
2617 cgraph_node *node = cgraph_node::get (current_function_decl);
2618
/* Escape points are only collected in IPA mode.  */
2619 gcc_assert (m_ipa)((void)(!(m_ipa) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 2619, __FUNCTION__), 0 : 0))
;
/* Record an escape entry on the call edge only when the escape site's
   min_flags do not already include all of FLAGS.  */
2620 FOR_EACH_VEC_ELT (lattice.escape_points, ip, ep)for (ip = 0; (lattice.escape_points).iterate ((ip), &(ep)
); ++(ip))
2621 if ((ep->min_flags & flags) != flags)
2622 {
2623 cgraph_edge *e = node->get_edge (ep->call);
2624 struct escape_entry ee = {parm_index, ep->arg,
2625 ep->min_flags, ep->direct};
2626
2627 escape_summaries->get_create (e)->esc.safe_push (ee);
2628 }
2629 }
2630}
2631
2632/* Determine EAF flags for function parameters
2633 and fill in SUMMARY/SUMMARY_LTO. If IPA is true work in IPA mode
2634 where we also collect escape points.
2635 PAST_FLAGS, PAST_RETSLOT_FLAGS, PAST_STATIC_CHAIN_FLAGS can be
2636 used to preserve flags from previous (IPA) run for cases where
2637 late optimizations changed code in a way we can no longer analyze
2638 it easily. */
2639
2640 static void
2641 analyze_parms (modref_summary *summary, modref_summary_lto *summary_lto,
2642 bool ipa, vec<eaf_flags_t> &past_flags,
2643 int past_retslot_flags, int past_static_chain_flags)
2644 {
2645 unsigned int parm_index = 0;
2646 unsigned int count = 0;
2647 int ecf_flags = flags_from_decl_or_type (current_function_decl);
2648 tree retslot = NULLnullptr;
2649 tree static_chain = NULLnullptr;
2650
2651 /* If there is return slot, look up its SSA name. */
2652 if (DECL_RESULT (current_function_decl)((tree_check ((current_function_decl), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 2652, __FUNCTION__, (FUNCTION_DECL)))->decl_non_common.result
)
2653 && DECL_BY_REFERENCE (DECL_RESULT (current_function_decl))((tree_check3 ((((tree_check ((current_function_decl), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 2653, __FUNCTION__, (FUNCTION_DECL)))->decl_non_common.result
)), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 2653, __FUNCTION__, (VAR_DECL), (PARM_DECL), (RESULT_DECL))
)->decl_common.decl_by_reference_flag)
)
2654 retslot = ssa_default_def (cfun(cfun + 0), DECL_RESULT (current_function_decl)((tree_check ((current_function_decl), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 2654, __FUNCTION__, (FUNCTION_DECL)))->decl_non_common.result
)
);
2655 if (cfun(cfun + 0)->static_chain_decl)
2656 static_chain = ssa_default_def (cfun(cfun + 0), cfun(cfun + 0)->static_chain_decl);
2657
/* Count the declared parameters.  */
2658 for (tree parm = DECL_ARGUMENTS (current_function_decl)((tree_check ((current_function_decl), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 2658, __FUNCTION__, (FUNCTION_DECL)))->function_decl.arguments
)
; parm;
2659 parm = TREE_CHAIN (parm)((contains_struct_check ((parm), (TS_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 2659, __FUNCTION__))->common.chain)
)
2660 count++;
2661
/* Nothing to analyze: no parameters, no return slot, no static chain.  */
2662 if (!count && !retslot && !static_chain)
2663 return;
2664
2665 modref_eaf_analysis eaf_analysis (ipa);
2666
2667 /* Determine all SSA names we need to know flags for. */
2668 for (tree parm = DECL_ARGUMENTS (current_function_decl)((tree_check ((current_function_decl), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 2668, __FUNCTION__, (FUNCTION_DECL)))->function_decl.arguments
)
; parm;
2669 parm = TREE_CHAIN (parm)((contains_struct_check ((parm), (TS_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 2669, __FUNCTION__))->common.chain)
)
2670 {
2671 tree name = ssa_default_def (cfun(cfun + 0), parm);
2672 if (name)
2673 eaf_analysis.analyze_ssa_name (name);
2674 }
2675 if (retslot)
2676 eaf_analysis.analyze_ssa_name (retslot);
2677 if (static_chain)
2678 eaf_analysis.analyze_ssa_name (static_chain);
2679
2680 /* Do the dataflow. */
2681 eaf_analysis.propagate ();
2682
/* Look up the "fn spec" attribute so declared flags can be combined with
   the computed ones.  */
2683 tree attr = lookup_attribute ("fn spec",
2684 TYPE_ATTRIBUTES((tree_class_check ((((contains_struct_check ((current_function_decl
), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 2685, __FUNCTION__))->typed.type)), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 2685, __FUNCTION__))->type_common.attributes)
2685 (TREE_TYPE (current_function_decl))((tree_class_check ((((contains_struct_check ((current_function_decl
), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 2685, __FUNCTION__))->typed.type)), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 2685, __FUNCTION__))->type_common.attributes)
);
2686 attr_fnspec fnspec (attr
2687 ? TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr)))((const char *)((tree_check ((((tree_check ((((tree_check ((attr
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 2687, __FUNCTION__, (TREE_LIST)))->list.value)), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 2687, __FUNCTION__, (TREE_LIST)))->list.value)), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 2687, __FUNCTION__, (STRING_CST)))->string.str))
2688 : "");
2689
2690
2691 /* Store results to summaries. */
2692 for (tree parm = DECL_ARGUMENTS (current_function_decl)((tree_check ((current_function_decl), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 2692, __FUNCTION__, (FUNCTION_DECL)))->function_decl.arguments
)
; parm; parm_index++,
2693 parm = TREE_CHAIN (parm)((contains_struct_check ((parm), (TS_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 2693, __FUNCTION__))->common.chain)
)
2694 {
2695 tree name = ssa_default_def (cfun(cfun + 0), parm);
2696 if (!name || has_zero_uses (name))
2697 {
2698 /* We do not track non-SSA parameters,
2699 but we want to track unused gimple_regs. */
2700 if (!is_gimple_reg (parm))
2701 continue;
2702 if (summary)
2703 {
2704 if (parm_index >= summary->arg_flags.length ())
2705 summary->arg_flags.safe_grow_cleared (count, true);
2706 summary->arg_flags[parm_index] = EAF_UNUSED(1 << 1);
2707 }
2708 else if (summary_lto)
2709 {
2710 if (parm_index >= summary_lto->arg_flags.length ())
2711 summary_lto->arg_flags.safe_grow_cleared (count, true);
2712 summary_lto->arg_flags[parm_index] = EAF_UNUSED(1 << 1);
2713 }
2714 continue;
2715 }
2716 int flags = eaf_analysis.get_ssa_name_flags (name);
2717 int attr_flags = fnspec.arg_eaf_flags (parm_index);
2718
2719 if (dump_file && (flags | attr_flags) != flags && !(flags & EAF_UNUSED(1 << 1)))
2720 {
2721 fprintf (dump_file,
2722 " Flags for param %i combined with fnspec flags:",
2723 (int)parm_index);
2724 dump_eaf_flags (dump_file, attr_flags, false);
2725 fprintf (dump_file, " determined: ");
2726 dump_eaf_flags (dump_file, flags, true);
2727 }
2728 flags |= attr_flags;
2729
2730 /* Eliminate useless flags so we do not end up storing unnecessary
2731 summaries. */
2732
2733 flags = remove_useless_eaf_flags
2734 (flags, ecf_flags,
2735 VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))(((enum tree_code) (((contains_struct_check ((((contains_struct_check
((current_function_decl), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 2735, __FUNCTION__))->typed.type)), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 2735, __FUNCTION__))->typed.type))->base.code) == VOID_TYPE
)
);
/* Combine with flags preserved from the earlier (IPA) run, if any.  */
2736 if (past_flags.length () > parm_index)
2737 {
2738 int past = past_flags[parm_index];
2739 past = remove_useless_eaf_flags
2740 (past, ecf_flags,
2741 VOID_TYPE_P (TREE_TYPE(((enum tree_code) (((contains_struct_check ((((contains_struct_check
((current_function_decl), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 2742, __FUNCTION__))->typed.type)), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 2742, __FUNCTION__))->typed.type))->base.code) == VOID_TYPE
)
2742 (TREE_TYPE (current_function_decl)))(((enum tree_code) (((contains_struct_check ((((contains_struct_check
((current_function_decl), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 2742, __FUNCTION__))->typed.type)), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 2742, __FUNCTION__))->typed.type))->base.code) == VOID_TYPE
)
);
2743 if (dump_file && (flags | past) != flags && !(flags & EAF_UNUSED(1 << 1)))
2744 {
2745 fprintf (dump_file,
2746 " Flags for param %i combined with IPA pass:",
2747 (int)parm_index);
2748 dump_eaf_flags (dump_file, past, false);
2749 fprintf (dump_file, " determined: ");
2750 dump_eaf_flags (dump_file, flags, true);
2751 }
2752 if (!(flags & EAF_UNUSED(1 << 1)))
2753 flags |= past;
2754 }
2755
2756 if (flags)
2757 {
2758 if (summary)
2759 {
2760 if (parm_index >= summary->arg_flags.length ())
2761 summary->arg_flags.safe_grow_cleared (count, true);
2762 summary->arg_flags[parm_index] = flags;
2763 }
2764 else if (summary_lto)
2765 {
2766 if (parm_index >= summary_lto->arg_flags.length ())
2767 summary_lto->arg_flags.safe_grow_cleared (count, true);
2768 summary_lto->arg_flags[parm_index] = flags;
2769 }
2770 eaf_analysis.record_escape_points (name, parm_index, flags);
2771 }
2772 }
/* Handle the by-reference return slot the same way as a parameter.  */
2773 if (retslot)
2774 {
2775 int flags = eaf_analysis.get_ssa_name_flags (retslot);
2776 int past = past_retslot_flags;
2777
2778 flags = remove_useless_eaf_flags (flags, ecf_flags, false);
2779 past = remove_useless_eaf_flags
2780 (past, ecf_flags,
2781 VOID_TYPE_P (TREE_TYPE(((enum tree_code) (((contains_struct_check ((((contains_struct_check
((current_function_decl), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 2782, __FUNCTION__))->typed.type)), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 2782, __FUNCTION__))->typed.type))->base.code) == VOID_TYPE
)
2782 (TREE_TYPE (current_function_decl)))(((enum tree_code) (((contains_struct_check ((((contains_struct_check
((current_function_decl), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 2782, __FUNCTION__))->typed.type)), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 2782, __FUNCTION__))->typed.type))->base.code) == VOID_TYPE
)
);
2783 if (dump_file && (flags | past) != flags && !(flags & EAF_UNUSED(1 << 1)))
2784 {
2785 fprintf (dump_file,
2786 " Retslot flags combined with IPA pass:");
2787 dump_eaf_flags (dump_file, past, false);
2788 fprintf (dump_file, " determined: ");
2789 dump_eaf_flags (dump_file, flags, true);
2790 }
2791 if (!(flags & EAF_UNUSED(1 << 1)))
2792 flags |= past;
2793 if (flags)
2794 {
2795 if (summary)
2796 summary->retslot_flags = flags;
2797 if (summary_lto)
2798 summary_lto->retslot_flags = flags;
2799 eaf_analysis.record_escape_points (retslot,
2800 MODREF_RETSLOT_PARM, flags);
2801 }
2802 }
/* Likewise for the static chain of a nested function.  */
2803 if (static_chain)
2804 {
2805 int flags = eaf_analysis.get_ssa_name_flags (static_chain);
2806 int past = past_static_chain_flags;
2807
2808 flags = remove_useless_eaf_flags (flags, ecf_flags, false);
2809 past = remove_useless_eaf_flags
2810 (past, ecf_flags,
2811 VOID_TYPE_P (TREE_TYPE(((enum tree_code) (((contains_struct_check ((((contains_struct_check
((current_function_decl), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 2812, __FUNCTION__))->typed.type)), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 2812, __FUNCTION__))->typed.type))->base.code) == VOID_TYPE
)
2812 (TREE_TYPE (current_function_decl)))(((enum tree_code) (((contains_struct_check ((((contains_struct_check
((current_function_decl), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 2812, __FUNCTION__))->typed.type)), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 2812, __FUNCTION__))->typed.type))->base.code) == VOID_TYPE
)
);
2813 if (dump_file && (flags | past) != flags && !(flags & EAF_UNUSED(1 << 1)))
2814 {
2815 fprintf (dump_file,
2816 " Static chain flags combined with IPA pass:");
2817 dump_eaf_flags (dump_file, past, false);
2818 fprintf (dump_file, " determined: ");
2819 dump_eaf_flags (dump_file, flags, true);
2820 }
2821 if (!(flags & EAF_UNUSED(1 << 1)))
2822 flags |= past;
2823 if (flags)
2824 {
2825 if (summary)
2826 summary->static_chain_flags = flags;
2827 if (summary_lto)
2828 summary_lto->static_chain_flags = flags;
2829 eaf_analysis.record_escape_points (static_chain,
2830 MODREF_STATIC_CHAIN_PARM,
2831 flags);
2832 }
2833 }
2834}
2835
2836/* Analyze function F. IPA indicates whether we're running in local mode
2837 (false) or the IPA mode (true).
2838 Return true if fixup cfg is needed after the pass. */
2839
2840static bool
2841analyze_function (function *f, bool ipa)
2842{
2843 bool fixup_cfg = false;
2844 if (dump_file)
2845 fprintf (dump_file, "modref analyzing '%s' (ipa=%i)%s%s\n",
2846 function_name (f), ipa,
2847 TREE_READONLY (current_function_decl)((non_type_check ((current_function_decl), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 2847, __FUNCTION__))->base.readonly_flag)
? " (const)" : "",
2848 DECL_PURE_P (current_function_decl)((tree_check ((current_function_decl), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 2848, __FUNCTION__, (FUNCTION_DECL)))->function_decl.pure_flag
)
? " (pure)" : "");
2849
2850 /* Don't analyze this function if it's compiled with -fno-strict-aliasing. */
2851 if (!flag_ipa_modrefglobal_options.x_flag_ipa_modref
2852 || lookup_attribute ("noipa", DECL_ATTRIBUTES (current_function_decl)((contains_struct_check ((current_function_decl), (TS_DECL_COMMON
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 2852, __FUNCTION__))->decl_common.attributes)
))
2853 return false;
2854
2855 /* Compute no-LTO summaries when local optimization is going to happen. */
2856 bool nolto = (!ipa || ((!flag_ltoglobal_options.x_flag_lto || flag_fat_lto_objectsglobal_options.x_flag_fat_lto_objects) && !in_lto_pglobal_options.x_in_lto_p)
2857 || (in_lto_pglobal_options.x_in_lto_p && !flag_wpaglobal_options.x_flag_wpa
2858 && flag_incremental_linkglobal_options.x_flag_incremental_link != INCREMENTAL_LINK_LTO));
2859 /* Compute LTO when LTO streaming is going to happen. */
2860 bool lto = ipa && ((flag_ltoglobal_options.x_flag_lto && !in_lto_pglobal_options.x_in_lto_p)
2861 || flag_wpaglobal_options.x_flag_wpa
2862 || flag_incremental_linkglobal_options.x_flag_incremental_link == INCREMENTAL_LINK_LTO);
2863 cgraph_node *fnode = cgraph_node::get (current_function_decl);
2864
2865 modref_summary *summary = NULLnullptr;
2866 modref_summary_lto *summary_lto = NULLnullptr;
2867
2868 bool past_flags_known = false;
2869 auto_vec <eaf_flags_t> past_flags;
2870 int past_retslot_flags = 0;
2871 int past_static_chain_flags = 0;
2872
2873 /* Initialize the summary.
2874 If we run in local mode there is possibly pre-existing summary from
2875 IPA pass. Dump it so it is easy to compare if mod-ref info has
2876 improved. */
2877 if (!ipa)
2878 {
2879 if (!optimization_summaries)
2880 optimization_summaries = modref_summaries::create_ggc (symtab);
2881 else /* Remove existing summary if we are re-running the pass. */
2882 {
2883 summary = optimization_summaries->get (fnode);
2884 if (summary != NULLnullptr
2885 && summary->loads)
2886 {
2887 if (dump_file)
2888 {
2889 fprintf (dump_file, "Past summary:\n");
2890 optimization_summaries->get (fnode)->dump (dump_file);
2891 }
2892 past_flags.reserve_exact (summary->arg_flags.length ());
2893 past_flags.splice (summary->arg_flags);
2894 past_retslot_flags = summary->retslot_flags;
2895 past_static_chain_flags = summary->static_chain_flags;
2896 past_flags_known = true;
2897 }
2898 optimization_summaries->remove (fnode);
2899 }
2900 summary = optimization_summaries->get_create (fnode);
2901 gcc_checking_assert (nolto && !lto)((void)(!(nolto && !lto) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 2901, __FUNCTION__), 0 : 0))
;
2902 }
2903 /* In IPA mode we analyze every function precisely once. Assert that. */
2904 else
2905 {
2906 if (nolto)
2907 {
2908 if (!summaries)
2909 summaries = modref_summaries::create_ggc (symtab);
2910 else
2911 summaries->remove (fnode);
2912 summary = summaries->get_create (fnode);
2913 }
2914 if (lto)
2915 {
2916 if (!summaries_lto)
2917 summaries_lto = modref_summaries_lto::create_ggc (symtab);
2918 else
2919 summaries_lto->remove (fnode);
2920 summary_lto = summaries_lto->get_create (fnode);
2921 }
2922 if (!fnspec_summaries)
2923 fnspec_summaries = new fnspec_summaries_t (symtab);
2924 if (!escape_summaries)
2925 escape_summaries = new escape_summaries_t (symtab);
2926 }
2927
2928
2929 /* Create and initialize summary for F.
2930 Note that summaries may be already allocated from previous
2931 run of the pass. */
2932 if (nolto)
2933 {
2934 gcc_assert (!summary->loads)((void)(!(!summary->loads) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 2934, __FUNCTION__), 0 : 0))
;
2935 summary->loads = modref_records::create_ggc (param_modref_max_basesglobal_options.x_param_modref_max_bases,
2936 param_modref_max_refsglobal_options.x_param_modref_max_refs,
2937 param_modref_max_accessesglobal_options.x_param_modref_max_accesses);
2938 gcc_assert (!summary->stores)((void)(!(!summary->stores) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 2938, __FUNCTION__), 0 : 0))
;
2939 summary->stores = modref_records::create_ggc (param_modref_max_basesglobal_options.x_param_modref_max_bases,
2940 param_modref_max_refsglobal_options.x_param_modref_max_refs,
2941 param_modref_max_accessesglobal_options.x_param_modref_max_accesses);
2942 summary->writes_errno = false;
2943 summary->side_effects = false;
2944 summary->nondeterministic = false;
2945 summary->calls_interposable = false;
2946 }
2947 if (lto)
2948 {
2949 gcc_assert (!summary_lto->loads)((void)(!(!summary_lto->loads) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 2949, __FUNCTION__), 0 : 0))
;
2950 summary_lto->loads = modref_records_lto::create_ggc
2951 (param_modref_max_basesglobal_options.x_param_modref_max_bases,
2952 param_modref_max_refsglobal_options.x_param_modref_max_refs,
2953 param_modref_max_accessesglobal_options.x_param_modref_max_accesses);
2954 gcc_assert (!summary_lto->stores)((void)(!(!summary_lto->stores) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 2954, __FUNCTION__), 0 : 0))
;
2955 summary_lto->stores = modref_records_lto::create_ggc
2956 (param_modref_max_basesglobal_options.x_param_modref_max_bases,
2957 param_modref_max_refsglobal_options.x_param_modref_max_refs,
2958 param_modref_max_accessesglobal_options.x_param_modref_max_accesses);
2959 summary_lto->writes_errno = false;
2960 summary_lto->side_effects = false;
2961 summary_lto->nondeterministic = false;
2962 summary_lto->calls_interposable = false;
2963 }
2964
2965 analyze_parms (summary, summary_lto, ipa,
2966 past_flags, past_retslot_flags, past_static_chain_flags);
2967
2968 int ecf_flags = flags_from_decl_or_type (current_function_decl);
2969 auto_vec <gimple *, 32> recursive_calls;
2970
2971 /* Analyze each statement in each basic block of the function. If the
2972 statement cannot be analyzed (for any reason), the entire function cannot
2973 be analyzed by modref. */
2974 basic_block bb;
2975 FOR_EACH_BB_FN (bb, f)for (bb = (f)->cfg->x_entry_block_ptr->next_bb; bb !=
(f)->cfg->x_exit_block_ptr; bb = bb->next_bb)
2976 {
2977 gimple_stmt_iterator si;
2978 bool always_executed
2979 = bb == single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)(((cfun + 0))->cfg->x_entry_block_ptr))->dest;
2980
2981 for (si = gsi_start_nondebug_after_labels_bb (bb);
2982 !gsi_end_p (si); gsi_next_nondebug (&si))
2983 {
2984 if (!analyze_stmt (summary, summary_lto,
2985 gsi_stmt (si), ipa, &recursive_calls,
2986 always_executed)
2987 || ((!summary || !summary->useful_p (ecf_flags, false))
2988 && (!summary_lto
2989 || !summary_lto->useful_p (ecf_flags, false))))
2990 {
2991 collapse_loads (summary, summary_lto);
2992 collapse_stores (summary, summary_lto);
2993 break;
2994 }
2995 if (always_executed
2996 && stmt_can_throw_external (cfun(cfun + 0), gsi_stmt (si)))
2997 always_executed = false;
2998 }
2999 }
3000
3001 /* In non-IPA mode we need to perform iterative datafow on recursive calls.
3002 This needs to be done after all other side effects are computed. */
3003 if (!ipa)
3004 {
3005 bool changed = true;
3006 bool first = true;
3007 while (changed)
3008 {
3009 changed = false;
3010 for (unsigned i = 0; i < recursive_calls.length (); i++)
3011 {
3012 changed |= merge_call_side_effects
3013 (summary, recursive_calls[i], summary,
3014 ignore_stores_p (current_function_decl,
3015 gimple_call_flags
3016 (recursive_calls[i])),
3017 fnode, !first, false);
3018 if (!summary->useful_p (ecf_flags, false))
3019 {
3020 remove_summary (lto, nolto, ipa);
3021 return false;
3022 }
3023 }
3024 first = false;
3025 }
3026 }
3027 if (summary && !summary->side_effects && !finite_function_p ())
3028 summary->side_effects = true;
3029 if (summary_lto && !summary_lto->side_effects && !finite_function_p ())
3030 summary_lto->side_effects = true;
3031
3032 if (!ipa && flag_ipa_pure_constglobal_options.x_flag_ipa_pure_const)
3033 {
3034 if (!summary->stores->every_base && !summary->stores->bases
3035 && !summary->nondeterministic)
3036 {
3037 if (!summary->loads->every_base && !summary->loads->bases
3038 && !summary->calls_interposable)
3039 fixup_cfg = ipa_make_function_const (fnode,
3040 summary->side_effects, true);
3041 else
3042 fixup_cfg = ipa_make_function_pure (fnode,
3043 summary->side_effects, true);
3044 }
3045 }
3046 if (summary && !summary->useful_p (ecf_flags))
3047 {
3048 if (!ipa)
3049 optimization_summaries->remove (fnode);
3050 else
3051 summaries->remove (fnode);
3052 summary = NULLnullptr;
3053 }
3054 if (summary)
3055 summary->finalize (current_function_decl);
3056 if (summary_lto && !summary_lto->useful_p (ecf_flags))
3057 {
3058 summaries_lto->remove (fnode);
3059 summary_lto = NULLnullptr;
3060 }
3061
3062 if (ipa && !summary && !summary_lto)
3063 remove_modref_edge_summaries (fnode);
3064
3065 if (dump_file)
3066 {
3067 fprintf (dump_file, " - modref done with result: tracked.\n");
3068 if (summary)
3069 summary->dump (dump_file);
3070 if (summary_lto)
3071 summary_lto->dump (dump_file);
3072 dump_modref_edge_summaries (dump_file, fnode, 2);
3073 /* To simplify debugging, compare IPA and local solutions. */
3074 if (past_flags_known && summary)
3075 {
3076 size_t len = summary->arg_flags.length ();
3077
3078 if (past_flags.length () > len)
3079 len = past_flags.length ();
3080 for (size_t i = 0; i < len; i++)
3081 {
3082 int old_flags = i < past_flags.length () ? past_flags[i] : 0;
3083 int new_flags = i < summary->arg_flags.length ()
3084 ? summary->arg_flags[i] : 0;
3085 old_flags = remove_useless_eaf_flags
3086 (old_flags, flags_from_decl_or_type (current_function_decl),
3087 VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))(((enum tree_code) (((contains_struct_check ((((contains_struct_check
((current_function_decl), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 3087, __FUNCTION__))->typed.type)), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 3087, __FUNCTION__))->typed.type))->base.code) == VOID_TYPE
)
);
3088 if (old_flags != new_flags)
3089 {
3090 if ((old_flags & ~new_flags) == 0
3091 || (new_flags & EAF_UNUSED(1 << 1)))
3092 fprintf (dump_file, " Flags for param %i improved:",
3093 (int)i);
3094 else
3095 gcc_unreachable ()(fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 3095, __FUNCTION__))
;
3096 dump_eaf_flags (dump_file, old_flags, false);
3097 fprintf (dump_file, " -> ");
3098 dump_eaf_flags (dump_file, new_flags, true);
3099 }
3100 }
3101 past_retslot_flags = remove_useless_eaf_flags
3102 (past_retslot_flags,
3103 flags_from_decl_or_type (current_function_decl),
3104 VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))(((enum tree_code) (((contains_struct_check ((((contains_struct_check
((current_function_decl), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 3104, __FUNCTION__))->typed.type)), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 3104, __FUNCTION__))->typed.type))->base.code) == VOID_TYPE
)
);
3105 if (past_retslot_flags != summary->retslot_flags)
3106 {
3107 if ((past_retslot_flags & ~summary->retslot_flags) == 0
3108 || (summary->retslot_flags & EAF_UNUSED(1 << 1)))
3109 fprintf (dump_file, " Flags for retslot improved:");
3110 else
3111 gcc_unreachable ()(fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 3111, __FUNCTION__))
;
3112 dump_eaf_flags (dump_file, past_retslot_flags, false);
3113 fprintf (dump_file, " -> ");
3114 dump_eaf_flags (dump_file, summary->retslot_flags, true);
3115 }
3116 past_static_chain_flags = remove_useless_eaf_flags
3117 (past_static_chain_flags,
3118 flags_from_decl_or_type (current_function_decl),
3119 VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))(((enum tree_code) (((contains_struct_check ((((contains_struct_check
((current_function_decl), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 3119, __FUNCTION__))->typed.type)), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 3119, __FUNCTION__))->typed.type))->base.code) == VOID_TYPE
)
);
3120 if (past_static_chain_flags != summary->static_chain_flags)
3121 {
3122 if ((past_static_chain_flags & ~summary->static_chain_flags) == 0
3123 || (summary->static_chain_flags & EAF_UNUSED(1 << 1)))
3124 fprintf (dump_file, " Flags for static chain improved:");
3125 else
3126 gcc_unreachable ()(fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 3126, __FUNCTION__))
;
3127 dump_eaf_flags (dump_file, past_static_chain_flags, false);
3128 fprintf (dump_file, " -> ");
3129 dump_eaf_flags (dump_file, summary->static_chain_flags, true);
3130 }
3131 }
3132 else if (past_flags_known && !summary)
3133 {
3134 for (size_t i = 0; i < past_flags.length (); i++)
3135 {
3136 int old_flags = past_flags[i];
3137 old_flags = remove_useless_eaf_flags
3138 (old_flags, flags_from_decl_or_type (current_function_decl),
3139 VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))(((enum tree_code) (((contains_struct_check ((((contains_struct_check
((current_function_decl), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 3139, __FUNCTION__))->typed.type)), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 3139, __FUNCTION__))->typed.type))->base.code) == VOID_TYPE
)
);
3140 if (old_flags)
3141 {
3142 fprintf (dump_file, " Flags for param %i worsened:",
3143 (int)i);
3144 dump_eaf_flags (dump_file, old_flags, false);
3145 fprintf (dump_file, " -> \n");
3146 }
3147 }
3148 past_retslot_flags = remove_useless_eaf_flags
3149 (past_retslot_flags,
3150 flags_from_decl_or_type (current_function_decl),
3151 VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))(((enum tree_code) (((contains_struct_check ((((contains_struct_check
((current_function_decl), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 3151, __FUNCTION__))->typed.type)), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 3151, __FUNCTION__))->typed.type))->base.code) == VOID_TYPE
)
);
3152 if (past_retslot_flags)
3153 {
3154 fprintf (dump_file, " Flags for retslot worsened:");
3155 dump_eaf_flags (dump_file, past_retslot_flags, false);
3156 fprintf (dump_file, " ->\n");
3157 }
3158 past_static_chain_flags = remove_useless_eaf_flags
3159 (past_static_chain_flags,
3160 flags_from_decl_or_type (current_function_decl),
3161 VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))(((enum tree_code) (((contains_struct_check ((((contains_struct_check
((current_function_decl), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 3161, __FUNCTION__))->typed.type)), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 3161, __FUNCTION__))->typed.type))->base.code) == VOID_TYPE
)
);
3162 if (past_static_chain_flags)
3163 {
3164 fprintf (dump_file, " Flags for static chain worsened:");
3165 dump_eaf_flags (dump_file, past_static_chain_flags, false);
3166 fprintf (dump_file, " ->\n");
3167 }
3168 }
3169 }
3170 return fixup_cfg;
3171}
3172
3173/* Callback for generate_summary. */
3174
3175static void
3176modref_generate (void)
3177{
3178 struct cgraph_node *node;
3179 FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)for ((node) = symtab->first_function_with_gimple_body (); (
node); (node) = symtab->next_function_with_gimple_body (node
))
3180 {
3181 function *f = DECL_STRUCT_FUNCTION (node->decl)((tree_check ((node->decl), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 3181, __FUNCTION__, (FUNCTION_DECL)))->function_decl.f)
;
3182 if (!f)
3183 continue;
3184 push_cfun (f);
3185 analyze_function (f, true);
3186 pop_cfun ();
3187 }
3188}
3189
3190} /* ANON namespace. */
3191
3192/* Debugging helper. */
3193
3194void
3195debug_eaf_flags (int flags)
3196{
3197 dump_eaf_flags (stderrstderr, flags, true);
3198}
3199
3200/* Called when a new function is inserted to callgraph late. */
3201
3202void
3203modref_summaries::insert (struct cgraph_node *node, modref_summary *)
3204{
3205 /* Local passes ought to be executed by the pass manager. */
3206 if (this == optimization_summaries)
3207 {
3208 optimization_summaries->remove (node);
3209 return;
3210 }
3211 if (!DECL_STRUCT_FUNCTION (node->decl)((tree_check ((node->decl), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 3211, __FUNCTION__, (FUNCTION_DECL)))->function_decl.f)
3212 || !opt_for_fn (node->decl, flag_ipa_modref)(opts_for_fn (node->decl)->x_flag_ipa_modref))
3213 {
3214 summaries->remove (node);
3215 return;
3216 }
3217 push_cfun (DECL_STRUCT_FUNCTION (node->decl)((tree_check ((node->decl), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 3217, __FUNCTION__, (FUNCTION_DECL)))->function_decl.f)
);
3218 analyze_function (DECL_STRUCT_FUNCTION (node->decl)((tree_check ((node->decl), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 3218, __FUNCTION__, (FUNCTION_DECL)))->function_decl.f)
, true);
3219 pop_cfun ();
3220}
3221
3222/* Called when a new function is inserted to callgraph late. */
3223
3224void
3225modref_summaries_lto::insert (struct cgraph_node *node, modref_summary_lto *)
3226{
3227 /* We do not support adding new function when IPA information is already
3228 propagated. This is done only by SIMD cloning that is not very
3229 critical. */
3230 if (!DECL_STRUCT_FUNCTION (node->decl)((tree_check ((node->decl), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 3230, __FUNCTION__, (FUNCTION_DECL)))->function_decl.f)
3231 || !opt_for_fn (node->decl, flag_ipa_modref)(opts_for_fn (node->decl)->x_flag_ipa_modref)
3232 || propagated)
3233 {
3234 summaries_lto->remove (node);
3235 return;
3236 }
3237 push_cfun (DECL_STRUCT_FUNCTION (node->decl)((tree_check ((node->decl), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 3237, __FUNCTION__, (FUNCTION_DECL)))->function_decl.f)
);
3238 analyze_function (DECL_STRUCT_FUNCTION (node->decl)((tree_check ((node->decl), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 3238, __FUNCTION__, (FUNCTION_DECL)))->function_decl.f)
, true);
3239 pop_cfun ();
3240}
3241
3242/* Called when new clone is inserted to callgraph late. */
3243
3244void
3245modref_summaries::duplicate (cgraph_node *, cgraph_node *dst,
3246 modref_summary *src_data,
3247 modref_summary *dst_data)
3248{
3249 /* Do not duplicate optimization summaries; we do not handle parameter
3250 transforms on them. */
3251 if (this == optimization_summaries)
3252 {
3253 optimization_summaries->remove (dst);
3254 return;
3255 }
3256 dst_data->stores = modref_records::create_ggc
3257 (src_data->stores->max_bases,
3258 src_data->stores->max_refs,
3259 src_data->stores->max_accesses);
3260 dst_data->stores->copy_from (src_data->stores);
3261 dst_data->loads = modref_records::create_ggc
3262 (src_data->loads->max_bases,
3263 src_data->loads->max_refs,
3264 src_data->loads->max_accesses);
3265 dst_data->loads->copy_from (src_data->loads);
3266 dst_data->kills.reserve_exact (src_data->kills.length ());
3267 dst_data->kills.splice (src_data->kills);
3268 dst_data->writes_errno = src_data->writes_errno;
3269 dst_data->side_effects = src_data->side_effects;
3270 dst_data->nondeterministic = src_data->nondeterministic;
3271 dst_data->calls_interposable = src_data->calls_interposable;
3272 if (src_data->arg_flags.length ())
3273 dst_data->arg_flags = src_data->arg_flags.copy ();
3274 dst_data->retslot_flags = src_data->retslot_flags;
3275 dst_data->static_chain_flags = src_data->static_chain_flags;
3276}
3277
3278/* Called when new clone is inserted to callgraph late. */
3279
3280void
3281modref_summaries_lto::duplicate (cgraph_node *, cgraph_node *,
3282 modref_summary_lto *src_data,
3283 modref_summary_lto *dst_data)
3284{
3285 /* Be sure that no further cloning happens after ipa-modref. If it does
3286 we will need to update signatures for possible param changes. */
3287 gcc_checking_assert (!((modref_summaries_lto *)summaries_lto)->propagated)((void)(!(!((modref_summaries_lto *)summaries_lto)->propagated
) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 3287, __FUNCTION__), 0 : 0))
;
3288 dst_data->stores = modref_records_lto::create_ggc
3289 (src_data->stores->max_bases,
3290 src_data->stores->max_refs,
3291 src_data->stores->max_accesses);
3292 dst_data->stores->copy_from (src_data->stores);
3293 dst_data->loads = modref_records_lto::create_ggc
3294 (src_data->loads->max_bases,
3295 src_data->loads->max_refs,
3296 src_data->loads->max_accesses);
3297 dst_data->loads->copy_from (src_data->loads);
3298 dst_data->kills.reserve_exact (src_data->kills.length ());
3299 dst_data->kills.splice (src_data->kills);
3300 dst_data->writes_errno = src_data->writes_errno;
3301 dst_data->side_effects = src_data->side_effects;
3302 dst_data->nondeterministic = src_data->nondeterministic;
3303 dst_data->calls_interposable = src_data->calls_interposable;
3304 if (src_data->arg_flags.length ())
3305 dst_data->arg_flags = src_data->arg_flags.copy ();
3306 dst_data->retslot_flags = src_data->retslot_flags;
3307 dst_data->static_chain_flags = src_data->static_chain_flags;
3308}
3309
3310namespace
3311{
3312/* Definition of the modref pass on GIMPLE. */
3313const pass_data pass_data_modref = {
3314 GIMPLE_PASS,
3315 "modref",
3316 OPTGROUP_IPA,
3317 TV_TREE_MODREF,
3318 (PROP_cfg(1 << 3) | PROP_ssa(1 << 5)),
3319 0,
3320 0,
3321 0,
3322 0,
3323};
3324
3325class pass_modref : public gimple_opt_pass
3326{
3327 public:
3328 pass_modref (gcc::context *ctxt)
3329 : gimple_opt_pass (pass_data_modref, ctxt) {}
3330
3331 /* opt_pass methods: */
3332 opt_pass *clone ()
3333 {
3334 return new pass_modref (m_ctxt);
3335 }
3336 virtual bool gate (function *)
3337 {
3338 return flag_ipa_modrefglobal_options.x_flag_ipa_modref;
3339 }
3340 virtual unsigned int execute (function *);
3341};
3342
3343/* Encode TT to the output block OB using the summary streaming API. */
3344
3345static void
3346write_modref_records (modref_records_lto *tt, struct output_block *ob)
3347{
3348 streamer_write_uhwi (ob, tt->max_bases);
3349 streamer_write_uhwi (ob, tt->max_refs);
3350 streamer_write_uhwi (ob, tt->max_accesses);
3351
3352 streamer_write_uhwi (ob, tt->every_base);
3353 streamer_write_uhwi (ob, vec_safe_length (tt->bases));
3354 for (auto base_node : tt->bases)
3355 {
3356 stream_write_tree (ob, base_node->base, true)streamer_hooks.write_tree (ob, base_node->base, true, true
)
;
3357
3358 streamer_write_uhwi (ob, base_node->every_ref);
3359 streamer_write_uhwi (ob, vec_safe_length (base_node->refs));
3360
3361 for (auto ref_node : base_node->refs)
3362 {
3363 stream_write_tree (ob, ref_node->ref, true)streamer_hooks.write_tree (ob, ref_node->ref, true, true);
3364 streamer_write_uhwi (ob, ref_node->every_access);
3365 streamer_write_uhwi (ob, vec_safe_length (ref_node->accesses));
3366
3367 for (auto access_node : ref_node->accesses)
3368 access_node.stream_out (ob);
3369 }
3370 }
3371}
3372
3373/* Read a modref_tree from the input block IB using the data from DATA_IN.
3374 This assumes that the tree was encoded using write_modref_tree.
3375 Either nolto_ret or lto_ret is initialized by the tree depending whether
3376 LTO streaming is expected or not. */
3377
3378static void
3379read_modref_records (lto_input_block *ib, struct data_in *data_in,
3380 modref_records **nolto_ret,
3381 modref_records_lto **lto_ret)
3382{
3383 size_t max_bases = streamer_read_uhwi (ib);
3384 size_t max_refs = streamer_read_uhwi (ib);
3385 size_t max_accesses = streamer_read_uhwi (ib);
3386
3387 if (lto_ret)
3388 *lto_ret = modref_records_lto::create_ggc (max_bases, max_refs,
3389 max_accesses);
3390 if (nolto_ret)
3391 *nolto_ret = modref_records::create_ggc (max_bases, max_refs,
3392 max_accesses);
3393 gcc_checking_assert (lto_ret || nolto_ret)((void)(!(lto_ret || nolto_ret) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 3393, __FUNCTION__), 0 : 0))
;
3394
3395 size_t every_base = streamer_read_uhwi (ib);
3396 size_t nbase = streamer_read_uhwi (ib);
3397
3398 gcc_assert (!every_base || nbase == 0)((void)(!(!every_base || nbase == 0) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 3398, __FUNCTION__), 0 : 0))
;
3399 if (every_base)
3400 {
3401 if (nolto_ret)
3402 (*nolto_ret)->collapse ();
3403 if (lto_ret)
3404 (*lto_ret)->collapse ();
3405 }
3406 for (size_t i = 0; i < nbase; i++)
3407 {
3408 tree base_tree = stream_read_tree (ib, data_in)streamer_hooks.read_tree (ib, data_in);
3409 modref_base_node <alias_set_type> *nolto_base_node = NULLnullptr;
3410 modref_base_node <tree> *lto_base_node = NULLnullptr;
3411
3412 /* At stream in time we have LTO alias info. Check if we streamed in
3413 something obviously unnecessary. Do not glob types by alias sets;
3414 it is not 100% clear that ltrans types will get merged same way.
3415 Types may get refined based on ODR type conflicts. */
3416 if (base_tree && !get_alias_set (base_tree))
3417 {
3418 if (dump_file)
3419 {
3420 fprintf (dump_file, "Streamed in alias set 0 type ");
3421 print_generic_expr (dump_file, base_tree);
3422 fprintf (dump_file, "\n");
3423 }
3424 base_tree = NULLnullptr;
3425 }
3426
3427 if (nolto_ret)
3428 nolto_base_node = (*nolto_ret)->insert_base (base_tree
3429 ? get_alias_set (base_tree)
3430 : 0, 0);
3431 if (lto_ret)
3432 lto_base_node = (*lto_ret)->insert_base (base_tree, 0);
3433 size_t every_ref = streamer_read_uhwi (ib);
3434 size_t nref = streamer_read_uhwi (ib);
3435
3436 gcc_assert (!every_ref || nref == 0)((void)(!(!every_ref || nref == 0) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 3436, __FUNCTION__), 0 : 0))
;
3437 if (every_ref)
3438 {
3439 if (nolto_base_node)
3440 nolto_base_node->collapse ();
3441 if (lto_base_node)
3442 lto_base_node->collapse ();
3443 }
3444 for (size_t j = 0; j < nref; j++)
3445 {
3446 tree ref_tree = stream_read_tree (ib, data_in)streamer_hooks.read_tree (ib, data_in);
3447
3448 if (ref_tree && !get_alias_set (ref_tree))
3449 {
3450 if (dump_file)
3451 {
3452 fprintf (dump_file, "Streamed in alias set 0 type ");
3453 print_generic_expr (dump_file, ref_tree);
3454 fprintf (dump_file, "\n");
3455 }
3456 ref_tree = NULLnullptr;
3457 }
3458
3459 modref_ref_node <alias_set_type> *nolto_ref_node = NULLnullptr;
3460 modref_ref_node <tree> *lto_ref_node = NULLnullptr;
3461
3462 if (nolto_base_node)
3463 nolto_ref_node
3464 = nolto_base_node->insert_ref (ref_tree
3465 ? get_alias_set (ref_tree) : 0,
3466 max_refs);
3467 if (lto_base_node)
3468 lto_ref_node = lto_base_node->insert_ref (ref_tree, max_refs);
3469
3470 size_t every_access = streamer_read_uhwi (ib);
3471 size_t naccesses = streamer_read_uhwi (ib);
3472
3473 if (nolto_ref_node && every_access)
3474 nolto_ref_node->collapse ();
3475 if (lto_ref_node && every_access)
3476 lto_ref_node->collapse ();
3477
3478 for (size_t k = 0; k < naccesses; k++)
3479 {
3480 modref_access_node a = modref_access_node::stream_in (ib);
3481 if (nolto_ref_node)
3482 nolto_ref_node->insert_access (a, max_accesses, false);
3483 if (lto_ref_node)
3484 lto_ref_node->insert_access (a, max_accesses, false);
3485 }
3486 }
3487 }
3488 if (lto_ret)
3489 (*lto_ret)->cleanup ();
3490 if (nolto_ret)
3491 (*nolto_ret)->cleanup ();
3492}
3493
3494/* Write ESUM to BP. */
3495
3496static void
3497modref_write_escape_summary (struct bitpack_d *bp, escape_summary *esum)
3498{
3499 if (!esum)
3500 {
3501 bp_pack_var_len_unsigned (bp, 0);
3502 return;
3503 }
3504 bp_pack_var_len_unsigned (bp, esum->esc.length ());
3505 unsigned int i;
3506 escape_entry *ee;
3507 FOR_EACH_VEC_ELT (esum->esc, i, ee)for (i = 0; (esum->esc).iterate ((i), &(ee)); ++(i))
3508 {
3509 bp_pack_var_len_int (bp, ee->parm_index);
3510 bp_pack_var_len_unsigned (bp, ee->arg);
3511 bp_pack_var_len_unsigned (bp, ee->min_flags);
3512 bp_pack_value (bp, ee->direct, 1);
3513 }
3514}
3515
3516/* Read escape summary for E from BP. */
3517
3518static void
3519modref_read_escape_summary (struct bitpack_d *bp, cgraph_edge *e)
3520{
3521 unsigned int n = bp_unpack_var_len_unsigned (bp);
3522 if (!n)
3523 return;
3524 escape_summary *esum = escape_summaries->get_create (e);
3525 esum->esc.reserve_exact (n);
3526 for (unsigned int i = 0; i < n; i++)
3527 {
3528 escape_entry ee;
3529 ee.parm_index = bp_unpack_var_len_int (bp);
3530 ee.arg = bp_unpack_var_len_unsigned (bp);
3531 ee.min_flags = bp_unpack_var_len_unsigned (bp);
3532 ee.direct = bp_unpack_value (bp, 1);
3533 esum->esc.quick_push (ee);
3534 }
3535}
3536
3537/* Callback for write_summary. */
3538
3539static void
3540modref_write ()
3541{
3542 struct output_block *ob = create_output_block (LTO_section_ipa_modref);
3543 lto_symtab_encoder_t encoder = ob->decl_state->symtab_node_encoder;
3544 unsigned int count = 0;
3545 int i;
3546
3547 if (!summaries_lto)
3548 {
3549 streamer_write_uhwi (ob, 0);
3550 streamer_write_char_stream (ob->main_stream, 0);
3551 produce_asm (ob, NULLnullptr);
3552 destroy_output_block (ob);
3553 return;
3554 }
3555
3556 for (i = 0; i < lto_symtab_encoder_size (encoder); i++)
3557 {
3558 symtab_node *snode = lto_symtab_encoder_deref (encoder, i);
3559 cgraph_node *cnode = dyn_cast <cgraph_node *> (snode);
3560 modref_summary_lto *r;
3561
3562 if (cnode && cnode->definition && !cnode->alias
3563 && (r = summaries_lto->get (cnode))
3564 && r->useful_p (flags_from_decl_or_type (cnode->decl)))
3565 count++;
3566 }
3567 streamer_write_uhwi (ob, count);
3568
3569 for (i = 0; i < lto_symtab_encoder_size (encoder); i++)
3570 {
3571 symtab_node *snode = lto_symtab_encoder_deref (encoder, i);
3572 cgraph_node *cnode = dyn_cast <cgraph_node *> (snode);
3573
3574 if (cnode && cnode->definition && !cnode->alias)
3575 {
3576 modref_summary_lto *r = summaries_lto->get (cnode);
3577
3578 if (!r || !r->useful_p (flags_from_decl_or_type (cnode->decl)))
3579 continue;
3580
3581 streamer_write_uhwi (ob, lto_symtab_encoder_encode (encoder, cnode));
3582
3583 streamer_write_uhwi (ob, r->arg_flags.length ());
3584 for (unsigned int i = 0; i < r->arg_flags.length (); i++)
3585 streamer_write_uhwi (ob, r->arg_flags[i]);
3586 streamer_write_uhwi (ob, r->retslot_flags);
3587 streamer_write_uhwi (ob, r->static_chain_flags);
3588
3589 write_modref_records (r->loads, ob);
3590 write_modref_records (r->stores, ob);
3591 streamer_write_uhwi (ob, r->kills.length ());
3592 for (auto kill : r->kills)
3593 kill.stream_out (ob);
3594
3595 struct bitpack_d bp = bitpack_create (ob->main_stream);
3596 bp_pack_value (&bp, r->writes_errno, 1);
3597 bp_pack_value (&bp, r->side_effects, 1);
3598 bp_pack_value (&bp, r->nondeterministic, 1);
3599 bp_pack_value (&bp, r->calls_interposable, 1);
3600 if (!flag_wpaglobal_options.x_flag_wpa)
3601 {
3602 for (cgraph_edge *e = cnode->indirect_calls;
3603 e; e = e->next_callee)
3604 {
3605 class fnspec_summary *sum = fnspec_summaries->get (e);
3606 bp_pack_value (&bp, sum != NULLnullptr, 1);
3607 if (sum)
3608 bp_pack_string (ob, &bp, sum->fnspec, true);
3609 class escape_summary *esum = escape_summaries->get (e);
3610 modref_write_escape_summary (&bp,esum);
3611 }
3612 for (cgraph_edge *e = cnode->callees; e; e = e->next_callee)
3613 {
3614 class fnspec_summary *sum = fnspec_summaries->get (e);
3615 bp_pack_value (&bp, sum != NULLnullptr, 1);
3616 if (sum)
3617 bp_pack_string (ob, &bp, sum->fnspec, true);
3618 class escape_summary *esum = escape_summaries->get (e);
3619 modref_write_escape_summary (&bp,esum);
3620 }
3621 }
3622 streamer_write_bitpack (&bp);
3623 }
3624 }
3625 streamer_write_char_stream (ob->main_stream, 0);
3626 produce_asm (ob, NULLnullptr);
3627 destroy_output_block (ob);
3628}
3629
3630static void
3631read_section (struct lto_file_decl_data *file_data, const char *data,
3632 size_t len)
3633{
3634 const struct lto_function_header *header
3635 = (const struct lto_function_header *) data;
3636 const int cfg_offset = sizeof (struct lto_function_header);
3637 const int main_offset = cfg_offset + header->cfg_size;
3638 const int string_offset = main_offset + header->main_size;
3639 struct data_in *data_in;
3640 unsigned int i;
3641 unsigned int f_count;
3642
3643 lto_input_block ib ((const char *) data + main_offset, header->main_size,
3644 file_data->mode_table);
3645
3646 data_in
3647 = lto_data_in_create (file_data, (const char *) data + string_offset,
3648 header->string_size, vNULL);
3649 f_count = streamer_read_uhwi (&ib);
3650 for (i = 0; i < f_count; i++)
3651 {
3652 struct cgraph_node *node;
3653 lto_symtab_encoder_t encoder;
3654
3655 unsigned int index = streamer_read_uhwi (&ib);
3656 encoder = file_data->symtab_node_encoder;
3657 node = dyn_cast <cgraph_node *> (lto_symtab_encoder_deref (encoder,
3658 index));
3659
3660 modref_summary *modref_sum = summaries
3661 ? summaries->get_create (node) : NULLnullptr;
3662 modref_summary_lto *modref_sum_lto = summaries_lto
3663 ? summaries_lto->get_create (node)
3664 : NULLnullptr;
3665 if (optimization_summaries)
3666 modref_sum = optimization_summaries->get_create (node);
3667
3668 if (modref_sum)
3669 {
3670 modref_sum->writes_errno = false;
3671 modref_sum->side_effects = false;
3672 modref_sum->nondeterministic = false;
3673 modref_sum->calls_interposable = false;
3674 }
3675 if (modref_sum_lto)
3676 {
3677 modref_sum_lto->writes_errno = false;
3678 modref_sum_lto->side_effects = false;
3679 modref_sum_lto->nondeterministic = false;
3680 modref_sum_lto->calls_interposable = false;
3681 }
3682
3683 gcc_assert (!modref_sum || (!modref_sum->loads((void)(!(!modref_sum || (!modref_sum->loads && !modref_sum
->stores)) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 3684, __FUNCTION__), 0 : 0))
3684 && !modref_sum->stores))((void)(!(!modref_sum || (!modref_sum->loads && !modref_sum
->stores)) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 3684, __FUNCTION__), 0 : 0))
;
3685 gcc_assert (!modref_sum_lto || (!modref_sum_lto->loads((void)(!(!modref_sum_lto || (!modref_sum_lto->loads &&
!modref_sum_lto->stores)) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 3686, __FUNCTION__), 0 : 0))
3686 && !modref_sum_lto->stores))((void)(!(!modref_sum_lto || (!modref_sum_lto->loads &&
!modref_sum_lto->stores)) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 3686, __FUNCTION__), 0 : 0))
;
3687 unsigned int args = streamer_read_uhwi (&ib);
3688 if (args && modref_sum)
3689 modref_sum->arg_flags.reserve_exact (args);
3690 if (args && modref_sum_lto)
3691 modref_sum_lto->arg_flags.reserve_exact (args);
3692 for (unsigned int i = 0; i < args; i++)
3693 {
3694 eaf_flags_t flags = streamer_read_uhwi (&ib);
3695 if (modref_sum)
3696 modref_sum->arg_flags.quick_push (flags);
3697 if (modref_sum_lto)
3698 modref_sum_lto->arg_flags.quick_push (flags);
3699 }
3700 eaf_flags_t flags = streamer_read_uhwi (&ib);
3701 if (modref_sum)
3702 modref_sum->retslot_flags = flags;
3703 if (modref_sum_lto)
3704 modref_sum_lto->retslot_flags = flags;
3705
3706 flags = streamer_read_uhwi (&ib);
3707 if (modref_sum)
3708 modref_sum->static_chain_flags = flags;
3709 if (modref_sum_lto)
3710 modref_sum_lto->static_chain_flags = flags;
3711
3712 read_modref_records (&ib, data_in,
3713 modref_sum ? &modref_sum->loads : NULLnullptr,
3714 modref_sum_lto ? &modref_sum_lto->loads : NULLnullptr);
3715 read_modref_records (&ib, data_in,
3716 modref_sum ? &modref_sum->stores : NULLnullptr,
3717 modref_sum_lto ? &modref_sum_lto->stores : NULLnullptr);
3718 int j = streamer_read_uhwi (&ib);
3719 if (j && modref_sum)
3720 modref_sum->kills.reserve_exact (j);
3721 if (j && modref_sum_lto)
3722 modref_sum_lto->kills.reserve_exact (j);
3723 for (int k = 0; k < j; k++)
3724 {
3725 modref_access_node a = modref_access_node::stream_in (&ib);
3726
3727 if (modref_sum)
3728 modref_sum->kills.quick_push (a);
3729 if (modref_sum_lto)
3730 modref_sum_lto->kills.quick_push (a);
3731 }
3732 struct bitpack_d bp = streamer_read_bitpack (&ib);
3733 if (bp_unpack_value (&bp, 1))
3734 {
3735 if (modref_sum)
3736 modref_sum->writes_errno = true;
3737 if (modref_sum_lto)
3738 modref_sum_lto->writes_errno = true;
3739 }
3740 if (bp_unpack_value (&bp, 1))
3741 {
3742 if (modref_sum)
3743 modref_sum->side_effects = true;
3744 if (modref_sum_lto)
3745 modref_sum_lto->side_effects = true;
3746 }
3747 if (bp_unpack_value (&bp, 1))
3748 {
3749 if (modref_sum)
3750 modref_sum->nondeterministic = true;
3751 if (modref_sum_lto)
3752 modref_sum_lto->nondeterministic = true;
3753 }
3754 if (bp_unpack_value (&bp, 1))
3755 {
3756 if (modref_sum)
3757 modref_sum->calls_interposable = true;
3758 if (modref_sum_lto)
3759 modref_sum_lto->calls_interposable = true;
3760 }
3761 if (!flag_ltransglobal_options.x_flag_ltrans)
3762 {
3763 for (cgraph_edge *e = node->indirect_calls; e; e = e->next_callee)
3764 {
3765 if (bp_unpack_value (&bp, 1))
3766 {
3767 class fnspec_summary *sum = fnspec_summaries->get_create (e);
3768 sum->fnspec = xstrdup (bp_unpack_string (data_in, &bp));
3769 }
3770 modref_read_escape_summary (&bp, e);
3771 }
3772 for (cgraph_edge *e = node->callees; e; e = e->next_callee)
3773 {
3774 if (bp_unpack_value (&bp, 1))
3775 {
3776 class fnspec_summary *sum = fnspec_summaries->get_create (e);
3777 sum->fnspec = xstrdup (bp_unpack_string (data_in, &bp));
3778 }
3779 modref_read_escape_summary (&bp, e);
3780 }
3781 }
3782 if (flag_ltransglobal_options.x_flag_ltrans)
3783 modref_sum->finalize (node->decl);
3784 if (dump_file)
3785 {
3786 fprintf (dump_file, "Read modref for %s\n",
3787 node->dump_name ());
3788 if (modref_sum)
3789 modref_sum->dump (dump_file);
3790 if (modref_sum_lto)
3791 modref_sum_lto->dump (dump_file);
3792 dump_modref_edge_summaries (dump_file, node, 4);
3793 }
3794 }
3795
3796 lto_free_section_data (file_data, LTO_section_ipa_modref, NULLnullptr, data,
3797 len);
3798 lto_data_in_delete (data_in);
3799}
3800
3801/* Callback for read_summary. */
3802
3803static void
3804modref_read (void)
3805{
3806 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
3807 struct lto_file_decl_data *file_data;
3808 unsigned int j = 0;
3809
3810 gcc_checking_assert (!optimization_summaries && !summaries && !summaries_lto)((void)(!(!optimization_summaries && !summaries &&
!summaries_lto) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 3810, __FUNCTION__), 0 : 0))
;
3811 if (flag_ltransglobal_options.x_flag_ltrans)
3812 optimization_summaries = modref_summaries::create_ggc (symtab);
3813 else
3814 {
3815 if (flag_wpaglobal_options.x_flag_wpa || flag_incremental_linkglobal_options.x_flag_incremental_link == INCREMENTAL_LINK_LTO)
3816 summaries_lto = modref_summaries_lto::create_ggc (symtab);
3817 if (!flag_wpaglobal_options.x_flag_wpa
3818 || (flag_incremental_linkglobal_options.x_flag_incremental_link == INCREMENTAL_LINK_LTO
3819 && flag_fat_lto_objectsglobal_options.x_flag_fat_lto_objects))
3820 summaries = modref_summaries::create_ggc (symtab);
3821 if (!fnspec_summaries)
3822 fnspec_summaries = new fnspec_summaries_t (symtab);
3823 if (!escape_summaries)
3824 escape_summaries = new escape_summaries_t (symtab);
3825 }
3826
3827 while ((file_data = file_data_vec[j++]))
3828 {
3829 size_t len;
3830 const char *data = lto_get_summary_section_data (file_data,
3831 LTO_section_ipa_modref,
3832 &len);
3833 if (data)
3834 read_section (file_data, data, len);
3835 else
3836 /* Fatal error here. We do not want to support compiling ltrans units
3837 with different version of compiler or different flags than the WPA
3838 unit, so this should never happen. */
3839 fatal_error (input_location,
3840 "IPA modref summary is missing in input file");
3841 }
3842}
3843
3844/* Recompute arg_flags for param adjustments in INFO. */
3845
3846static void
3847remap_arg_flags (auto_vec <eaf_flags_t> &arg_flags, clone_info *info)
3848{
3849 auto_vec<eaf_flags_t> old = arg_flags.copy ();
3850 int max = -1;
3851 size_t i;
3852 ipa_adjusted_param *p;
3853
3854 arg_flags.release ();
3855
3856 FOR_EACH_VEC_SAFE_ELT (info->param_adjustments->m_adj_params, i, p)for (i = 0; vec_safe_iterate ((info->param_adjustments->
m_adj_params), (i), &(p)); ++(i))
3857 {
3858 int o = info->param_adjustments->get_original_index (i);
3859 if (o >= 0 && (int)old.length () > o && old[o])
3860 max = i;
3861 }
3862 if (max >= 0)
3863 arg_flags.safe_grow_cleared (max + 1, true);
3864 FOR_EACH_VEC_SAFE_ELT (info->param_adjustments->m_adj_params, i, p)for (i = 0; vec_safe_iterate ((info->param_adjustments->
m_adj_params), (i), &(p)); ++(i))
3865 {
3866 int o = info->param_adjustments->get_original_index (i);
3867 if (o >= 0 && (int)old.length () > o && old[o])
3868 arg_flags[i] = old[o];
3869 }
3870}
3871
3872/* Update kills accrdoing to the parm map MAP. */
3873
3874static void
3875remap_kills (vec <modref_access_node> &kills, const vec <int> &map)
3876{
3877 for (size_t i = 0; i < kills.length ();)
3878 if (kills[i].parm_index >= 0)
3879 {
3880 if (kills[i].parm_index < (int)map.length ()
3881 && map[kills[i].parm_index] != MODREF_UNKNOWN_PARM)
3882 {
3883 kills[i].parm_index = map[kills[i].parm_index];
3884 i++;
3885 }
3886 else
3887 kills.unordered_remove (i);
3888 }
3889 else
3890 i++;
3891}
3892
3893/* If signature changed, update the summary. */
3894
3895static void
3896update_signature (struct cgraph_node *node)
3897{
3898 clone_info *info = clone_info::get (node);
3899 if (!info || !info->param_adjustments)
3900 return;
3901
3902 modref_summary *r = optimization_summaries
3903 ? optimization_summaries->get (node) : NULLnullptr;
3904 modref_summary_lto *r_lto = summaries_lto
3905 ? summaries_lto->get (node) : NULLnullptr;
3906 if (!r && !r_lto)
3907 return;
3908 if (dump_file)
3909 {
3910 fprintf (dump_file, "Updating summary for %s from:\n",
3911 node->dump_name ());
3912 if (r)
3913 r->dump (dump_file);
3914 if (r_lto)
3915 r_lto->dump (dump_file);
3916 }
3917
3918 size_t i, max = 0;
3919 ipa_adjusted_param *p;
3920
3921 FOR_EACH_VEC_SAFE_ELT (info->param_adjustments->m_adj_params, i, p)for (i = 0; vec_safe_iterate ((info->param_adjustments->
m_adj_params), (i), &(p)); ++(i))
3922 {
3923 int idx = info->param_adjustments->get_original_index (i);
3924 if (idx > (int)max)
3925 max = idx;
3926 }
3927
3928 auto_vec <int, 32> map;
3929
3930 map.reserve (max + 1);
3931 for (i = 0; i <= max; i++)
3932 map.quick_push (MODREF_UNKNOWN_PARM);
3933 FOR_EACH_VEC_SAFE_ELT (info->param_adjustments->m_adj_params, i, p)for (i = 0; vec_safe_iterate ((info->param_adjustments->
m_adj_params), (i), &(p)); ++(i))
3934 {
3935 int idx = info->param_adjustments->get_original_index (i);
3936 if (idx >= 0)
3937 map[idx] = i;
3938 }
3939 if (r)
3940 {
3941 r->loads->remap_params (&map);
3942 r->stores->remap_params (&map);
3943 remap_kills (r->kills, map);
3944 if (r->arg_flags.length ())
3945 remap_arg_flags (r->arg_flags, info);
3946 }
3947 if (r_lto)
3948 {
3949 r_lto->loads->remap_params (&map);
3950 r_lto->stores->remap_params (&map);
3951 remap_kills (r_lto->kills, map);
3952 if (r_lto->arg_flags.length ())
3953 remap_arg_flags (r_lto->arg_flags, info);
3954 }
3955 if (dump_file)
3956 {
3957 fprintf (dump_file, "to:\n");
3958 if (r)
3959 r->dump (dump_file);
3960 if (r_lto)
3961 r_lto->dump (dump_file);
3962 }
3963 if (r)
3964 r->finalize (node->decl);
3965 return;
3966}
3967
3968/* Definition of the modref IPA pass. */
3969const pass_data pass_data_ipa_modref =
3970{
3971 IPA_PASS, /* type */
3972 "modref", /* name */
3973 OPTGROUP_IPA, /* optinfo_flags */
3974 TV_IPA_MODREF, /* tv_id */
3975 0, /* properties_required */
3976 0, /* properties_provided */
3977 0, /* properties_destroyed */
3978 0, /* todo_flags_start */
3979 ( TODO_dump_symtab(1 << 7) ), /* todo_flags_finish */
3980};
3981
3982class pass_ipa_modref : public ipa_opt_pass_d
3983{
3984public:
3985 pass_ipa_modref (gcc::context *ctxt)
3986 : ipa_opt_pass_d (pass_data_ipa_modref, ctxt,
3987 modref_generate, /* generate_summary */
3988 modref_write, /* write_summary */
3989 modref_read, /* read_summary */
3990 modref_write, /* write_optimization_summary */
3991 modref_read, /* read_optimization_summary */
3992 NULLnullptr, /* stmt_fixup */
3993 0, /* function_transform_todo_flags_start */
3994 NULLnullptr, /* function_transform */
3995 NULLnullptr) /* variable_transform */
3996 {}
3997
3998 /* opt_pass methods: */
3999 opt_pass *clone () { return new pass_ipa_modref (m_ctxt); }
4000 virtual bool gate (function *)
4001 {
4002 return true;
4003 }
4004 virtual unsigned int execute (function *);
4005
4006};
4007
4008}
4009
4010unsigned int pass_modref::execute (function *f)
4011{
4012 if (analyze_function (f, false))
4013 return execute_fixup_cfg ();
4014 return 0;
4015}
4016
4017gimple_opt_pass *
4018make_pass_modref (gcc::context *ctxt)
4019{
4020 return new pass_modref (ctxt);
4021}
4022
4023ipa_opt_pass_d *
4024make_pass_ipa_modref (gcc::context *ctxt)
4025{
4026 return new pass_ipa_modref (ctxt);
4027}
4028
4029namespace {
4030
4031/* Skip edges from and to nodes without ipa_pure_const enabled.
4032 Ignore not available symbols. */
4033
4034static bool
4035ignore_edge (struct cgraph_edge *e)
4036{
4037 /* We merge summaries of inline clones into summaries of functions they
4038 are inlined to. For that reason the complete function bodies must
4039 act as unit. */
4040 if (!e->inline_failed)
4041 return false;
4042 enum availability avail;
4043 cgraph_node *callee = e->callee->function_or_virtual_thunk_symbol
4044 (&avail, e->caller);
4045
4046 return (avail <= AVAIL_INTERPOSABLE
4047 || ((!optimization_summaries || !optimization_summaries->get (callee))
4048 && (!summaries_lto || !summaries_lto->get (callee))));
4049}
4050
4051/* Compute parm_map for CALLEE_EDGE. */
4052
4053static bool
4054compute_parm_map (cgraph_edge *callee_edge, vec<modref_parm_map> *parm_map)
4055{
4056 class ipa_edge_args *args;
4057 if (ipa_node_params_sum
31
Assuming 'ipa_node_params_sum' is non-null
39
Taking true branch
4058 && !callee_edge->call_stmt_cannot_inline_p
32
Assuming field 'call_stmt_cannot_inline_p' is 0
4059 && (args = ipa_edge_args_sum->get (callee_edge)) != NULLnullptr)
33
Calling 'call_summary::get'
37
Returning from 'call_summary::get'
38
Assuming the condition is true
4060 {
4061 int i, count = ipa_get_cs_argument_count (args);
4062 class ipa_node_params *caller_parms_info, *callee_pi;
4063 class ipa_call_summary *es
4064 = ipa_call_summaries->get (callee_edge);
40
Calling 'fast_call_summary::get'
47
Returning from 'fast_call_summary::get'
4065 cgraph_node *callee
4066 = callee_edge->callee->function_or_virtual_thunk_symbol
48
Called C++ object pointer is null
4067 (NULLnullptr, callee_edge->caller);
4068
4069 caller_parms_info
4070 = ipa_node_params_sum->get (callee_edge->caller->inlined_to
4071 ? callee_edge->caller->inlined_to
4072 : callee_edge->caller);
4073 callee_pi = ipa_node_params_sum->get (callee);
4074
4075 (*parm_map).safe_grow_cleared (count, true);
4076
4077 for (i = 0; i < count; i++)
4078 {
4079 if (es && es->param[i].points_to_local_or_readonly_memory)
4080 {
4081 (*parm_map)[i].parm_index = MODREF_LOCAL_MEMORY_PARM;
4082 continue;
4083 }
4084
4085 struct ipa_jump_func *jf
4086 = ipa_get_ith_jump_func (args, i);
4087 if (jf && callee_pi)
4088 {
4089 tree cst = ipa_value_from_jfunc (caller_parms_info,
4090 jf,
4091 ipa_get_type
4092 (callee_pi, i));
4093 if (cst && points_to_local_or_readonly_memory_p (cst))
4094 {
4095 (*parm_map)[i].parm_index = MODREF_LOCAL_MEMORY_PARM;
4096 continue;
4097 }
4098 }
4099 if (jf && jf->type == IPA_JF_PASS_THROUGH)
4100 {
4101 (*parm_map)[i].parm_index
4102 = ipa_get_jf_pass_through_formal_id (jf);
4103 if (ipa_get_jf_pass_through_operation (jf) == NOP_EXPR)
4104 {
4105 (*parm_map)[i].parm_offset_known = true;
4106 (*parm_map)[i].parm_offset = 0;
4107 }
4108 else if (ipa_get_jf_pass_through_operation (jf)
4109 == POINTER_PLUS_EXPR
4110 && ptrdiff_tree_p (ipa_get_jf_pass_through_operand (jf),
4111 &(*parm_map)[i].parm_offset))
4112 (*parm_map)[i].parm_offset_known = true;
4113 else
4114 (*parm_map)[i].parm_offset_known = false;
4115 continue;
4116 }
4117 if (jf && jf->type == IPA_JF_ANCESTOR)
4118 {
4119 (*parm_map)[i].parm_index = ipa_get_jf_ancestor_formal_id (jf);
4120 (*parm_map)[i].parm_offset_known = true;
4121 gcc_checking_assert((void)(!(!(ipa_get_jf_ancestor_offset (jf) & ((8) - 1)))
? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 4122, __FUNCTION__), 0 : 0))
4122 (!(ipa_get_jf_ancestor_offset (jf) & (BITS_PER_UNIT - 1)))((void)(!(!(ipa_get_jf_ancestor_offset (jf) & ((8) - 1)))
? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 4122, __FUNCTION__), 0 : 0))
;
4123 (*parm_map)[i].parm_offset
4124 = ipa_get_jf_ancestor_offset (jf) >> LOG2_BITS_PER_UNIT3;
4125 }
4126 else
4127 (*parm_map)[i].parm_index = -1;
4128 }
4129 if (dump_file)
4130 {
4131 fprintf (dump_file, " Parm map: ");
4132 for (i = 0; i < count; i++)
4133 fprintf (dump_file, " %i", (*parm_map)[i].parm_index);
4134 fprintf (dump_file, "\n");
4135 }
4136 return true;
4137 }
4138 return false;
4139}
4140
4141/* Map used to translate escape infos. */
4142
struct escape_map
{
  /* Index of the parameter in the outer function the value maps to.  */
  int parm_index;
  /* True for a direct escape; combined (bitwise and) with
     escape_entry::direct when summaries are translated.  */
  bool direct;
};
4148
4149/* Update escape map for E. */
4150
4151static void
4152update_escape_summary_1 (cgraph_edge *e,
4153 vec <vec <escape_map>> &map,
4154 bool ignore_stores)
4155{
4156 escape_summary *sum = escape_summaries->get (e);
4157 if (!sum)
4158 return;
4159 auto_vec <escape_entry> old = sum->esc.copy ();
4160 sum->esc.release ();
4161
4162 unsigned int i;
4163 escape_entry *ee;
4164 FOR_EACH_VEC_ELT (old, i, ee)for (i = 0; (old).iterate ((i), &(ee)); ++(i))
4165 {
4166 unsigned int j;
4167 struct escape_map *em;
4168 /* TODO: We do not have jump functions for return slots, so we
4169 never propagate them to outer function. */
4170 if (ee->parm_index >= (int)map.length ()
4171 || ee->parm_index < 0)
4172 continue;
4173 FOR_EACH_VEC_ELT (map[ee->parm_index], j, em)for (j = 0; (map[ee->parm_index]).iterate ((j), &(em))
; ++(j))
4174 {
4175 int min_flags = ee->min_flags;
4176 if (ee->direct && !em->direct)
4177 min_flags = deref_flags (min_flags, ignore_stores);
4178 struct escape_entry entry = {em->parm_index, ee->arg,
4179 ee->min_flags,
4180 ee->direct & em->direct};
4181 sum->esc.safe_push (entry);
4182 }
4183 }
4184 if (!sum->esc.length ())
4185 escape_summaries->remove (e);
4186}
4187
4188/* Update escape map for NODE. */
4189
4190static void
4191update_escape_summary (cgraph_node *node,
4192 vec <vec <escape_map>> &map,
4193 bool ignore_stores)
4194{
4195 if (!escape_summaries)
4196 return;
4197 for (cgraph_edge *e = node->indirect_calls; e; e = e->next_callee)
4198 update_escape_summary_1 (e, map, ignore_stores);
4199 for (cgraph_edge *e = node->callees; e; e = e->next_callee)
4200 {
4201 if (!e->inline_failed)
4202 update_escape_summary (e->callee, map, ignore_stores);
4203 else
4204 update_escape_summary_1 (e, map, ignore_stores);
4205 }
4206}
4207
4208/* Get parameter type from DECL. This is only safe for special cases
4209 like builtins we create fnspec for because the type match is checked
4210 at fnspec creation time. */
4211
4212static tree
4213get_parm_type (tree decl, unsigned int i)
4214{
4215 tree t = TYPE_ARG_TYPES (TREE_TYPE (decl))((tree_check2 ((((contains_struct_check ((decl), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 4215, __FUNCTION__))->typed.type)), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 4215, __FUNCTION__, (FUNCTION_TYPE), (METHOD_TYPE)))->type_non_common
.values)
;
4216
4217 for (unsigned int p = 0; p < i; p++)
4218 t = TREE_CHAIN (t)((contains_struct_check ((t), (TS_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 4218, __FUNCTION__))->common.chain)
;
4219 return TREE_VALUE (t)((tree_check ((t), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 4219, __FUNCTION__, (TREE_LIST)))->list.value)
;
4220}
4221
4222/* Return access mode for argument I of call E with FNSPEC. */
4223
4224static modref_access_node
4225get_access_for_fnspec (cgraph_edge *e, attr_fnspec &fnspec,
4226 unsigned int i, modref_parm_map &map)
4227{
4228 tree size = NULL_TREE(tree) nullptr;
4229 unsigned int size_arg;
4230
4231 if (!fnspec.arg_specified_p (i))
4232 ;
4233 else if (fnspec.arg_max_access_size_given_by_arg_p (i, &size_arg))
4234 {
4235 cgraph_node *node = e->caller->inlined_to
4236 ? e->caller->inlined_to : e->caller;
4237 ipa_node_params *caller_parms_info = ipa_node_params_sum->get (node);
4238 ipa_edge_args *args = ipa_edge_args_sum->get (e);
4239 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, size_arg);
4240
4241 if (jf)
4242 size = ipa_value_from_jfunc (caller_parms_info, jf,
4243 get_parm_type (e->callee->decl, size_arg));
4244 }
4245 else if (fnspec.arg_access_size_given_by_type_p (i))
4246 size = TYPE_SIZE_UNIT (get_parm_type (e->callee->decl, i))((tree_class_check ((get_parm_type (e->callee->decl, i)
), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 4246, __FUNCTION__))->type_common.size_unit)
;
4247 modref_access_node a = {0, -1, -1,
4248 map.parm_offset, map.parm_index,
4249 map.parm_offset_known, 0};
4250 poly_int64 size_hwi;
4251 if (size
4252 && poly_int_tree_p (size, &size_hwi)
4253 && coeffs_in_range_p (size_hwi, 0,
4254 HOST_WIDE_INT_MAX(~((long) (1UL << (64 - 1)))) / BITS_PER_UNIT(8)))
4255 {
4256 a.size = -1;
4257 a.max_size = size_hwi << LOG2_BITS_PER_UNIT3;
4258 }
4259 return a;
4260}
4261
4262/* Call E in NODE with ECF_FLAGS has no summary; update CUR_SUMMARY and
4263 CUR_SUMMARY_LTO accordingly. Return true if something changed. */
4264
4265static bool
4266propagate_unknown_call (cgraph_node *node,
4267 cgraph_edge *e, int ecf_flags,
4268 modref_summary *cur_summary,
4269 modref_summary_lto *cur_summary_lto,
4270 bool nontrivial_scc)
4271{
4272 bool changed = false;
4273 class fnspec_summary *fnspec_sum = fnspec_summaries->get (e);
16
Calling 'call_summary::get'
21
Returning from 'call_summary::get'
4274 auto_vec <modref_parm_map, 32> parm_map;
4275 bool looping;
4276
4277 if (e->callee
22
Assuming field 'callee' is null
4278 && builtin_safe_for_const_function_p (&looping, e->callee->decl))
4279 {
4280 if (looping && cur_summary && !cur_summary->side_effects)
4281 {
4282 cur_summary->side_effects = true;
4283 changed = true;
4284 }
4285 if (looping && cur_summary_lto && !cur_summary_lto->side_effects)
4286 {
4287 cur_summary_lto->side_effects = true;
4288 changed = true;
4289 }
4290 return changed;
4291 }
4292
4293 if (!(ecf_flags & (ECF_CONST(1 << 0) | ECF_NOVOPS(1 << 9) | ECF_PURE(1 << 1)))
23
Assuming the condition is false
26
Taking false branch
4294 || (ecf_flags & ECF_LOOPING_CONST_OR_PURE(1 << 2))
24
Assuming the condition is false
4295 || nontrivial_scc)
25
Assuming 'nontrivial_scc' is false
4296 {
4297 if (cur_summary && !cur_summary->side_effects)
4298 {
4299 cur_summary->side_effects = true;
4300 changed = true;
4301 }
4302 if (cur_summary_lto && !cur_summary_lto->side_effects)
4303 {
4304 cur_summary_lto->side_effects = true;
4305 changed = true;
4306 }
4307 if (cur_summary && !cur_summary->nondeterministic
4308 && !ignore_nondeterminism_p (node->decl, ecf_flags))
4309 {
4310 cur_summary->nondeterministic = true;
4311 changed = true;
4312 }
4313 if (cur_summary_lto && !cur_summary_lto->nondeterministic
4314 && !ignore_nondeterminism_p (node->decl, ecf_flags))
4315 {
4316 cur_summary_lto->nondeterministic = true;
4317 changed = true;
4318 }
4319 }
4320 if (ecf_flags & (ECF_CONST(1 << 0) | ECF_NOVOPS(1 << 9)))
27
Assuming the condition is false
28
Taking false branch
4321 return changed;
4322
4323 if (fnspec_sum
29
Assuming 'fnspec_sum' is non-null
4324 && compute_parm_map (e, &parm_map))
30
Calling 'compute_parm_map'
4325 {
4326 attr_fnspec fnspec (fnspec_sum->fnspec);
4327
4328 gcc_checking_assert (fnspec.known_p ())((void)(!(fnspec.known_p ()) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 4328, __FUNCTION__), 0 : 0))
;
4329 if (fnspec.global_memory_read_p ())
4330 collapse_loads (cur_summary, cur_summary_lto);
4331 else
4332 {
4333 tree t = TYPE_ARG_TYPES (TREE_TYPE (e->callee->decl))((tree_check2 ((((contains_struct_check ((e->callee->decl
), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 4333, __FUNCTION__))->typed.type)), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 4333, __FUNCTION__, (FUNCTION_TYPE), (METHOD_TYPE)))->type_non_common
.values)
;
4334 for (unsigned i = 0; i < parm_map.length () && t;
4335 i++, t = TREE_CHAIN (t)((contains_struct_check ((t), (TS_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 4335, __FUNCTION__))->common.chain)
)
4336 if (!POINTER_TYPE_P (TREE_VALUE (t))(((enum tree_code) (((tree_check ((t), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 4336, __FUNCTION__, (TREE_LIST)))->list.value))->base
.code) == POINTER_TYPE || ((enum tree_code) (((tree_check ((t
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 4336, __FUNCTION__, (TREE_LIST)))->list.value))->base
.code) == REFERENCE_TYPE)
)
4337 ;
4338 else if (!fnspec.arg_specified_p (i)
4339 || fnspec.arg_maybe_read_p (i))
4340 {
4341 modref_parm_map map = parm_map[i];
4342 if (map.parm_index == MODREF_LOCAL_MEMORY_PARM)
4343 continue;
4344 if (map.parm_index == MODREF_UNKNOWN_PARM)
4345 {
4346 collapse_loads (cur_summary, cur_summary_lto);
4347 break;
4348 }
4349 if (cur_summary)
4350 changed |= cur_summary->loads->insert
4351 (0, 0, get_access_for_fnspec (e, fnspec, i, map), false);
4352 if (cur_summary_lto)
4353 changed |= cur_summary_lto->loads->insert
4354 (0, 0, get_access_for_fnspec (e, fnspec, i, map), false);
4355 }
4356 }
4357 if (ignore_stores_p (node->decl, ecf_flags))
4358 ;
4359 else if (fnspec.global_memory_written_p ())
4360 collapse_stores (cur_summary, cur_summary_lto);
4361 else
4362 {
4363 tree t = TYPE_ARG_TYPES (TREE_TYPE (e->callee->decl))((tree_check2 ((((contains_struct_check ((e->callee->decl
), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 4363, __FUNCTION__))->typed.type)), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 4363, __FUNCTION__, (FUNCTION_TYPE), (METHOD_TYPE)))->type_non_common
.values)
;
4364 for (unsigned i = 0; i < parm_map.length () && t;
4365 i++, t = TREE_CHAIN (t)((contains_struct_check ((t), (TS_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 4365, __FUNCTION__))->common.chain)
)
4366 if (!POINTER_TYPE_P (TREE_VALUE (t))(((enum tree_code) (((tree_check ((t), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 4366, __FUNCTION__, (TREE_LIST)))->list.value))->base
.code) == POINTER_TYPE || ((enum tree_code) (((tree_check ((t
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 4366, __FUNCTION__, (TREE_LIST)))->list.value))->base
.code) == REFERENCE_TYPE)
)
4367 ;
4368 else if (!fnspec.arg_specified_p (i)
4369 || fnspec.arg_maybe_written_p (i))
4370 {
4371 modref_parm_map map = parm_map[i];
4372 if (map.parm_index == MODREF_LOCAL_MEMORY_PARM)
4373 continue;
4374 if (map.parm_index == MODREF_UNKNOWN_PARM)
4375 {
4376 collapse_stores (cur_summary, cur_summary_lto);
4377 break;
4378 }
4379 if (cur_summary)
4380 changed |= cur_summary->stores->insert
4381 (0, 0, get_access_for_fnspec (e, fnspec, i, map), false);
4382 if (cur_summary_lto)
4383 changed |= cur_summary_lto->stores->insert
4384 (0, 0, get_access_for_fnspec (e, fnspec, i, map), false);
4385 }
4386 }
4387 if (fnspec.errno_maybe_written_p () && flag_errno_mathglobal_options.x_flag_errno_math)
4388 {
4389 if (cur_summary && !cur_summary->writes_errno)
4390 {
4391 cur_summary->writes_errno = true;
4392 changed = true;
4393 }
4394 if (cur_summary_lto && !cur_summary_lto->writes_errno)
4395 {
4396 cur_summary_lto->writes_errno = true;
4397 changed = true;
4398 }
4399 }
4400 return changed;
4401 }
4402 if (dump_file)
4403 fprintf (dump_file, " collapsing loads\n");
4404 changed |= collapse_loads (cur_summary, cur_summary_lto);
4405 if (!ignore_stores_p (node->decl, ecf_flags))
4406 {
4407 if (dump_file)
4408 fprintf (dump_file, " collapsing stores\n");
4409 changed |= collapse_stores (cur_summary, cur_summary_lto);
4410 }
4411 return changed;
4412}
4413
4414/* Maybe remove summaries of NODE pointed to by CUR_SUMMARY_PTR
4415 and CUR_SUMMARY_LTO_PTR if they are useless according to ECF_FLAGS. */
4416
4417static void
4418remove_useless_summaries (cgraph_node *node,
4419 modref_summary **cur_summary_ptr,
4420 modref_summary_lto **cur_summary_lto_ptr,
4421 int ecf_flags)
4422{
4423 if (*cur_summary_ptr && !(*cur_summary_ptr)->useful_p (ecf_flags, false))
4424 {
4425 optimization_summaries->remove (node);
4426 *cur_summary_ptr = NULLnullptr;
4427 }
4428 if (*cur_summary_lto_ptr
4429 && !(*cur_summary_lto_ptr)->useful_p (ecf_flags, false))
4430 {
4431 summaries_lto->remove (node);
4432 *cur_summary_lto_ptr = NULLnullptr;
4433 }
4434}
4435
4436/* Perform iterative dataflow on SCC component starting in COMPONENT_NODE
4437 and propagate loads/stores. */
4438
4439static bool
4440modref_propagate_in_scc (cgraph_node *component_node)
4441{
4442 bool changed = true;
4443 bool first = true;
4444 int iteration = 0;
4445
4446 while (changed)
1
Loop condition is true. Entering loop body
4447 {
4448 bool nontrivial_scc
4449 = ((struct ipa_dfs_info *) component_node->aux)->next_cycle;
4450 changed = false;
4451 for (struct cgraph_node *cur = component_node; cur;
2
Loop condition is true. Entering loop body
4452 cur = ((struct ipa_dfs_info *) cur->aux)->next_cycle)
4453 {
4454 cgraph_node *node = cur->inlined_to ? cur->inlined_to : cur;
3
Assuming field 'inlined_to' is null
4
'?' condition is false
4455 modref_summary *cur_summary = optimization_summaries
5
Assuming 'optimization_summaries' is null
6
'?' condition is false
4456 ? optimization_summaries->get (node)
4457 : NULLnullptr;
4458 modref_summary_lto *cur_summary_lto = summaries_lto
7
Assuming 'summaries_lto' is non-null
8
'?' condition is true
4459 ? summaries_lto->get (node)
4460 : NULLnullptr;
4461
4462 if (!cur_summary
8.1
'cur_summary' is null
8.1
'cur_summary' is null
&& !cur_summary_lto)
9
Assuming 'cur_summary_lto' is non-null
10
Taking false branch
4463 continue;
4464
4465 int cur_ecf_flags = flags_from_decl_or_type (node->decl);
4466
4467 if (dump_file)
11
Assuming 'dump_file' is null
12
Taking false branch
4468 fprintf (dump_file, " Processing %s%s%s\n",
4469 cur->dump_name (),
4470 TREE_READONLY (cur->decl)((non_type_check ((cur->decl), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 4470, __FUNCTION__))->base.readonly_flag)
? " (const)" : "",
4471 DECL_PURE_P (cur->decl)((tree_check ((cur->decl), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 4471, __FUNCTION__, (FUNCTION_DECL)))->function_decl.pure_flag
)
? " (pure)" : "");
4472
4473 for (cgraph_edge *e = cur->indirect_calls; e; e = e->next_callee)
13
Loop condition is true. Entering loop body
4474 {
4475 if (dump_file
13.1
'dump_file' is null
13.1
'dump_file' is null
)
14
Taking false branch
4476 fprintf (dump_file, " Indirect call\n");
4477 if (propagate_unknown_call
15
Calling 'propagate_unknown_call'
4478 (node, e, e->indirect_info->ecf_flags,
4479 cur_summary, cur_summary_lto,
4480 nontrivial_scc))
4481 {
4482 changed = true;
4483 remove_useless_summaries (node, &cur_summary,
4484 &cur_summary_lto,
4485 cur_ecf_flags);
4486 if (!cur_summary && !cur_summary_lto)
4487 break;
4488 }
4489 }
4490
4491 if (!cur_summary && !cur_summary_lto)
4492 continue;
4493
4494 for (cgraph_edge *callee_edge = cur->callees; callee_edge;
4495 callee_edge = callee_edge->next_callee)
4496 {
4497 int flags = flags_from_decl_or_type (callee_edge->callee->decl);
4498 modref_summary *callee_summary = NULLnullptr;
4499 modref_summary_lto *callee_summary_lto = NULLnullptr;
4500 struct cgraph_node *callee;
4501
4502 if (!callee_edge->inline_failed
4503 || ((flags & (ECF_CONST(1 << 0) | ECF_NOVOPS(1 << 9)))
4504 && !(flags & ECF_LOOPING_CONST_OR_PURE(1 << 2))))
4505 continue;
4506
4507 /* Get the callee and its summary. */
4508 enum availability avail;
4509 callee = callee_edge->callee->function_or_virtual_thunk_symbol
4510 (&avail, cur);
4511
4512 /* It is not necessary to re-process calls outside of the
4513 SCC component. */
4514 if (iteration > 0
4515 && (!callee->aux
4516 || ((struct ipa_dfs_info *)cur->aux)->scc_no
4517 != ((struct ipa_dfs_info *)callee->aux)->scc_no))
4518 continue;
4519
4520 if (dump_file)
4521 fprintf (dump_file, " Call to %s\n",
4522 callee_edge->callee->dump_name ());
4523
4524 bool ignore_stores = ignore_stores_p (cur->decl, flags);
4525
4526 if (avail <= AVAIL_INTERPOSABLE)
4527 {
4528 if (dump_file)
4529 fprintf (dump_file, " Call target interposable"
4530 " or not available\n");
4531 changed |= propagate_unknown_call
4532 (node, callee_edge, flags,
4533 cur_summary, cur_summary_lto,
4534 nontrivial_scc);
4535 if (!cur_summary && !cur_summary_lto)
4536 break;
4537 continue;
4538 }
4539
4540 /* We don't know anything about CALLEE, hence we cannot tell
4541 anything about the entire component. */
4542
4543 if (cur_summary
4544 && !(callee_summary = optimization_summaries->get (callee)))
4545 {
4546 if (dump_file)
4547 fprintf (dump_file, " No call target summary\n");
4548 changed |= propagate_unknown_call
4549 (node, callee_edge, flags,
4550 cur_summary, NULLnullptr,
4551 nontrivial_scc);
4552 }
4553 if (cur_summary_lto
4554 && !(callee_summary_lto = summaries_lto->get (callee)))
4555 {
4556 if (dump_file)
4557 fprintf (dump_file, " No call target summary\n");
4558 changed |= propagate_unknown_call
4559 (node, callee_edge, flags,
4560 NULLnullptr, cur_summary_lto,
4561 nontrivial_scc);
4562 }
4563
4564 if (callee_summary && !cur_summary->side_effects
4565 && (callee_summary->side_effects
4566 || callee_edge->recursive_p ()))
4567 {
4568 cur_summary->side_effects = true;
4569 changed = true;
4570 }
4571 if (callee_summary_lto && !cur_summary_lto->side_effects
4572 && (callee_summary_lto->side_effects
4573 || callee_edge->recursive_p ()))
4574 {
4575 cur_summary_lto->side_effects = true;
4576 changed = true;
4577 }
4578 if (callee_summary && !cur_summary->nondeterministic
4579 && callee_summary->nondeterministic
4580 && !ignore_nondeterminism_p (cur->decl, flags))
4581 {
4582 cur_summary->nondeterministic = true;
4583 changed = true;
4584 }
4585 if (callee_summary_lto && !cur_summary_lto->nondeterministic
4586 && callee_summary_lto->nondeterministic
4587 && !ignore_nondeterminism_p (cur->decl, flags))
4588 {
4589 cur_summary_lto->nondeterministic = true;
4590 changed = true;
4591 }
4592 if (flags & (ECF_CONST(1 << 0) | ECF_NOVOPS(1 << 9)))
4593 continue;
4594
4595 /* We can not safely optimize based on summary of callee if it
4596 does not always bind to current def: it is possible that
4597 memory load was optimized out earlier which may not happen in
4598 the interposed variant. */
4599 if (!callee_edge->binds_to_current_def_p ())
4600 {
4601 if (cur_summary && !cur_summary->calls_interposable)
4602 {
4603 cur_summary->calls_interposable = true;
4604 changed = true;
4605 }
4606 if (cur_summary_lto && !cur_summary_lto->calls_interposable)
4607 {
4608 cur_summary_lto->calls_interposable = true;
4609 changed = true;
4610 }
4611 if (dump_file)
4612 fprintf (dump_file, " May not bind local;"
4613 " collapsing loads\n");
4614 }
4615
4616
4617 auto_vec <modref_parm_map, 32> parm_map;
4618 modref_parm_map chain_map;
4619 /* TODO: Once we get jump functions for static chains we could
4620 compute this. */
4621 chain_map.parm_index = MODREF_UNKNOWN_PARM;
4622
4623 compute_parm_map (callee_edge, &parm_map);
4624
4625 /* Merge in callee's information. */
4626 if (callee_summary)
4627 {
4628 changed |= cur_summary->loads->merge
4629 (callee_summary->loads, &parm_map,
4630 &chain_map, !first);
4631 if (!ignore_stores)
4632 {
4633 changed |= cur_summary->stores->merge
4634 (callee_summary->stores, &parm_map,
4635 &chain_map, !first);
4636 if (!cur_summary->writes_errno
4637 && callee_summary->writes_errno)
4638 {
4639 cur_summary->writes_errno = true;
4640 changed = true;
4641 }
4642 }
4643 }
4644 if (callee_summary_lto)
4645 {
4646 changed |= cur_summary_lto->loads->merge
4647 (callee_summary_lto->loads, &parm_map,
4648 &chain_map, !first);
4649 if (!ignore_stores)
4650 {
4651 changed |= cur_summary_lto->stores->merge
4652 (callee_summary_lto->stores, &parm_map,
4653 &chain_map, !first);
4654 if (!cur_summary_lto->writes_errno
4655 && callee_summary_lto->writes_errno)
4656 {
4657 cur_summary_lto->writes_errno = true;
4658 changed = true;
4659 }
4660 }
4661 }
4662 if (changed)
4663 remove_useless_summaries (node, &cur_summary,
4664 &cur_summary_lto,
4665 cur_ecf_flags);
4666 if (!cur_summary && !cur_summary_lto)
4667 break;
4668 if (dump_file && changed)
4669 {
4670 if (cur_summary)
4671 cur_summary->dump (dump_file);
4672 if (cur_summary_lto)
4673 cur_summary_lto->dump (dump_file);
4674 dump_modref_edge_summaries (dump_file, node, 4);
4675 }
4676 }
4677 }
4678 iteration++;
4679 first = false;
4680 }
4681 if (dump_file)
4682 fprintf (dump_file,
4683 "Propagation finished in %i iterations\n", iteration);
4684 bool pureconst = false;
4685 for (struct cgraph_node *cur = component_node; cur;
4686 cur = ((struct ipa_dfs_info *) cur->aux)->next_cycle)
4687 if (!cur->inlined_to && opt_for_fn (cur->decl, flag_ipa_pure_const)(opts_for_fn (cur->decl)->x_flag_ipa_pure_const))
4688 {
4689 modref_summary *summary = optimization_summaries
4690 ? optimization_summaries->get (cur)
4691 : NULLnullptr;
4692 modref_summary_lto *summary_lto = summaries_lto
4693 ? summaries_lto->get (cur)
4694 : NULLnullptr;
4695 if (summary && !summary->stores->every_base && !summary->stores->bases
4696 && !summary->nondeterministic)
4697 {
4698 if (!summary->loads->every_base && !summary->loads->bases
4699 && !summary->calls_interposable)
4700 pureconst |= ipa_make_function_const
4701 (cur, summary->side_effects, false);
4702 else
4703 pureconst |= ipa_make_function_pure
4704 (cur, summary->side_effects, false);
4705 }
4706 if (summary_lto && !summary_lto->stores->every_base
4707 && !summary_lto->stores->bases && !summary_lto->nondeterministic)
4708 {
4709 if (!summary_lto->loads->every_base && !summary_lto->loads->bases
4710 && !summary_lto->calls_interposable)
4711 pureconst |= ipa_make_function_const
4712 (cur, summary_lto->side_effects, false);
4713 else
4714 pureconst |= ipa_make_function_pure
4715 (cur, summary_lto->side_effects, false);
4716 }
4717 }
4718 return pureconst;
4719}
4720
4721/* Dump results of propagation in SCC rooted in COMPONENT_NODE. */
4722
4723static void
4724modref_propagate_dump_scc (cgraph_node *component_node)
4725{
4726 for (struct cgraph_node *cur = component_node; cur;
4727 cur = ((struct ipa_dfs_info *) cur->aux)->next_cycle)
4728 if (!cur->inlined_to)
4729 {
4730 modref_summary *cur_summary = optimization_summaries
4731 ? optimization_summaries->get (cur)
4732 : NULLnullptr;
4733 modref_summary_lto *cur_summary_lto = summaries_lto
4734 ? summaries_lto->get (cur)
4735 : NULLnullptr;
4736
4737 fprintf (dump_file, "Propagated modref for %s%s%s\n",
4738 cur->dump_name (),
4739 TREE_READONLY (cur->decl)((non_type_check ((cur->decl), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 4739, __FUNCTION__))->base.readonly_flag)
? " (const)" : "",
4740 DECL_PURE_P (cur->decl)((tree_check ((cur->decl), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 4740, __FUNCTION__, (FUNCTION_DECL)))->function_decl.pure_flag
)
? " (pure)" : "");
4741 if (optimization_summaries)
4742 {
4743 if (cur_summary)
4744 cur_summary->dump (dump_file);
4745 else
4746 fprintf (dump_file, " Not tracked\n");
4747 }
4748 if (summaries_lto)
4749 {
4750 if (cur_summary_lto)
4751 cur_summary_lto->dump (dump_file);
4752 else
4753 fprintf (dump_file, " Not tracked (lto)\n");
4754 }
4755 }
4756}
4757
4758/* Process escapes in SUM and merge SUMMARY to CUR_SUMMARY
4759 and SUMMARY_LTO to CUR_SUMMARY_LTO.
4760 Return true if something changed. */
4761
4762static bool
4763modref_merge_call_site_flags (escape_summary *sum,
4764 modref_summary *cur_summary,
4765 modref_summary_lto *cur_summary_lto,
4766 modref_summary *summary,
4767 modref_summary_lto *summary_lto,
4768 tree caller,
4769 cgraph_edge *e,
4770 int caller_ecf_flags,
4771 int callee_ecf_flags,
4772 bool binds_to_current_def)
4773{
4774 escape_entry *ee;
4775 unsigned int i;
4776 bool changed = false;
4777 bool ignore_stores = ignore_stores_p (caller, callee_ecf_flags);
4778
4779 /* If we have no useful info to propagate. */
4780 if ((!cur_summary || !cur_summary->arg_flags.length ())
4781 && (!cur_summary_lto || !cur_summary_lto->arg_flags.length ()))
4782 return false;
4783
4784 FOR_EACH_VEC_ELT (sum->esc, i, ee)for (i = 0; (sum->esc).iterate ((i), &(ee)); ++(i))
4785 {
4786 int flags = 0;
4787 int flags_lto = 0;
4788 /* Returning the value is already accounted to at local propagation. */
4789 int implicit_flags = EAF_NOT_RETURNED_DIRECTLY(1 << 6)
4790 | EAF_NOT_RETURNED_INDIRECTLY(1 << 7);
4791
4792 if (summary && ee->arg < summary->arg_flags.length ())
4793 flags = summary->arg_flags[ee->arg];
4794 if (summary_lto
4795 && ee->arg < summary_lto->arg_flags.length ())
4796 flags_lto = summary_lto->arg_flags[ee->arg];
4797 if (!ee->direct)
4798 {
4799 flags = deref_flags (flags, ignore_stores);
4800 flags_lto = deref_flags (flags_lto, ignore_stores);
4801 }
4802 if (ignore_stores)
4803 implicit_flags |= ignore_stores_eaf_flags;
4804 if (callee_ecf_flags & ECF_PURE(1 << 1))
4805 implicit_flags |= implicit_pure_eaf_flags;
4806 if (callee_ecf_flags & (ECF_CONST(1 << 0) | ECF_NOVOPS(1 << 9)))
4807 implicit_flags |= implicit_const_eaf_flags;
4808 class fnspec_summary *fnspec_sum = fnspec_summaries->get (e);
4809 if (fnspec_sum)
4810 {
4811 attr_fnspec fnspec (fnspec_sum->fnspec);
4812 implicit_flags |= fnspec.arg_eaf_flags (ee->arg);
4813 }
4814 if (!ee->direct)
4815 implicit_flags = deref_flags (implicit_flags, ignore_stores);
4816 flags |= implicit_flags;
4817 flags_lto |= implicit_flags;
4818 if (!binds_to_current_def && (flags || flags_lto))
4819 {
4820 flags = interposable_eaf_flags (flags, implicit_flags);
4821 flags_lto = interposable_eaf_flags (flags_lto, implicit_flags);
4822 }
4823 if (!(flags & EAF_UNUSED(1 << 1))
4824 && cur_summary && ee->parm_index < (int)cur_summary->arg_flags.length ())
4825 {
4826 eaf_flags_t &f = ee->parm_index == MODREF_RETSLOT_PARM
4827 ? cur_summary->retslot_flags
4828 : ee->parm_index == MODREF_STATIC_CHAIN_PARM
4829 ? cur_summary->static_chain_flags
4830 : cur_summary->arg_flags[ee->parm_index];
4831 if ((f & flags) != f)
4832 {
4833 f = remove_useless_eaf_flags
4834 (f & flags, caller_ecf_flags,
4835 VOID_TYPE_P (TREE_TYPE (TREE_TYPE (caller)))(((enum tree_code) (((contains_struct_check ((((contains_struct_check
((caller), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 4835, __FUNCTION__))->typed.type)), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 4835, __FUNCTION__))->typed.type))->base.code) == VOID_TYPE
)
);
4836 changed = true;
4837 }
4838 }
4839 if (!(flags_lto & EAF_UNUSED(1 << 1))
4840 && cur_summary_lto
4841 && ee->parm_index < (int)cur_summary_lto->arg_flags.length ())
4842 {
4843 eaf_flags_t &f = ee->parm_index == MODREF_RETSLOT_PARM
4844 ? cur_summary_lto->retslot_flags
4845 : ee->parm_index == MODREF_STATIC_CHAIN_PARM
4846 ? cur_summary_lto->static_chain_flags
4847 : cur_summary_lto->arg_flags[ee->parm_index];
4848 if ((f & flags_lto) != f)
4849 {
4850 f = remove_useless_eaf_flags
4851 (f & flags_lto, caller_ecf_flags,
4852 VOID_TYPE_P (TREE_TYPE (TREE_TYPE (caller)))(((enum tree_code) (((contains_struct_check ((((contains_struct_check
((caller), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 4852, __FUNCTION__))->typed.type)), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 4852, __FUNCTION__))->typed.type))->base.code) == VOID_TYPE
)
);
4853 changed = true;
4854 }
4855 }
4856 }
4857 return changed;
4858}
4859
4860/* Perform iterative dataflow on SCC component starting in COMPONENT_NODE
4861 and propagate arg flags. */
4862
4863static void
4864modref_propagate_flags_in_scc (cgraph_node *component_node)
4865{
4866 bool changed = true;
4867 int iteration = 0;
4868
4869 while (changed)
4870 {
4871 changed = false;
4872 for (struct cgraph_node *cur = component_node; cur;
4873 cur = ((struct ipa_dfs_info *) cur->aux)->next_cycle)
4874 {
4875 cgraph_node *node = cur->inlined_to ? cur->inlined_to : cur;
4876 modref_summary *cur_summary = optimization_summaries
4877 ? optimization_summaries->get (node)
4878 : NULLnullptr;
4879 modref_summary_lto *cur_summary_lto = summaries_lto
4880 ? summaries_lto->get (node)
4881 : NULLnullptr;
4882
4883 if (!cur_summary && !cur_summary_lto)
4884 continue;
4885 int caller_ecf_flags = flags_from_decl_or_type (cur->decl);
4886
4887 if (dump_file)
4888 fprintf (dump_file, " Processing %s%s%s\n",
4889 cur->dump_name (),
4890 TREE_READONLY (cur->decl)((non_type_check ((cur->decl), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 4890, __FUNCTION__))->base.readonly_flag)
? " (const)" : "",
4891 DECL_PURE_P (cur->decl)((tree_check ((cur->decl), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-modref.c"
, 4891, __FUNCTION__, (FUNCTION_DECL)))->function_decl.pure_flag
)
? " (pure)" : "");
4892
4893 for (cgraph_edge *e = cur->indirect_calls; e; e = e->next_callee)
4894 {
4895 escape_summary *sum = escape_summaries->get (e);
4896
4897 if (!sum || (e->indirect_info->ecf_flags
4898 & (ECF_CONST(1 << 0) | ECF_NOVOPS(1 << 9))))
4899 continue;
4900
4901 changed |= modref_merge_call_site_flags
4902 (sum, cur_summary, cur_summary_lto,
4903 NULLnullptr, NULLnullptr,
4904 node->decl,
4905 e,
4906 caller_ecf_flags,
4907 e->indirect_info->ecf_flags,
4908 false);
4909 }
4910
4911 if (!cur_summary && !cur_summary_lto)
4912 continue;
4913
4914 for (cgraph_edge *callee_edge = cur->callees; callee_edge;
4915 callee_edge = callee_edge->next_callee)
4916 {
4917 int ecf_flags = flags_from_decl_or_type
4918 (callee_edge->callee->decl);
4919 modref_summary *callee_summary = NULLnullptr;
4920 modref_summary_lto *callee_summary_lto = NULLnullptr;
4921 struct cgraph_node *callee;
4922
4923 if (ecf_flags & (ECF_CONST(1 << 0) | ECF_NOVOPS(1 << 9))
4924 || !callee_edge->inline_failed)
4925 continue;
4926 /* Get the callee and its summary. */
4927 enum availability avail;
4928 callee = callee_edge->callee->function_or_virtual_thunk_symbol
4929 (&avail, cur);
4930
4931 /* It is not necessary to re-process calls outside of the
4932 SCC component. */
4933 if (iteration > 0
4934 && (!callee->aux
4935 || ((struct ipa_dfs_info *)cur->aux)->scc_no
4936 != ((struct ipa_dfs_info *)callee->aux)->scc_no))
4937 continue;
4938
4939 escape_summary *sum = escape_summaries->get (callee_edge);
4940 if (!sum)
4941 continue;
4942
4943 if (dump_file)
4944 fprintf (dump_file, " Call to %s\n",
4945 callee_edge->callee->dump_name ());
4946
4947 if (avail <= AVAIL_INTERPOSABLE
4948 || callee_edge->call_stmt_cannot_inline_p)
4949 ;
4950 else
4951 {
4952 if (cur_summary)
4953 callee_summary = optimization_summaries->get (callee);
4954 if (cur_summary_lto)
4955 callee_summary_lto = summaries_lto->get (callee);
4956 }
4957 changed |= modref_merge_call_site_flags
4958 (sum, cur_summary, cur_summary_lto,
4959 callee_summary, callee_summary_lto,
4960 node->decl,
4961 callee_edge,
4962 caller_ecf_flags,
4963 ecf_flags,
4964 callee->binds_to_current_def_p ());
4965 if (dump_file && changed)
4966 {
4967 if (cur_summary)
4968 cur_summary->dump (dump_file);
4969 if (cur_summary_lto)
4970 cur_summary_lto->dump (dump_file);
4971 }
4972 }
4973 }
4974 iteration++;
4975 }
4976 if (dump_file)
4977 fprintf (dump_file,
4978 "Propagation of flags finished in %i iterations\n", iteration);
4979}
4980
4981} /* ANON namespace. */
4982
4983/* Call EDGE was inlined; merge summary from callee to the caller. */
4984
4985void
4986ipa_merge_modref_summary_after_inlining (cgraph_edge *edge)
4987{
4988 if (!summaries && !summaries_lto)
4989 return;
4990
4991 struct cgraph_node *to = (edge->caller->inlined_to
4992 ? edge->caller->inlined_to : edge->caller);
4993 class modref_summary *to_info = summaries ? summaries->get (to) : NULLnullptr;
4994 class modref_summary_lto *to_info_lto = summaries_lto
4995 ? summaries_lto->get (to) : NULLnullptr;
4996
4997 if (!to_info && !to_info_lto)
4998 {
4999 if (summaries)
5000 summaries->remove (edge->callee);
5001 if (summaries_lto)
5002 summaries_lto->remove (edge->callee);
5003 remove_modref_edge_summaries (edge->callee);
5004 return;
5005 }
5006
5007 class modref_summary *callee_info = summaries ? summaries->get (edge->callee)
5008 : NULLnullptr;
5009 class modref_summary_lto *callee_info_lto
5010 = summaries_lto ? summaries_lto->get (edge->callee) : NULLnullptr;
5011 int flags = flags_from_decl_or_type (edge->callee->decl);
5012 bool ignore_stores = ignore_stores_p (edge->caller->decl, flags);
5013
5014 if (!callee_info && to_info)
5015 {
5016 if (!(flags & (ECF_CONST(1 << 0) | ECF_NOVOPS(1 << 9))))
5017 to_info->loads->collapse ();
5018 if (!ignore_stores)
5019 to_info->stores->collapse ();
5020 }
5021 if (!callee_info_lto && to_info_lto)
5022 {
5023 if (!(flags & (ECF_CONST(1 << 0) | ECF_NOVOPS(1 << 9))))
5024 to_info_lto->loads->collapse ();
5025 if (!ignore_stores)
5026 to_info_lto->stores->collapse ();
5027 }
5028 if (callee_info || callee_info_lto)
5029 {
5030 auto_vec <modref_parm_map, 32> parm_map;
5031 modref_parm_map chain_map;
5032 /* TODO: Once we get jump functions for static chains we could
5033 compute parm_index. */
5034
5035 compute_parm_map (edge, &parm_map);
5036
5037 if (!ignore_stores)
5038 {
5039 if (to_info && callee_info)
5040 to_info->stores->merge (callee_info->stores, &parm_map,
5041 &chain_map, false);
5042 if (to_info_lto && callee_info_lto)
5043 to_info_lto->stores->merge (callee_info_lto->stores, &parm_map,
5044 &chain_map, false);
5045 }
5046 if (!(flags & (ECF_CONST(1 << 0) | ECF_NOVOPS(1 << 9))))
5047 {
5048 if (to_info && callee_info)
5049 to_info->loads->merge (callee_info->loads, &parm_map,
5050 &chain_map, false);
5051 if (to_info_lto && callee_info_lto)
5052 to_info_lto->loads->merge (callee_info_lto->loads, &parm_map,
5053 &chain_map, false);
5054 }
5055 }
5056
5057 /* Now merge escape summaries.
5058 For every escape to the callee we need to merge calle flags
5059 and remap calees escapes. */
5060 class escape_summary *sum = escape_summaries->get (edge);
5061 int max_escape = -1;
5062 escape_entry *ee;
5063 unsigned int i;
5064
5065 if (sum && !(flags & (ECF_CONST(1 << 0) | ECF_NOVOPS(1 << 9))))
5066 FOR_EACH_VEC_ELT (sum->esc, i, ee)for (i = 0; (sum->esc).iterate ((i), &(ee)); ++(i))
5067 if ((int)ee->arg > max_escape)
5068 max_escape = ee->arg;
5069
5070 auto_vec <vec <struct escape_map>, 32> emap (max_escape + 1);
5071 emap.safe_grow (max_escape + 1, true);
5072 for (i = 0; (int)i < max_escape + 1; i++)
5073 emap[i] = vNULL;
5074
5075 if (sum && !(flags & (ECF_CONST(1 << 0) | ECF_NOVOPS(1 << 9))))
5076 FOR_EACH_VEC_ELT (sum->esc, i, ee)for (i = 0; (sum->esc).iterate ((i), &(ee)); ++(i))
5077 {
5078 bool needed = false;
5079 /* TODO: We do not have jump functions for return slots, so we
5080 never propagate them to outer function. */
5081 if (ee->parm_index < 0)
5082 continue;
5083 if (to_info && (int)to_info->arg_flags.length () > ee->parm_index)
5084 {
5085 int flags = callee_info
5086 && callee_info->arg_flags.length () > ee->arg
5087 ? callee_info->arg_flags[ee->arg] : 0;
5088 if (!ee->direct)
5089 flags = deref_flags (flags, ignore_stores);
5090 else if (ignore_stores)
5091 flags |= ignore_stores_eaf_flags;
5092 flags |= ee->min_flags;
5093 to_info->arg_flags[ee->parm_index] &= flags;
5094 if (to_info->arg_flags[ee->parm_index])
5095 needed = true;
5096 }
5097 if (to_info_lto
5098 && (int)to_info_lto->arg_flags.length () > ee->parm_index)
5099 {
5100 int flags = callee_info_lto
5101 && callee_info_lto->arg_flags.length () > ee->arg
5102 ? callee_info_lto->arg_flags[ee->arg] : 0;
5103 if (!ee->direct)
5104 flags = deref_flags (flags, ignore_stores);
5105 else if (ignore_stores)
5106 flags |= ignore_stores_eaf_flags;
5107 flags |= ee->min_flags;
5108 to_info_lto->arg_flags[ee->parm_index] &= flags;
5109 if (to_info_lto->arg_flags[ee->parm_index])
5110 needed = true;
5111 }
5112 struct escape_map entry = {ee->parm_index, ee->direct};
5113 if (needed)
5114 emap[ee->arg].safe_push (entry);
5115 }
5116 update_escape_summary (edge->callee, emap, ignore_stores);
5117 for (i = 0; (int)i < max_escape + 1; i++)
5118 emap[i].release ();
5119 if (sum)
5120 escape_summaries->remove (edge);
5121
5122 if (summaries)
5123 {
5124 if (to_info && !to_info->useful_p (flags))
5125 {
5126 if (dump_file)
5127 fprintf (dump_file, "Removed mod-ref summary for %s\n",
5128 to->dump_name ());
5129 summaries->remove (to);
5130 to_info = NULLnullptr;
5131 }
5132 else if (to_info && dump_file)
5133 {
5134 if (dump_file)
5135 fprintf (dump_file, "Updated mod-ref summary for %s\n",
5136 to->dump_name ());
5137 to_info->dump (dump_file);
5138 }
5139 if (callee_info)
5140 summaries->remove (edge->callee);
5141 }
5142 if (summaries_lto)
5143 {
5144 if (to_info_lto && !to_info_lto->useful_p (flags))
5145 {
5146 if (dump_file)
5147 fprintf (dump_file, "Removed mod-ref summary for %s\n",
5148 to->dump_name ());
5149 summaries_lto->remove (to);
5150 to_info_lto = NULLnullptr;
5151 }
5152 else if (to_info_lto && dump_file)
5153 {
5154 if (dump_file)
5155 fprintf (dump_file, "Updated mod-ref summary for %s\n",
5156 to->dump_name ());
5157 to_info_lto->dump (dump_file);
5158 }
5159 if (callee_info_lto)
5160 summaries_lto->remove (edge->callee);
5161 }
5162 if (!to_info && !to_info_lto)
5163 remove_modref_edge_summaries (to);
5164 return;
5165}
5166
5167/* Run the IPA pass. This will take a function's summaries and calls and
5168 construct new summaries which represent a transitive closure. So that
5169 summary of an analyzed function contains information about the loads and
5170 stores that the function or any function that it calls does. */
5171
5172unsigned int
5173pass_ipa_modref::execute (function *)
5174{
5175 if (!summaries && !summaries_lto)
5176 return 0;
5177 bool pureconst = false;
5178
5179 if (optimization_summaries)
5180 ggc_delete (optimization_summaries);
5181 optimization_summaries = summaries;
5182 summaries = NULLnullptr;
5183
5184 struct cgraph_node **order = XCNEWVEC (struct cgraph_node *,((struct cgraph_node * *) xcalloc ((symtab->cgraph_count),
sizeof (struct cgraph_node *)))
5185 symtab->cgraph_count)((struct cgraph_node * *) xcalloc ((symtab->cgraph_count),
sizeof (struct cgraph_node *)))
;
5186 int order_pos;
5187 order_pos = ipa_reduced_postorder (order, true, ignore_edge);
5188 int i;
5189
5190 /* Iterate over all strongly connected components in post-order. */
5191 for (i = 0; i < order_pos; i++)
5192 {
5193 /* Get the component's representative. That's just any node in the
5194 component from which we can traverse the entire component. */
5195 struct cgraph_node *component_node = order[i];
5196
5197 if (dump_file)
5198 fprintf (dump_file, "\n\nStart of SCC component\n");
5199
5200 pureconst |= modref_propagate_in_scc (component_node);
5201 modref_propagate_flags_in_scc (component_node);
5202 if (optimization_summaries)
5203 for (struct cgraph_node *cur = component_node; cur;
5204 cur = ((struct ipa_dfs_info *) cur->aux)->next_cycle)
5205 if (modref_summary *sum = optimization_summaries->get (cur))
5206 sum->finalize (cur->decl);
5207 if (dump_file)
5208 modref_propagate_dump_scc (component_node);
5209 }
5210 cgraph_node *node;
5211 FOR_EACH_FUNCTION (node)for ((node) = symtab->first_function (); (node); (node) = symtab
->next_function ((node)))
5212 update_signature (node);
5213 if (summaries_lto)
5214 ((modref_summaries_lto *)summaries_lto)->propagated = true;
5215 ipa_free_postorder_info ();
5216 free (order);
5217 delete fnspec_summaries;
5218 fnspec_summaries = NULLnullptr;
5219 delete escape_summaries;
5220 escape_summaries = NULLnullptr;
5221
5222 /* If we posibly made constructors const/pure we may need to remove
5223 them. */
5224 return pureconst ? TODO_remove_functions(1 << 8) : 0;
5225}
5226
5227/* Summaries must stay alive until end of compilation. */
5228
5229void
5230ipa_modref_c_finalize ()
5231{
5232 if (optimization_summaries)
5233 ggc_delete (optimization_summaries);
5234 optimization_summaries = NULLnullptr;
5235 if (summaries_lto)
5236 ggc_delete (summaries_lto);
5237 summaries_lto = NULLnullptr;
5238 if (fnspec_summaries)
5239 delete fnspec_summaries;
5240 fnspec_summaries = NULLnullptr;
5241 if (escape_summaries)
5242 delete escape_summaries;
5243 escape_summaries = NULLnullptr;
5244}
5245
5246#include "gt-ipa-modref.h"

/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/symbol-summary.h

1/* Callgraph summary data structure.
2 Copyright (C) 2014-2021 Free Software Foundation, Inc.
3 Contributed by Martin Liska
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it under
8the terms of the GNU General Public License as published by the Free
9Software Foundation; either version 3, or (at your option) any later
10version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING3. If not see
19<http://www.gnu.org/licenses/>. */
20
21#ifndef GCC_SYMBOL_SUMMARY_H
22#define GCC_SYMBOL_SUMMARY_H
23
24/* Base class for function_summary and fast_function_summary classes. */
25
26template <class T>
27class function_summary_base
28{
29public:
30 /* Default construction takes SYMTAB as an argument. */
31 function_summary_base (symbol_table *symtab,
32 cgraph_node_hook symtab_insertion,
33 cgraph_node_hook symtab_removal,
34 cgraph_2node_hook symtab_duplication
35 CXX_MEM_STAT_INFO):
36 m_symtab (symtab), m_symtab_insertion (symtab_insertion),
37 m_symtab_removal (symtab_removal),
38 m_symtab_duplication (symtab_duplication),
39 m_symtab_insertion_hook (NULLnullptr), m_symtab_duplication_hook (NULLnullptr),
40 m_allocator ("function summary" PASS_MEM_STAT)
41 {
42 enable_insertion_hook ();
43 m_symtab_removal_hook
44 = m_symtab->add_cgraph_removal_hook (m_symtab_removal, this);
45 enable_duplication_hook ();
46 }
47
48 /* Basic implementation of insert operation. */
49 virtual void insert (cgraph_node *, T *)
50 {
51 /* In most cases, it makes no sense to create summaries without
52 initializing them. */
53 gcc_unreachable ()(fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/symbol-summary.h"
, 53, __FUNCTION__))
;
54 }
55
56 /* Basic implementation of removal operation. */
57 virtual void remove (cgraph_node *, T *) {}
58
59 /* Basic implementation of duplication operation. */
60 virtual void duplicate (cgraph_node *, cgraph_node *, T *, T *)
61 {
62 /* It makes no sense to not copy anything during duplication. */
63 gcc_unreachable ()(fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/symbol-summary.h"
, 63, __FUNCTION__))
;
64 }
65
66 /* Enable insertion hook invocation. */
67 void enable_insertion_hook ()
68 {
69 if (m_symtab_insertion_hook == NULLnullptr)
70 m_symtab_insertion_hook
71 = m_symtab->add_cgraph_insertion_hook (m_symtab_insertion, this);
72 }
73
74 /* Enable insertion hook invocation. */
75 void disable_insertion_hook ()
76 {
77 if (m_symtab_insertion_hook != NULLnullptr)
78 {
79 m_symtab->remove_cgraph_insertion_hook (m_symtab_insertion_hook);
80 m_symtab_insertion_hook = NULLnullptr;
81 }
82 }
83
84 /* Enable duplication hook invocation. */
85 void enable_duplication_hook ()
86 {
87 if (m_symtab_duplication_hook == NULLnullptr)
88 m_symtab_duplication_hook
89 = m_symtab->add_cgraph_duplication_hook (m_symtab_duplication, this);
90 }
91
92 /* Enable duplication hook invocation. */
93 void disable_duplication_hook ()
94 {
95 if (m_symtab_duplication_hook != NULLnullptr)
96 {
97 m_symtab->remove_cgraph_duplication_hook (m_symtab_duplication_hook);
98 m_symtab_duplication_hook = NULLnullptr;
99 }
100 }
101
102protected:
103 /* Allocates new data that are stored within map. */
104 T* allocate_new ()
105 {
106 /* Call gcc_internal_because we do not want to call finalizer for
107 a type T. We call dtor explicitly. */
108 return is_ggc () ? new (ggc_internal_alloc (sizeof (T))) T ()
109 : m_allocator.allocate () ;
110 }
111
112 /* Release an item that is stored within map. */
113 void release (T *item)
114 {
115 if (is_ggc ())
116 ggc_delete (item);
117 else
118 m_allocator.remove (item);
119 }
120
121 /* Unregister all call-graph hooks. */
122 void unregister_hooks ();
123
124 /* Symbol table the summary is registered to. */
125 symbol_table *m_symtab;
126
127 /* Insertion function defined by a summary. */
128 cgraph_node_hook m_symtab_insertion;
129 /* Removal function defined by a summary. */
130 cgraph_node_hook m_symtab_removal;
131 /* Duplication function defined by a summary. */
132 cgraph_2node_hook m_symtab_duplication;
133
134 /* Internal summary insertion hook pointer. */
135 cgraph_node_hook_list *m_symtab_insertion_hook;
136 /* Internal summary removal hook pointer. */
137 cgraph_node_hook_list *m_symtab_removal_hook;
138 /* Internal summary duplication hook pointer. */
139 cgraph_2node_hook_list *m_symtab_duplication_hook;
140
141private:
142 /* Return true when the summary uses GGC memory for allocation. */
143 virtual bool is_ggc () = 0;
144
145 /* Object allocator for heap allocation. */
146 object_allocator<T> m_allocator;
147};
148
149template <typename T>
150void
151function_summary_base<T>::unregister_hooks ()
152{
153 disable_insertion_hook ();
154 m_symtab->remove_cgraph_removal_hook (m_symtab_removal_hook);
155 disable_duplication_hook ();
156}
157
158/* We want to pass just pointer types as argument for function_summary
159 template class. */
160
/* Primary template is intentionally unusable: only the partial
   specialization for pointer types below may be instantiated.  */

template <class T>
class function_summary
{
private:
  function_summary();
};
167
168/* Function summary is a helper class that is used to associate a data structure
169 related to a callgraph node. Typical usage can be seen in IPA passes which
170 create a temporary pass-related structures. The summary class registers
171 hooks that are triggered when a new node is inserted, duplicated and deleted.
172 A user of a summary class can override virtual methods that are triggered by
173 the summary if such hook is triggered. Apart from a callgraph node, the user
174 is given a data structure tied to the node.
175
176 The function summary class can work both with a heap-allocated memory and
177 a memory gained by garbage collected memory. */
178
179template <class T>
180class GTY((user)) function_summary <T *>: public function_summary_base<T>
181{
182public:
183 /* Default construction takes SYMTAB as an argument. */
184 function_summary (symbol_table *symtab, bool ggc = false CXX_MEM_STAT_INFO);
185
186 /* Destructor. */
187 virtual ~function_summary ();
188
189 /* Traverses all summarys with a function F called with
190 ARG as argument. */
191 template<typename Arg, bool (*f)(const T &, Arg)>
192 void traverse (Arg a) const
193 {
194 m_map.template traverse <f> (a);
195 }
196
197 /* Getter for summary callgraph node pointer. If a summary for a node
198 does not exist it will be created. */
199 T* get_create (cgraph_node *node)
200 {
201 bool existed;
202 T **v = &m_map.get_or_insert (node->get_uid (), &existed);
203 if (!existed)
204 *v = this->allocate_new ();
205
206 return *v;
207 }
208
209 /* Getter for summary callgraph node pointer. */
210 T* get (cgraph_node *node) ATTRIBUTE_PURE__attribute__ ((__pure__))
211 {
212 T **v = m_map.get (node->get_uid ());
213 return v == NULLnullptr ? NULLnullptr : *v;
214 }
215
216 /* Remove node from summary. */
217 using function_summary_base<T>::remove;
218 void remove (cgraph_node *node)
219 {
220 int uid = node->get_uid ();
221 T **v = m_map.get (uid);
222 if (v)
223 {
224 m_map.remove (uid);
225 this->release (*v);
226 }
227 }
228
229 /* Return true if a summary for the given NODE already exists. */
230 bool exists (cgraph_node *node)
231 {
232 return m_map.get (node->get_uid ()) != NULLnullptr;
233 }
234
235 /* Symbol insertion hook that is registered to symbol table. */
236 static void symtab_insertion (cgraph_node *node, void *data);
237
238 /* Symbol removal hook that is registered to symbol table. */
239 static void symtab_removal (cgraph_node *node, void *data);
240
241 /* Symbol duplication hook that is registered to symbol table. */
242 static void symtab_duplication (cgraph_node *node, cgraph_node *node2,
243 void *data);
244
245protected:
246 /* Indication if we use ggc summary. */
247 bool m_ggc;
248
249private:
250 /* Indication if we use ggc summary. */
251 virtual bool is_ggc ()
252 {
253 return m_ggc;
254 }
255
256 typedef int_hash <int, 0, -1> map_hash;
257
258 /* Main summary store, where summary ID is used as key. */
259 hash_map <map_hash, T *> m_map;
260
261 template <typename U> friend void gt_ggc_mx (function_summary <U *> * const &);
262 template <typename U> friend void gt_pch_nx (function_summary <U *> * const &);
263 template <typename U> friend void gt_pch_nx (function_summary <U *> * const &,
264 gt_pointer_operator, void *);
265};
266
267template <typename T>
268function_summary<T *>::function_summary (symbol_table *symtab, bool ggc
269 MEM_STAT_DECL):
270 function_summary_base<T> (symtab, function_summary::symtab_insertion,
271 function_summary::symtab_removal,
272 function_summary::symtab_duplication
273 PASS_MEM_STAT),
274 m_ggc (ggc), m_map (13, ggc, true, GATHER_STATISTICS0 PASS_MEM_STAT) {}
275
276template <typename T>
277function_summary<T *>::~function_summary ()
278{
279 this->unregister_hooks ();
280
281 /* Release all summaries. */
282 typedef typename hash_map <map_hash, T *>::iterator map_iterator;
283 for (map_iterator it = m_map.begin (); it != m_map.end (); ++it)
284 this->release ((*it).second);
285}
286
287template <typename T>
288void
289function_summary<T *>::symtab_insertion (cgraph_node *node, void *data)
290{
291 gcc_checking_assert (node->get_uid ())((void)(!(node->get_uid ()) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/symbol-summary.h"
, 291, __FUNCTION__), 0 : 0))
;
292 function_summary *summary = (function_summary <T *> *) (data);
293 summary->insert (node, summary->get_create (node));
294}
295
296template <typename T>
297void
298function_summary<T *>::symtab_removal (cgraph_node *node, void *data)
299{
300 gcc_checking_assert (node->get_uid ())((void)(!(node->get_uid ()) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/symbol-summary.h"
, 300, __FUNCTION__), 0 : 0))
;
301 function_summary *summary = (function_summary <T *> *) (data);
302 summary->remove (node);
303}
304
305template <typename T>
306void
307function_summary<T *>::symtab_duplication (cgraph_node *node,
308 cgraph_node *node2, void *data)
309{
310 function_summary *summary = (function_summary <T *> *) (data);
311 T *v = summary->get (node);
312
313 if (v)
314 summary->duplicate (node, node2, v, summary->get_create (node2));
315}
316
317template <typename T>
318void
319gt_ggc_mx(function_summary<T *>* const &summary)
320{
321 gcc_checking_assert (summary->m_ggc)((void)(!(summary->m_ggc) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/symbol-summary.h"
, 321, __FUNCTION__), 0 : 0))
;
322 gt_ggc_mx (&summary->m_map);
323}
324
325template <typename T>
326void
327gt_pch_nx (function_summary<T *> *const &)
328{
329 gcc_unreachable ()(fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/symbol-summary.h"
, 329, __FUNCTION__))
;
330}
331
332template <typename T>
333void
334gt_pch_nx (function_summary<T *> *const &, gt_pointer_operator, void *)
335{
336 gcc_unreachable ()(fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/symbol-summary.h"
, 336, __FUNCTION__))
;
337}
338
339/* Help template from std c++11. */
340
/* Minimal local replacement for std::is_same (C++11): value is true
   only when both template arguments are the same type.  */

template<typename T, typename U>
struct is_same
{
  static const bool value = false;
};

/* Specialization for two identical types.  */
template<typename T>
struct is_same<T,T>
{
  static const bool value = true;
};
352
353/* We want to pass just pointer types as argument for fast_function_summary
354 template class. */
355
/* Primary template is intentionally unusable: only the partial
   specialization for pointer payloads below may be instantiated.  */

template <class T, class V>
class fast_function_summary
{
private:
  fast_function_summary ();
};
362
363/* Function vector summary is a fast implementation of function_summary that
364 utilizes vector as primary storage of summaries. */
365
366template <class T, class V>
367class GTY((user)) fast_function_summary <T *, V>
368 : public function_summary_base<T>
369{
370public:
371 /* Default construction takes SYMTAB as an argument. */
372 fast_function_summary (symbol_table *symtab CXX_MEM_STAT_INFO);
373
374 /* Destructor. */
375 virtual ~fast_function_summary ();
376
377 /* Traverses all summarys with a function F called with
378 ARG as argument. */
379 template<typename Arg, bool (*f)(const T &, Arg)>
380 void traverse (Arg a) const
381 {
382 for (unsigned i = 0; i < m_vector->length (); i++)
383 if ((*m_vector[i]) != NULLnullptr)
384 f ((*m_vector)[i], a);
385 }
386
387 /* Getter for summary callgraph node pointer. If a summary for a node
388 does not exist it will be created. */
389 T* get_create (cgraph_node *node)
390 {
391 int id = node->get_summary_id ();
392 if (id == -1)
393 id = this->m_symtab->assign_summary_id (node);
394
395 if ((unsigned int)id >= m_vector->length ())
396 vec_safe_grow_cleared (m_vector,
397 this->m_symtab->cgraph_max_summary_id);
398
399 if ((*m_vector)[id] == NULLnullptr)
400 (*m_vector)[id] = this->allocate_new ();
401
402 return (*m_vector)[id];
403 }
404
405 /* Getter for summary callgraph node pointer. */
406 T* get (cgraph_node *node) ATTRIBUTE_PURE__attribute__ ((__pure__))
407 {
408 return exists (node) ? (*m_vector)[node->get_summary_id ()] : NULLnullptr;
409 }
410
411 using function_summary_base<T>::remove;
412 void remove (cgraph_node *node)
413 {
414 if (exists (node))
415 {
416 int id = node->get_summary_id ();
417 this->release ((*m_vector)[id]);
418 (*m_vector)[id] = NULLnullptr;
419 }
420 }
421
422 /* Return true if a summary for the given NODE already exists. */
423 bool exists (cgraph_node *node)
424 {
425 int id = node->get_summary_id ();
426 return (id != -1
427 && (unsigned int)id < m_vector->length ()
428 && (*m_vector)[id] != NULLnullptr);
429 }
430
431 /* Symbol insertion hook that is registered to symbol table. */
432 static void symtab_insertion (cgraph_node *node, void *data);
433
434 /* Symbol removal hook that is registered to symbol table. */
435 static void symtab_removal (cgraph_node *node, void *data);
436
437 /* Symbol duplication hook that is registered to symbol table. */
438 static void symtab_duplication (cgraph_node *node, cgraph_node *node2,
439 void *data);
440
441private:
442 virtual bool is_ggc ();
443
444 /* Summary is stored in the vector. */
445 vec <T *, V> *m_vector;
446
447 template <typename U> friend void gt_ggc_mx (fast_function_summary <U *, va_gc> * const &);
448 template <typename U> friend void gt_pch_nx (fast_function_summary <U *, va_gc> * const &);
449 template <typename U> friend void gt_pch_nx (fast_function_summary <U *, va_gc> * const &,
450 gt_pointer_operator, void *);
451};
452
453template <typename T, typename V>
454fast_function_summary<T *, V>::fast_function_summary (symbol_table *symtab
455 MEM_STAT_DECL):
456 function_summary_base<T> (symtab,
457 fast_function_summary::symtab_insertion,
458 fast_function_summary::symtab_removal,
459 fast_function_summary::symtab_duplication
460 PASS_MEM_STAT), m_vector (NULLnullptr)
461{
462 vec_alloc (m_vector, 13 PASS_MEM_STAT);
463}
464
465template <typename T, typename V>
466fast_function_summary<T *, V>::~fast_function_summary ()
467{
468 this->unregister_hooks ();
469
470 /* Release all summaries. */
471 for (unsigned i = 0; i < m_vector->length (); i++)
472 if ((*m_vector)[i] != NULLnullptr)
473 this->release ((*m_vector)[i]);
474 vec_free (m_vector);
475}
476
477template <typename T, typename V>
478void
479fast_function_summary<T *, V>::symtab_insertion (cgraph_node *node, void *data)
480{
481 gcc_checking_assert (node->get_uid ())((void)(!(node->get_uid ()) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/symbol-summary.h"
, 481, __FUNCTION__), 0 : 0))
;
482 fast_function_summary *summary = (fast_function_summary <T *, V> *) (data);
483 summary->insert (node, summary->get_create (node));
484}
485
486template <typename T, typename V>
487void
488fast_function_summary<T *, V>::symtab_removal (cgraph_node *node, void *data)
489{
490 gcc_checking_assert (node->get_uid ())((void)(!(node->get_uid ()) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/symbol-summary.h"
, 490, __FUNCTION__), 0 : 0))
;
491 fast_function_summary *summary = (fast_function_summary <T *, V> *) (data);
492
493 if (summary->exists (node))
494 summary->remove (node);
495}
496
497template <typename T, typename V>
498void
499fast_function_summary<T *, V>::symtab_duplication (cgraph_node *node,
500 cgraph_node *node2,
501 void *data)
502{
503 fast_function_summary *summary = (fast_function_summary <T *, V> *) (data);
504 T *v = summary->get (node);
505
506 if (v)
507 {
508 T *duplicate = summary->get_create (node2);
509 summary->duplicate (node, node2, v, duplicate);
510 }
511}
512
513template <typename T, typename V>
514inline bool
515fast_function_summary<T *, V>::is_ggc ()
516{
517 return is_same<V, va_gc>::value;
518}
519
/* GC marking of a heap-allocated summary: nothing lives in GC memory,
   so there is nothing to mark.  */
template <typename T>
void
gt_ggc_mx (fast_function_summary<T *, va_heap>* const &)
{
}
525
/* PCH walking of a heap-allocated summary: nothing lives in GC memory,
   so there is nothing to note.  */
template <typename T>
void
gt_pch_nx (fast_function_summary<T *, va_heap>* const &)
{
}
531
/* PCH pointer-relocation pass for a heap-allocated summary: no GC
   pointers to relocate, so this is a no-op.  */
template <typename T>
void
gt_pch_nx (fast_function_summary<T *, va_heap>* const&, gt_pointer_operator,
	   void *)
{
}
538
539template <typename T>
540void
541gt_ggc_mx (fast_function_summary<T *, va_gc>* const &summary)
542{
543 ggc_test_and_set_mark (summary->m_vector)((summary->m_vector) != nullptr && ((void *) (summary
->m_vector)) != (void *) 1 && ! ggc_set_mark (summary
->m_vector))
;
544 gt_ggc_mx (summary->m_vector);
545}
546
547template <typename T>
548void
549gt_pch_nx (fast_function_summary<T *, va_gc> *const &)
550{
551 gcc_unreachable ()(fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/symbol-summary.h"
, 551, __FUNCTION__))
;
552}
553
554template <typename T>
555void
556gt_pch_nx (fast_function_summary<T *, va_gc> *const &, gt_pointer_operator,
557 void *)
558{
559 gcc_unreachable ()(fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/symbol-summary.h"
, 559, __FUNCTION__))
;
560}
561
562/* Base class for call_summary and fast_call_summary classes. */
563
564template <class T>
565class call_summary_base
566{
567public:
568 /* Default construction takes SYMTAB as an argument. */
569 call_summary_base (symbol_table *symtab, cgraph_edge_hook symtab_removal,
570 cgraph_2edge_hook symtab_duplication CXX_MEM_STAT_INFO):
571 m_symtab (symtab), m_symtab_removal (symtab_removal),
572 m_symtab_duplication (symtab_duplication), m_symtab_duplication_hook (NULLnullptr),
573 m_initialize_when_cloning (false),
574 m_allocator ("call summary" PASS_MEM_STAT)
575 {
576 m_symtab_removal_hook
577 = m_symtab->add_edge_removal_hook (m_symtab_removal, this);
578 enable_duplication_hook ();
579 }
580
581 /* Basic implementation of removal operation. */
582 virtual void remove (cgraph_edge *, T *) {}
583
584 /* Basic implementation of duplication operation. */
585 virtual void duplicate (cgraph_edge *, cgraph_edge *, T *, T *)
586 {
587 gcc_unreachable ()(fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/symbol-summary.h"
, 587, __FUNCTION__))
;
588 }
589
590 /* Enable duplication hook invocation. */
591 void enable_duplication_hook ()
592 {
593 if (m_symtab_duplication_hook == NULLnullptr)
594 m_symtab_duplication_hook
595 = m_symtab->add_edge_duplication_hook (m_symtab_duplication,
596 this);
597 }
598
599 /* Enable duplication hook invocation. */
600 void disable_duplication_hook ()
601 {
602 if (m_symtab_duplication_hook != NULLnullptr)
603 {
604 m_symtab->remove_edge_duplication_hook (m_symtab_duplication_hook);
605 m_symtab_duplication_hook = NULLnullptr;
606 }
607 }
608
609protected:
610 /* Allocates new data that are stored within map. */
611 T* allocate_new ()
612 {
613 /* Call gcc_internal_because we do not want to call finalizer for
614 a type T. We call dtor explicitly. */
615 return is_ggc () ? new (ggc_internal_alloc (sizeof (T))) T ()
616 : m_allocator.allocate ();
617 }
618
619 /* Release an item that is stored within map. */
620 void release (T *item)
621 {
622 if (is_ggc ())
623 ggc_delete (item);
624 else
625 m_allocator.remove (item);
626 }
627
628 /* Unregister all call-graph hooks. */
629 void unregister_hooks ();
630
631 /* Symbol table the summary is registered to. */
632 symbol_table *m_symtab;
633
634 /* Removal function defined by a summary. */
635 cgraph_edge_hook m_symtab_removal;
636 /* Duplication function defined by a summary. */
637 cgraph_2edge_hook m_symtab_duplication;
638
639 /* Internal summary removal hook pointer. */
640 cgraph_edge_hook_list *m_symtab_removal_hook;
641 /* Internal summary duplication hook pointer. */
642 cgraph_2edge_hook_list *m_symtab_duplication_hook;
643 /* Initialize summary for an edge that is cloned. */
644 bool m_initialize_when_cloning;
645
646private:
647 /* Return true when the summary uses GGC memory for allocation. */
648 virtual bool is_ggc () = 0;
649
650 /* Object allocator for heap allocation. */
651 object_allocator<T> m_allocator;
652};
653
654template <typename T>
655void
656call_summary_base<T>::unregister_hooks ()
657{
658 m_symtab->remove_edge_removal_hook (m_symtab_removal_hook);
659 disable_duplication_hook ();
660}
661
662/* An impossible class templated by non-pointers so, which makes sure that only
663 summaries gathering pointers can be created. */
664
template <class T>
class call_summary
{
private:
  /* Private and never defined: only the <T *> partial specialization
     below can be instantiated.  */
  call_summary ();
};
671
672/* Class to store auxiliary information about call graph edges. */
673
674template <class T>
675class GTY((user)) call_summary <T *>: public call_summary_base<T>
676{
677public:
678 /* Default construction takes SYMTAB as an argument. */
679 call_summary (symbol_table *symtab, bool ggc = false
680 CXX_MEM_STAT_INFO)
681 : call_summary_base<T> (symtab, call_summary::symtab_removal,
682 call_summary::symtab_duplication PASS_MEM_STAT),
683 m_ggc (ggc), m_map (13, ggc, true, GATHER_STATISTICS0 PASS_MEM_STAT) {}
684
685 /* Destructor. */
686 virtual ~call_summary ();
687
688 /* Traverses all summarys with an edge E called with
689 ARG as argument. */
690 template<typename Arg, bool (*f)(const T &, Arg)>
691 void traverse (Arg a) const
692 {
693 m_map.template traverse <f> (a);
694 }
695
696 /* Getter for summary callgraph edge pointer.
697 If a summary for an edge does not exist, it will be created. */
698 T* get_create (cgraph_edge *edge)
699 {
700 bool existed;
701 T **v = &m_map.get_or_insert (edge->get_uid (), &existed);
702 if (!existed)
703 *v = this->allocate_new ();
704
705 return *v;
706 }
707
708 /* Getter for summary callgraph edge pointer. */
709 T* get (cgraph_edge *edge) ATTRIBUTE_PURE__attribute__ ((__pure__))
710 {
711 T **v = m_map.get (edge->get_uid ());
712 return v == NULLnullptr ? NULLnullptr : *v;
17
Assuming the condition is false
18
'?' condition is false
19
Returning without writing to 'edge->call_stmt_cannot_inline_p', which participates in a condition later
20
Returning without writing to 'edge->callee'
34
Assuming the condition is false
35
'?' condition is false
36
Returning without writing to 'edge->callee'
713 }
714
715 /* Remove edge from summary. */
716 using call_summary_base<T>::remove;
717 void remove (cgraph_edge *edge)
718 {
719 int uid = edge->get_uid ();
720 T **v = m_map.get (uid);
721 if (v)
722 {
723 m_map.remove (uid);
724 this->release (*v);
725 }
726 }
727
728 /* Return true if a summary for the given EDGE already exists. */
729 bool exists (cgraph_edge *edge)
730 {
731 return m_map.get (edge->get_uid ()) != NULLnullptr;
732 }
733
734 /* Symbol removal hook that is registered to symbol table. */
735 static void symtab_removal (cgraph_edge *edge, void *data);
736
737 /* Symbol duplication hook that is registered to symbol table. */
738 static void symtab_duplication (cgraph_edge *edge1, cgraph_edge *edge2,
739 void *data);
740
741protected:
742 /* Indication if we use ggc summary. */
743 bool m_ggc;
744
745private:
746 /* Indication if we use ggc summary. */
747 virtual bool is_ggc ()
748 {
749 return m_ggc;
750 }
751
752 typedef int_hash <int, 0, -1> map_hash;
753
754 /* Main summary store, where summary ID is used as key. */
755 hash_map <map_hash, T *> m_map;
756
757 template <typename U> friend void gt_ggc_mx (call_summary <U *> * const &);
758 template <typename U> friend void gt_pch_nx (call_summary <U *> * const &);
759 template <typename U> friend void gt_pch_nx (call_summary <U *> * const &,
760 gt_pointer_operator, void *);
761};
762
763template <typename T>
764call_summary<T *>::~call_summary ()
765{
766 this->unregister_hooks ();
767
768 /* Release all summaries. */
769 typedef typename hash_map <map_hash, T *>::iterator map_iterator;
770 for (map_iterator it = m_map.begin (); it != m_map.end (); ++it)
771 this->release ((*it).second);
772}
773
774template <typename T>
775void
776call_summary<T *>::symtab_removal (cgraph_edge *edge, void *data)
777{
778 call_summary *summary = (call_summary <T *> *) (data);
779 summary->remove (edge);
780}
781
782template <typename T>
783void
784call_summary<T *>::symtab_duplication (cgraph_edge *edge1,
785 cgraph_edge *edge2, void *data)
786{
787 call_summary *summary = (call_summary <T *> *) (data);
788 T *edge1_summary = NULLnullptr;
789
790 if (summary->m_initialize_when_cloning)
791 edge1_summary = summary->get_create (edge1);
792 else
793 edge1_summary = summary->get (edge1);
794
795 if (edge1_summary)
796 summary->duplicate (edge1, edge2, edge1_summary,
797 summary->get_create (edge2));
798}
799
800template <typename T>
801void
802gt_ggc_mx(call_summary<T *>* const &summary)
803{
804 gcc_checking_assert (summary->m_ggc)((void)(!(summary->m_ggc) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/symbol-summary.h"
, 804, __FUNCTION__), 0 : 0))
;
805 gt_ggc_mx (&summary->m_map);
806}
807
808template <typename T>
809void
810gt_pch_nx (call_summary<T *> *const &)
811{
812 gcc_unreachable ()(fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/symbol-summary.h"
, 812, __FUNCTION__))
;
813}
814
815template <typename T>
816void
817gt_pch_nx (call_summary<T *> *const &, gt_pointer_operator, void *)
818{
819 gcc_unreachable ()(fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/symbol-summary.h"
, 819, __FUNCTION__))
;
820}
821
822