Bug Summary

File: build/gcc/ipa-sra.c
Warning: line 406, column 3
Called C++ object pointer is null
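
The flagged dereference is new_sum->m_parameters->quick_grow_cleared (param_count) in ipa_sra_function_summaries::duplicate. Since vec_safe_reserve_exact receives new_sum->m_parameters by reference and allocates the vector when it is still NULL, this looks like a modeling artifact rather than a real bug: the analyzer keeps alive a path on which the pointer stays null after the call. Below is a minimal sketch of the flagged shape, using hypothetical toy_* stand-ins rather than the real GCC types:

  // Stand-in for GCC's vec<T, va_gc>; only what the sketch needs.
  struct toy_vec
  {
    unsigned m_len;
    void grow_cleared (unsigned n) { m_len = n; }
  };

  // Stand-in for vec_safe_reserve_exact: takes the pointer by reference
  // and allocates when it is still null, mirroring the real helper.
  static void
  toy_reserve_exact (toy_vec *&v, unsigned n)
  {
    if (!v && n)
      v = new toy_vec ();
  }

  void
  duplicate_sketch (toy_vec *&dst, unsigned param_count)
  {
    if (!param_count)
      return;
    toy_reserve_exact (dst, param_count);
    // The analyzer warns on the next line if it cannot prove that the
    // reserve call assigned DST on the path where DST started out null.
    dst->grow_cleared (param_count);  // "Called C++ object pointer is null"
  }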

Annotated Source Code


clang -cc1 -cc1 -triple x86_64-unknown-linux-gnu -analyze -disable-free -disable-llvm-verifier -discard-value-names -main-file-name ipa-sra.c -analyzer-store=region -analyzer-opt-analyze-nested-blocks -analyzer-checker=core -analyzer-checker=apiModeling -analyzer-checker=unix -analyzer-checker=deadcode -analyzer-checker=cplusplus -analyzer-checker=security.insecureAPI.UncheckedReturn -analyzer-checker=security.insecureAPI.getpw -analyzer-checker=security.insecureAPI.gets -analyzer-checker=security.insecureAPI.mktemp -analyzer-checker=security.insecureAPI.mkstemp -analyzer-checker=security.insecureAPI.vfork -analyzer-checker=nullability.NullPassedToNonnull -analyzer-checker=nullability.NullReturnedFromNonnull -analyzer-output plist -w -setup-static-analyzer -analyzer-config-compatibility-mode=true -mrelocation-model static -mframe-pointer=none -fmath-errno -fno-rounding-math -mconstructor-aliases -munwind-tables -target-cpu x86-64 -tune-cpu generic -debugger-tuning=gdb -fcoverage-compilation-dir=/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/objdir/gcc -resource-dir /usr/lib64/clang/13.0.0 -D IN_GCC -D HAVE_CONFIG_H -I . -I . -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/. -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../include -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libcpp/include -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libcody -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libdecnumber -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libdecnumber/bid -I ../libdecnumber -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libbacktrace -internal-isystem /usr/bin/../lib64/gcc/x86_64-suse-linux/11/../../../../include/c++/11 -internal-isystem /usr/bin/../lib64/gcc/x86_64-suse-linux/11/../../../../include/c++/11/x86_64-suse-linux -internal-isystem /usr/bin/../lib64/gcc/x86_64-suse-linux/11/../../../../include/c++/11/backward -internal-isystem /usr/lib64/clang/13.0.0/include -internal-isystem /usr/local/include -internal-isystem /usr/bin/../lib64/gcc/x86_64-suse-linux/11/../../../../x86_64-suse-linux/include -internal-externc-isystem /include -internal-externc-isystem /usr/include -O2 -Wno-narrowing -Wwrite-strings -Wno-error=format-diag -Wno-long-long -Wno-variadic-macros -Wno-overlength-strings -fdeprecated-macro -fdebug-compilation-dir=/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/objdir/gcc -ferror-limit 19 -fno-rtti -fgnuc-version=4.2.1 -vectorize-loops -vectorize-slp -analyzer-output=plist-html -analyzer-config silence-checkers=core.NullDereference -faddrsig -D__GCC_HAVE_DWARF2_CFI_ASM=1 -o /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/objdir/clang-static-analyzer/2021-11-20-133755-20252-1/report-MtZoLe.plist -x c++ /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-sra.c

/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-sra.c

1/* Interprocedural scalar replacement of aggregates
2 Copyright (C) 2008-2021 Free Software Foundation, Inc.
3
4 Contributed by Martin Jambor <mjambor@suse.cz>
5
6This file is part of GCC.
7
8GCC is free software; you can redistribute it and/or modify it under
9the terms of the GNU General Public License as published by the Free
10Software Foundation; either version 3, or (at your option) any later
11version.
12
13GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14WARRANTY; without even the implied warranty of MERCHANTABILITY or
15FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16for more details.
17
18You should have received a copy of the GNU General Public License
19along with GCC; see the file COPYING3. If not see
20<http://www.gnu.org/licenses/>. */
21
22/* IPA-SRA is an interprocedural pass that removes unused function return
23 values (turning functions returning a value which is never used into void
24 functions) and removes unused function parameters. It can also replace an
25 aggregate parameter by a set of other parameters representing part of the
26 original, turning those passed by reference into new ones which pass the
27 value directly.
28
29 The pass is a true IPA one, which means that it works in three stages in
30 order to be able to take advantage of LTO. First, summaries about functions
31 and each call are generated. Function summaries (often called call graph
32 node summaries) contain mainly information about which parameters are
33 potential transformation candidates and which bits of candidates are
34 accessed. We differentiate between accesses done as a part of a call
35 statement (which might not be necessary if the callee is also transformed)
36 and others (which are mandatory). Call summaries (often called call graph
37 edge summaries) contain information about which function formal parameters
38 feed into which actual call arguments so that if two parameters are only
39 used in a sum which is then passed to another function which then however
40 does not use this parameter, all three parameters of the two functions can
41 be eliminated. Edge summaries also have flags whether the return value is
42 used or if it is only returned in the caller too. In LTO mode these
43 summaries are then streamed to the object file in the compilation phase and
44 streamed back in in the WPA analysis stage.
45
46 The interprocedural analysis phase traverses the graph in topological order
47 in two sweeps, one in each direction. First, from callees to callers for
48 parameter removal and splitting. Each strongly-connected component is
49 processed iteratively until the situation in it stabilizes. The pass from
50 callers to callees is then carried out to remove unused return values in a
51 very similar fashion.
52
53 Because parameter manipulation has big implications for call redirection
54 which is done only after all call graph nodes materialize, the
55 transformation phase is not part of this pass but is carried out by the
56 clone materialization and edge redirection itself, see comments in
57 ipa-param-manipulation.h for more details. */
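
As a concrete illustration of the transformations described above (an invented example, not code from this file): IPA-SRA can replace a by-reference aggregate parameter with the scalar pieces the callee actually reads, redirecting all callers to the new clone.

  /* Before: P is passed by reference but only fields x and y are read.  */
  struct pt { long x, y, z; };

  static long
  norm1 (struct pt *p)
  {
    return p->x + p->y;
  }

  long
  user (void)
  {
    struct pt v = { 1, 2, 3 };
    return norm1 (&v);
  }

  /* After (conceptually): the clone created by IPA-SRA passes the two
     accessed pieces by value, so USER is redirected to call
     norm1_isra (v.x, v.y) and the aggregate need not live in memory.  */
  static long
  norm1_isra (long x, long y)
  {
    return x + y;
  }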
58
59
60
61#include "config.h"
62#include "system.h"
63#include "coretypes.h"
64#include "backend.h"
65#include "tree.h"
66#include "gimple.h"
67#include "predict.h"
68#include "tree-pass.h"
69#include "ssa.h"
70#include "cgraph.h"
71#include "gimple-pretty-print.h"
72#include "alias.h"
73#include "tree-eh.h"
74#include "gimple-iterator.h"
75#include "gimple-walk.h"
76#include "tree-dfa.h"
77#include "tree-sra.h"
78#include "alloc-pool.h"
79#include "symbol-summary.h"
80#include "dbgcnt.h"
81#include "tree-inline.h"
82#include "ipa-utils.h"
83#include "builtins.h"
84#include "cfganal.h"
85#include "tree-streamer.h"
86#include "internal-fn.h"
87#include "symtab-clones.h"
88#include "attribs.h"
89
90static void ipa_sra_summarize_function (cgraph_node *);
91
92/* Bits used to track size of an aggregate in bytes interprocedurally. */
93#define ISRA_ARG_SIZE_LIMIT_BITS 16
94#define ISRA_ARG_SIZE_LIMIT (1 << ISRA_ARG_SIZE_LIMIT_BITS)
95/* How many parameters can feed into a call actual argument and still be
96 tracked. */
97#define IPA_SRA_MAX_PARAM_FLOW_LEN 7
98
99/* Structure describing accesses to a specific portion of an aggregate
100 parameter, as given by the offset and size. Any smaller accesses that occur
101 within a function that fall within another access form a tree. The pass
102 cannot analyze parameters with only partially overlapping accesses. */
103
104struct GTY(()) param_access
105{
106 /* Type that a potential replacement should have. This field only has
107 meaning in the summary building and transformation phases, when it is
108 reconstructed from the body. Must not be touched in IPA analysis
109 stage. */
110 tree type;
111
112 /* Alias reference type to be used in MEM_REFs when adjusting caller
113 arguments. */
114 tree alias_ptr_type;
115
116 /* Values returned by get_ref_base_and_extent but converted to bytes and
117 stored as unsigned ints. */
118 unsigned unit_offset;
119 unsigned unit_size : ISRA_ARG_SIZE_LIMIT_BITS;
120
121 /* Set once we are sure that the access will really end up in a potentially
122 transformed function - initially not set for portions of formal parameters
123 that are only used as actual function arguments passed to callees. */
124 unsigned certain : 1;
125 /* Set if the access has a reversed scalar storage order. */
126 unsigned reverse : 1;
127};
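
To make the tree-of-accesses notion above concrete (an invented example with an assumed LP64 layout):

  /* Assume offsets 0, 4 and 8 for the three fields.  */
  struct s { int a; int b; long c; };

  long
  reads (struct s param)
  {
    /* param.a is an access at unit_offset 0, size 4; param.c is one at
       unit_offset 8, size 8.  They are disjoint, so both can live as
       siblings in the access tree and PARAM stays a split candidate.
       An access covering bytes 4..11 instead would overlap param.c only
       partially and make the parameter unanalyzable.  */
    return param.a + param.c;
  }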
128
129/* This structure has the same purpose as the one above and additionally it
130 contains some fields that are only necessary in the summary generation
131 phase. */
132
133struct gensum_param_access
134{
135 /* Values returned by get_ref_base_and_extent. */
136 HOST_WIDE_INT offset;
137 HOST_WIDE_INT size;
138
139 /* if this access has any children (in terms of the definition above), this
140 points to the first one. */
141 struct gensum_param_access *first_child;
142 /* In intraprocedural SRA, pointer to the next sibling in the access tree as
143 described above. */
144 struct gensum_param_access *next_sibling;
145
146 /* Type that a potential replacement should have. This field only has
147 meaning in the summary building and transformation phases, when it is
148 reconstructed from the body. Must not be touched in IPA analysis
149 stage. */
150 tree type;
151 /* Alias reference type to be used in MEM_REFs when adjusting caller
152 arguments. */
153 tree alias_ptr_type;
154
155 /* Whether there have been writes to or reads from this exact location,
156 other than as arguments to a function call that can be tracked. */
157 bool nonarg;
158
159 /* Set if the access has a reversed scalar storage order. */
160 bool reverse;
161};
162
163/* Summary describing a parameter in the IPA stages. */
164
165struct GTY(()) isra_param_desc
166{
167 /* List of access representatives to the parameters, sorted according to
168 their offset. */
169 vec <param_access *, va_gc> *accesses;
170
171 /* Unit size limit of total size of all replacements. */
172 unsigned param_size_limit : ISRA_ARG_SIZE_LIMIT_BITS;
173 /* Sum of unit sizes of all certain replacements. */
174 unsigned size_reached : ISRA_ARG_SIZE_LIMIT_BITS;
175
176 /* A parameter that is used only in call arguments and can be removed if all
177 concerned actual arguments are removed. */
178 unsigned locally_unused : 1;
179 /* An aggregate that is a candidate for breaking up or complete removal. */
180 unsigned split_candidate : 1;
181 /* Is this a parameter passing stuff by reference? */
182 unsigned by_ref : 1;
183};
184
185/* Structure used when generating summaries that describes a parameter. */
186
187struct gensum_param_desc
188{
189 /* Roots of param_accesses. */
190 gensum_param_access *accesses;
191 /* Number of accesses in the access tree rooted in field accesses. */
192 unsigned access_count;
193
194 /* If the below is non-zero, this is the number of uses as actual arguments. */
195 int call_uses;
196 /* Number of times this parameter has been directly passed to a called function. */
197 unsigned ptr_pt_count;
198
199 /* Size limit of total size of all replacements. */
200 unsigned param_size_limit;
201 /* Sum of sizes of nonarg accesses. */
202 unsigned nonarg_acc_size;
203
204 /* A parameter that is used only in call arguments and can be removed if all
205 concerned actual arguments are removed. */
206 bool locally_unused;
207 /* An aggregate that is a candidate for breaking up or a pointer passing data
208 by reference that is a candidate for being converted to a set of
209 parameters passing those data by value. */
210 bool split_candidate;
211 /* Is this a parameter passing stuff by reference? */
212 bool by_ref;
213
215 /* The number of this parameter as ordered in the function decl. */
215 int param_number;
216 /* For parameters passing data by reference, this is parameter index to
217 compute indices to bb_dereferences. */
218 int deref_index;
219};
220
221/* Properly deallocate accesses of DESC. TODO: Since this data structure is
222 not in GC memory, this is not necessary and we can consider removing the
223 function. */
224
225static void
226free_param_decl_accesses (isra_param_desc *desc)
227{
228 unsigned len = vec_safe_length (desc->accesses);
229 for (unsigned i = 0; i < len; ++i)
230 ggc_free ((*desc->accesses)[i]);
231 vec_free (desc->accesses);
232}
233
234/* Class used to convey information about functions from the
235 intra-procedural analysis stage to inter-procedural one. */
236
237class GTY((for_user)) isra_func_summary
238{
239public:
240 /* initialize the object. */
241
242 isra_func_summary ()
243 : m_parameters (NULL), m_candidate (false), m_returns_value (false),
244 m_return_ignored (false), m_queued (false)
245 {}
246
247 /* Destroy m_parameters. */
248
249 ~isra_func_summary ();
250
251 /* Mark the function as not a candidate for any IPA-SRA transformation.
252 Return true if it was a candidate until now. */
253
254 bool zap ();
255
256 /* Vector of parameter descriptors corresponding to the function being
257 analyzed. */
258 vec<isra_param_desc, va_gc> *m_parameters;
259
260 /* Whether the node is even a candidate for any IPA-SRA transformation at
261 all. */
262 unsigned m_candidate : 1;
263
264 /* Whether the original function returns any value. */
265 unsigned m_returns_value : 1;
266
267 /* Set to true if all call statements do not actually use the returned
268 value. */
269
270 unsigned m_return_ignored : 1;
271
272 /* Whether the node is already queued in IPA SRA stack during processing of
273 call graph SCCs. */
274
275 unsigned m_queued : 1;
276};
277
278/* Clean up and deallocate what isra_func_summary points to. TODO: Since this data
279 structure is not in GC memory, this is not necessary and we can consider
280 removing the destructor. */
281
282isra_func_summary::~isra_func_summary ()
283{
284 unsigned len = vec_safe_length (m_parameters);
285 for (unsigned i = 0; i < len; ++i)
286 free_param_decl_accesses (&(*m_parameters)[i]);
287 vec_free (m_parameters);
288}
289
290
291/* Mark the function as not a candidate for any IPA-SRA transformation. Return
292 true if it was a candidate until now. */
293
294bool
295isra_func_summary::zap ()
296{
297 bool ret = m_candidate;
298 m_candidate = false;
299
300 unsigned len = vec_safe_length (m_parameters);
301 for (unsigned i = 0; i < len; ++i)
302 free_param_decl_accesses (&(*m_parameters)[i]);
303 vec_free (m_parameters);
304
305 return ret;
306}
307
308/* Structure to describe which formal parameters feed into a particular actual
309 argument. */
310
311struct isra_param_flow
312{
313 /* Number of elements in array inputs that contain valid data. */
314 char length;
315 /* Indices of formal parameters that feed into the described actual argument.
316 If aggregate_pass_through or pointer_pass_through below are true, it must
317 contain exactly one element which is passed through from a formal
317 parameter of the given number. Otherwise, the array contains indices of
319 callee's formal parameters which are used to calculate value of this
320 actual argument. */
321 unsigned char inputs[IPA_SRA_MAX_PARAM_FLOW_LEN];
322
323 /* Offset within the formal parameter. */
324 unsigned unit_offset;
325 /* Size of the portion of the formal parameter that is being passed. */
326 unsigned unit_size : ISRA_ARG_SIZE_LIMIT_BITS;
327
328 /* True when the value of this actual argument is a portion of a formal
329 parameter. */
330 unsigned aggregate_pass_through : 1;
331 /* True when the value of this actual argument is a verbatim pass through of an
332 obtained pointer. */
333 unsigned pointer_pass_through : 1;
334 /* True when it is safe to copy access candidates here from the callee, which
335 would mean introducing dereferences into callers of the caller. */
336 unsigned safe_to_import_accesses : 1;
337};
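
The inputs[] array realizes the "parameters used only in a sum" scenario from the file header (an invented example):

  /* Both formal parameters of CALLER flow into the single actual
     argument of the call to CALLEE, so the isra_param_flow describing
     that argument would conceptually record length == 2 and
     inputs[] == {0, 1}.  If CALLEE turns out not to use its parameter,
     A, B and UNUSED can all be removed.  */
  static int
  callee (int unused)
  {
    (void) unused;
    return 7;
  }

  int
  caller (int a, int b)
  {
    return callee (a + b);
  }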
338
339/* Structure used to convey information about calls from the intra-procedural
340 analysis stage to inter-procedural one. */
341
342class isra_call_summary
343{
344public:
345 isra_call_summary ()
346 : m_arg_flow (), m_return_ignored (false), m_return_returned (false),
347 m_bit_aligned_arg (false), m_before_any_store (false)
348 {}
349
350 void init_inputs (unsigned arg_count);
351 void dump (FILE *f);
352
353 /* Information about what formal parameters of the caller are used to compute
354 individual actual arguments of this call. */
355 auto_vec <isra_param_flow> m_arg_flow;
356
357 /* Set to true if the call statement does not have a LHS. */
358 unsigned m_return_ignored : 1;
359
360 /* Set to true if the LHS of call statement is only used to construct the
361 return value of the caller. */
362 unsigned m_return_returned : 1;
363
364 /* Set when any of the call arguments are not byte-aligned. */
365 unsigned m_bit_aligned_arg : 1;
366
367 /* Set to true if the call happened before any (other) store to memory in the
368 caller. */
369 unsigned m_before_any_store : 1;
370};
371
372/* Class to manage function summaries. */
373
374class GTY((user)) ipa_sra_function_summaries
375 : public function_summary <isra_func_summary *>
376{
377public:
378 ipa_sra_function_summaries (symbol_table *table, bool ggc):
379 function_summary<isra_func_summary *> (table, ggc) { }
380
381 virtual void duplicate (cgraph_node *, cgraph_node *,
382 isra_func_summary *old_sum,
383 isra_func_summary *new_sum);
384 virtual void insert (cgraph_node *, isra_func_summary *);
385};
386
387/* Hook that is called by summary when a node is duplicated. */
388
389void
390ipa_sra_function_summaries::duplicate (cgraph_node *, cgraph_node *,
391 isra_func_summary *old_sum,
392 isra_func_summary *new_sum)
393{
394 /* TODO: Somehow stop copying when ISRA is doing the cloning, it is
395 useless. */
396 new_sum->m_candidate = old_sum->m_candidate;
397 new_sum->m_returns_value = old_sum->m_returns_value;
398 new_sum->m_return_ignored = old_sum->m_return_ignored;
399 gcc_assert (!old_sum->m_queued);
1. Assuming field 'm_queued' is 0
2. '?' condition is false
400 new_sum->m_queued = false;
401
402 unsigned param_count = vec_safe_length (old_sum->m_parameters);
3. Calling 'vec_safe_length<isra_param_desc, va_gc>'
7. Returning from 'vec_safe_length<isra_param_desc, va_gc>'
403 if (!param_count)
8. Assuming 'param_count' is not equal to 0
9. Taking false branch
404 return;
405 vec_safe_reserve_exact (new_sum->m_parameters, param_count);
10. Calling 'vec_safe_reserve_exact<isra_param_desc, va_gc>'
22. Returning from 'vec_safe_reserve_exact<isra_param_desc, va_gc>'
406 new_sum->m_parameters->quick_grow_cleared (param_count);
23. Called C++ object pointer is null
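
Note that param_count is known to be non-zero here (the early return guarded by the check at line 403 was not taken) and vec_safe_reserve_exact takes new_sum->m_parameters by reference, allocating it when it is null, so this appears to be a false positive stemming from how the analyzer models the allocation between steps 10 and 22 rather than a reachable null dereference.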
407 for (unsigned i = 0; i < param_count; i++)
408 {
409 isra_param_desc *s = &(*old_sum->m_parameters)[i];
410 isra_param_desc *d = &(*new_sum->m_parameters)[i];
411
412 d->param_size_limit = s->param_size_limit;
413 d->size_reached = s->size_reached;
414 d->locally_unused = s->locally_unused;
415 d->split_candidate = s->split_candidate;
416 d->by_ref = s->by_ref;
417
418 unsigned acc_count = vec_safe_length (s->accesses);
419 vec_safe_reserve_exact (d->accesses, acc_count);
420 for (unsigned j = 0; j < acc_count; j++)
421 {
422 param_access *from = (*s->accesses)[j];
423 param_access *to = ggc_cleared_alloc<param_access> ();
424 to->type = from->type;
425 to->alias_ptr_type = from->alias_ptr_type;
426 to->unit_offset = from->unit_offset;
427 to->unit_size = from->unit_size;
428 to->certain = from->certain;
429 d->accesses->quick_push (to);
430 }
431 }
432}
433
434/* Pointer to the pass function summary holder. */
435
436static GTY(()) ipa_sra_function_summaries *func_sums;
437
438/* Hook that is called by summary when new node appears. */
439
440void
441ipa_sra_function_summaries::insert (cgraph_node *node, isra_func_summary *)
442{
443 if (opt_for_fn (node->decl, flag_ipa_sra))
444 {
445 push_cfun (DECL_STRUCT_FUNCTION (node->decl));
446 ipa_sra_summarize_function (node);
447 pop_cfun ();
448 }
449 else
450 func_sums->remove (node);
451}
452
453/* Class to manage call summaries. */
454
455class ipa_sra_call_summaries: public call_summary <isra_call_summary *>
456{
457public:
458 ipa_sra_call_summaries (symbol_table *table):
459 call_summary<isra_call_summary *> (table) { }
460
461 /* Duplicate info when an edge is cloned. */
462 virtual void duplicate (cgraph_edge *, cgraph_edge *,
463 isra_call_summary *old_sum,
464 isra_call_summary *new_sum);
465};
466
467static ipa_sra_call_summaries *call_sums;
468
469
470/* Initialize m_arg_flow of a particular instance of isra_call_summary.
471 ARG_COUNT is the number of actual arguments passed. */
472
473void
474isra_call_summary::init_inputs (unsigned arg_count)
475{
476 if (arg_count == 0)
477 {
478 gcc_checking_assert (m_arg_flow.length () == 0);
479 return;
480 }
481 if (m_arg_flow.length () == 0)
482 {
483 m_arg_flow.reserve_exact (arg_count);
484 m_arg_flow.quick_grow_cleared (arg_count);
485 }
486 else
487 gcc_checking_assert (arg_count == m_arg_flow.length ());
488}
489
490/* Dump all information in call summary to F. */
491
492void
493isra_call_summary::dump (FILE *f)
494{
495 if (m_return_ignored)
496 fprintf (f, " return value ignored\n");
497 if (m_return_returned)
498 fprintf (f, " return value used only to compute caller return value\n");
499 if (m_before_any_store)
500 fprintf (f, " happens before any store to memory\n");
501 for (unsigned i = 0; i < m_arg_flow.length (); i++)
502 {
503 fprintf (f, " Parameter %u:\n", i);
504 isra_param_flow *ipf = &m_arg_flow[i];
505
506 if (ipf->length)
507 {
508 bool first = true;
509 fprintf (f, " Scalar param sources: ");
510 for (int j = 0; j < ipf->length; j++)
511 {
512 if (!first)
513 fprintf (f, ", ");
514 else
515 first = false;
516 fprintf (f, "%i", (int) ipf->inputs[j]);
517 }
518 fprintf (f, "\n");
519 }
520 if (ipf->aggregate_pass_through)
521 fprintf (f, " Aggregate pass through from the param given above, "
522 "unit offset: %u , unit size: %u\n",
523 ipf->unit_offset, ipf->unit_size);
524 if (ipf->pointer_pass_through)
525 fprintf (f, " Pointer pass through from the param given above, "
526 "safe_to_import_accesses: %u\n", ipf->safe_to_import_accesses);
527 }
528}
529
530/* Duplicate edge summary when an edge is cloned. */
531
532void
533ipa_sra_call_summaries::duplicate (cgraph_edge *, cgraph_edge *,
534 isra_call_summary *old_sum,
535 isra_call_summary *new_sum)
536{
537 unsigned arg_count = old_sum->m_arg_flow.length ();
538 new_sum->init_inputs (arg_count);
539 for (unsigned i = 0; i < arg_count; i++)
540 new_sum->m_arg_flow[i] = old_sum->m_arg_flow[i];
541
542 new_sum->m_return_ignored = old_sum->m_return_ignored;
543 new_sum->m_return_returned = old_sum->m_return_returned;
544 new_sum->m_bit_aligned_arg = old_sum->m_bit_aligned_arg;
545 new_sum->m_before_any_store = old_sum->m_before_any_store;
546}
547
548
549/* With all GTY stuff done, we can move to anonymous namespace. */
550namespace {
551/* Quick mapping from a decl to its param descriptor. */
552
553hash_map<tree, gensum_param_desc *> *decl2desc;
554
555/* Countdown of allowed Alias analysis steps during summary building. */
556
557int aa_walking_limit;
558
559/* This is a table in which for each basic block and parameter there is a
560 distance (offset + size) in that parameter which is dereferenced and
561 accessed in that BB. */
562HOST_WIDE_INT *bb_dereferences = NULL;
563/* How many by-reference parameters there are in the current function. */
564int by_ref_count;
565
566/* Bitmap of BBs that can cause the function to "stop" progressing by
567 returning, throwing externally, looping infinitely or calling a function
568 which might abort etc.. */
569bitmap final_bbs;
570
571/* Obstack to allocate various small structures required only when generating
572 summary for a function. */
573struct obstack gensum_obstack;
574
575/* Return false if the function is apparently unsuitable for IPA-SRA based on its
576 attributes, return true otherwise. NODE is the cgraph node of the current
577 function. */
578
579static bool
580ipa_sra_preliminary_function_checks (cgraph_node *node)
581{
582 if (!node->can_change_signature)
583 {
584 if (dump_file)
585 fprintf (dump_file, "Function cannot change signature.\n");
586 return false;
587 }
588
589 if (!tree_versionable_function_p (node->decl))
590 {
591 if (dump_file)
592 fprintf (dump_file, "Function is not versionable.\n");
593 return false;
594 }
595
596 if (!opt_for_fn (node->decl, optimize)
597 || !opt_for_fn (node->decl, flag_ipa_sra))
598 {
599 if (dump_file)
600 fprintf (dump_file, "Not optimizing or IPA-SRA turned off for this "
601 "function.\n");
602 return false;
603 }
604
605 if (DECL_VIRTUAL_P (node->decl))
606 {
607 if (dump_file)
608 fprintf (dump_file, "Function is a virtual method.\n");
609 return false;
610 }
611
612 struct function *fun = DECL_STRUCT_FUNCTION (node->decl);
613 if (fun->stdarg)
614 {
615 if (dump_file)
616 fprintf (dump_file, "Function uses stdarg. \n");
617 return false;
618 }
619
620 if (DECL_DISREGARD_INLINE_LIMITS (node->decl))
621 {
622 if (dump_file)
623 fprintf (dump_file, "Always inline function will be inlined "
624 "anyway. \n");
625 return false;
626 }
627
628 return true;
629}
630
631/* Print access tree starting at ACCESS to F. */
632
633static void
634dump_gensum_access (FILE *f, gensum_param_access *access, unsigned indent)
635{
636 fprintf (f, " ");
637 for (unsigned i = 0; i < indent; i++)
638 fprintf (f, " ");
639 fprintf (f, " * Access to offset: " HOST_WIDE_INT_PRINT_DEC,
640 access->offset);
641 fprintf (f, ", size: " HOST_WIDE_INT_PRINT_DEC, access->size);
642 fprintf (f, ", type: ");
643 print_generic_expr (f, access->type);
644 fprintf (f, ", alias_ptr_type: ");
645 print_generic_expr (f, access->alias_ptr_type);
646 fprintf (f, ", nonarg: %u, reverse: %u\n", access->nonarg, access->reverse);
647 for (gensum_param_access *ch = access->first_child;
648 ch;
649 ch = ch->next_sibling)
650 dump_gensum_access (f, ch, indent + 2);
651}
652
653
654/* Print access tree starting at ACCESS to F. */
655
656static void
657dump_isra_access (FILE *f, param_access *access)
658{
659 fprintf (f, " * Access to unit offset: %u", access->unit_offset);
660 fprintf (f, ", unit size: %u", access->unit_size);
661 fprintf (f, ", type: ");
662 print_generic_expr (f, access->type);
663 fprintf (f, ", alias_ptr_type: ");
664 print_generic_expr (f, access->alias_ptr_type);
665 if (access->certain)
666 fprintf (f, ", certain");
667 else
668 fprintf (f, ", not-certain");
669 if (access->reverse)
670 fprintf (f, ", reverse");
671 fprintf (f, "\n");
672}
673
674/* Dump access tree starting at ACCESS to stderr. */
675
676DEBUG_FUNCTION void
677debug_isra_access (param_access *access)
678{
679 dump_isra_access (stderr, access);
680}
681
682/* Dump DESC to F. */
683
684static void
685dump_gensum_param_descriptor (FILE *f, gensum_param_desc *desc)
686{
687 if (desc->locally_unused)
688 fprintf (f, " unused with %i call_uses\n", desc->call_uses);
689 if (!desc->split_candidate)
690 {
691 fprintf (f, " not a candidate\n");
692 return;
693 }
694 if (desc->by_ref)
695 fprintf (f, " by_ref with %u pass throughs\n", desc->ptr_pt_count);
696
697 for (gensum_param_access *acc = desc->accesses; acc; acc = acc->next_sibling)
698 dump_gensum_access (f, acc, 2);
699}
700
701/* Dump all parameter descriptors in PARAM_DESCRIPTIONS, assuming they
702 describe parameters of FNDECL, to F. */
703
704static void
705dump_gensum_param_descriptors (FILE *f, tree fndecl,
706 vec<gensum_param_desc> *param_descriptions)
707{
708 tree parm = DECL_ARGUMENTS (fndecl);
709 for (unsigned i = 0;
710 i < param_descriptions->length ();
711 ++i, parm = DECL_CHAIN (parm))
712 {
713 fprintf (f, " Descriptor for parameter %i ", i);
714 print_generic_expr (f, parm, TDF_UID);
715 fprintf (f, "\n");
716 dump_gensum_param_descriptor (f, &(*param_descriptions)[i]);
717 }
718}
719
720
721/* Dump DESC to F. */
722
723static void
724dump_isra_param_descriptor (FILE *f, isra_param_desc *desc)
725{
726 if (desc->locally_unused)
727 {
728 fprintf (f, " (locally) unused\n");
729 }
730 if (!desc->split_candidate)
731 {
732 fprintf (f, " not a candidate for splitting\n");
733 return;
734 }
735 fprintf (f, " param_size_limit: %u, size_reached: %u%s\n",
736 desc->param_size_limit, desc->size_reached,
737 desc->by_ref ? ", by_ref" : "");
738
739 for (unsigned i = 0; i < vec_safe_length (desc->accesses); ++i)
740 {
741 param_access *access = (*desc->accesses)[i];
742 dump_isra_access (f, access);
743 }
744}
745
746/* Dump all parameter descriptors in IFS, assuming it describes FNDECL, to
747 F. */
748
749static void
750dump_isra_param_descriptors (FILE *f, tree fndecl,
751 isra_func_summary *ifs)
752{
753 tree parm = DECL_ARGUMENTS (fndecl);
754 if (!ifs->m_parameters)
755 {
756 fprintf (f, " parameter descriptors not available\n");
757 return;
758 }
759
760 for (unsigned i = 0;
761 i < ifs->m_parameters->length ();
762 ++i, parm = DECL_CHAIN (parm))
763 {
764 fprintf (f, " Descriptor for parameter %i ", i);
765 print_generic_expr (f, parm, TDF_UID);
766 fprintf (f, "\n");
767 dump_isra_param_descriptor (f, &(*ifs->m_parameters)[i]);
768 }
769}
770
771/* Add SRC to inputs of PARAM_FLOW, unless it would exceed storage. If the
772 function fails return false, otherwise return true. SRC must fit into an
773 unsigned char. Used for purposes of transitive unused parameter
774 removal. */
775
776static bool
777add_src_to_param_flow (isra_param_flow *param_flow, int src)
778{
779 gcc_checking_assert (src >= 0 && src <= UCHAR_MAX);
780 if (param_flow->length == IPA_SRA_MAX_PARAM_FLOW_LEN)
781 return false;
782
783 param_flow->inputs[(int) param_flow->length] = src;
784 param_flow->length++;
785 return true;
786}
787
788/* Add a SRC to the inputs of PARAM_FLOW unless it is already there and assert
789 it is the only input. Used for purposes of transitive parameter
790 splitting. */
791
792static void
793set_single_param_flow_source (isra_param_flow *param_flow, int src)
794{
795 gcc_checking_assert (src >= 0 && src <= UCHAR_MAX);
796 if (param_flow->length == 0)
797 {
798 param_flow->inputs[0] = src;
799 param_flow->length = 1;
800 }
801 else if (param_flow->length == 1)
802 gcc_assert (param_flow->inputs[0] == src);
803 else
804 gcc_unreachable ();
805}
806
807/* Assert that there is only a single value in PARAM_FLOW's inputs and return
808 it. */
809
810static unsigned
811get_single_param_flow_source (const isra_param_flow *param_flow)
812{
813 gcc_assert (param_flow->length == 1);
814 return param_flow->inputs[0];
815}
816
817/* Inspect all uses of NAME and simple arithmetic calculations involving NAME
818 in FUN represented with NODE and return a negative number if any of them is
819 used for something other than either an actual call argument, simple
820 arithmetic operation or debug statement. If there are no such uses, return
821 the number of actual arguments that this parameter eventually feeds to (or
822 zero if there is none). For any such parameter, mark PARM_NUM as one of its
823 sources. ANALYZED is a bitmap that tracks which SSA names we have already
824 started investigating. */
825
826static int
827isra_track_scalar_value_uses (function *fun, cgraph_node *node, tree name,
828 int parm_num, bitmap analyzed)
829{
830 int res = 0;
831 imm_use_iterator imm_iter;
832 gimple *stmt;
833
834 FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
835 {
836 if (is_gimple_debug (stmt))
837 continue;
838
839 /* TODO: We could handle at least const builtin functions like arithmetic
840 operations below. */
841 if (is_gimple_call (stmt))
842 {
843 int all_uses = 0;
844 use_operand_p use_p;
845 FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
846 all_uses++;
847
848 gcall *call = as_a <gcall *> (stmt);
849 unsigned arg_count;
850 if (gimple_call_internal_p (call)
851 || (arg_count = gimple_call_num_args (call)) == 0)
852 {
853 res = -1;
854 break;
855 }
856
857 cgraph_edge *cs = node->get_edge (stmt);
858 gcc_checking_assert (cs);
859 isra_call_summary *csum = call_sums->get_create (cs);
860 csum->init_inputs (arg_count);
861
862 int simple_uses = 0;
863 for (unsigned i = 0; i < arg_count; i++)
864 if (gimple_call_arg (call, i) == name)
865 {
866 if (!add_src_to_param_flow (&csum->m_arg_flow[i], parm_num))
867 {
868 simple_uses = -1;
869 break;
870 }
871 simple_uses++;
872 }
873
874 if (simple_uses < 0
875 || all_uses != simple_uses)
876 {
877 res = -1;
878 break;
879 }
880 res += all_uses;
881 }
882 else if (!stmt_unremovable_because_of_non_call_eh_p (fun, stmt)
883 && ((is_gimple_assign (stmt) && !gimple_has_volatile_ops (stmt))
884 || gimple_code (stmt) == GIMPLE_PHI))
885 {
886 tree lhs;
887 if (gimple_code (stmt) == GIMPLE_PHI)
888 lhs = gimple_phi_result (stmt);
889 else
890 lhs = gimple_assign_lhs (stmt);
891
892 if (TREE_CODE (lhs) != SSA_NAME)
893 {
894 res = -1;
895 break;
896 }
897 gcc_assert (!gimple_vdef (stmt));
898 if (bitmap_set_bit (analyzed, SSA_NAME_VERSION (lhs)))
899 {
900 int tmp = isra_track_scalar_value_uses (fun, node, lhs, parm_num,
901 analyzed);
902 if (tmp < 0)
903 {
904 res = tmp;
905 break;
906 }
907 res += tmp;
908 }
909 }
910 else
911 {
912 res = -1;
913 break;
914 }
915 }
916 return res;
917}
918
919/* Inspect all uses of PARM, which must be a gimple register, in FUN (which is
920 also described by NODE) and simple arithmetic calculations involving PARM
921 and return true if any of them is used for something other than either an
922 actual call argument, simple arithmetic operation or debug statement. If
923 there are no such uses, return false and store the number of actual arguments
924 that this parameter eventually feeds to (or zero if there is none) to
925 *CALL_USES_P. For any such parameter, mark PARM_NUM as one of its
926 sources.
927
928 This function is similar to ptr_parm_has_nonarg_uses but its results are
929 meant for unused parameter removal, as opposed to splitting of parameters
930 passed by reference or converting them to passed by value.
931 */
932
933static bool
934isra_track_scalar_param_local_uses (function *fun, cgraph_node *node, tree parm,
935 int parm_num, int *call_uses_p)
936{
937 gcc_checking_assert (is_gimple_reg (parm));
938
939 tree name = ssa_default_def (fun, parm);
940 if (!name || has_zero_uses (name))
941 {
942 *call_uses_p = 0;
943 return false;
944 }
945
946 /* Edge summaries can only handle callers with fewer than 256 parameters. */
947 if (parm_num > UCHAR_MAX)
948 return true;
949
950 bitmap analyzed = BITMAP_ALLOC (NULL);
951 int call_uses = isra_track_scalar_value_uses (fun, node, name, parm_num,
952 analyzed);
953 BITMAP_FREE (analyzed);
954 if (call_uses < 0)
955 return true;
956 *call_uses_p = call_uses;
957 return false;
958}
959
960/* Scan immediate uses of a default definition SSA name of a parameter PARM and
961 examine whether there are any nonarg uses that are not actual arguments or
962 otherwise infeasible uses. If so, return true, otherwise return false.
963 Create pass-through IPA flow records for any direct uses as argument calls
964 and if returning false, store their number into *PT_COUNT_P. NODE and FUN
965 must represent the function that is currently analyzed, PARM_NUM must be the
966 index of the analyzed parameter.
967
968 This function is similar to isra_track_scalar_param_local_uses but its
969 results are meant for splitting of parameters passed by reference or turning
970 them into bits passed by value, as opposed to generic unused parameter
971 removal.
972 */
973
974static bool
975ptr_parm_has_nonarg_uses (cgraph_node *node, function *fun, tree parm,
976 int parm_num, unsigned *pt_count_p)
977{
978 imm_use_iterator ui;
979 gimple *stmt;
980 tree name = ssa_default_def (fun, parm);
981 bool ret = false;
982 unsigned pt_count = 0;
983
984 if (!name || has_zero_uses (name))
985 return false;
986
987 /* Edge summaries can only handle callers with fewer than 256 parameters. */
988 if (parm_num > UCHAR_MAX)
989 return true;
990
991 FOR_EACH_IMM_USE_STMT (stmt, ui, name)
992 {
993 unsigned uses_ok = 0;
994 use_operand_p use_p;
995
996 if (is_gimple_debug (stmt))
997 continue;
998
999 if (gimple_assign_single_p (stmt))
1000 {
1001 tree rhs = gimple_assign_rhs1 (stmt);
1002 if (!TREE_THIS_VOLATILE (rhs))
1003 {
1004 while (handled_component_p (rhs))
1005 rhs = TREE_OPERAND (rhs, 0);
1006 if (TREE_CODE (rhs) == MEM_REF
1007 && TREE_OPERAND (rhs, 0) == name
1008 && integer_zerop (TREE_OPERAND (rhs, 1))
1009 && types_compatible_p (TREE_TYPE (rhs),
1010 TREE_TYPE (TREE_TYPE (name))))
1011 uses_ok++;
1012 }
1013 }
1014 else if (is_gimple_call (stmt))
1015 {
1016 gcall *call = as_a <gcall *> (stmt);
1017 unsigned arg_count;
1018 if (gimple_call_internal_p (call)
1019 || (arg_count = gimple_call_num_args (call)) == 0)
1020 {
1021 ret = true;
1022 break;
1023 }
1024
1025 cgraph_edge *cs = node->get_edge (stmt);
1026 gcc_checking_assert (cs);
1027 isra_call_summary *csum = call_sums->get_create (cs);
1028 csum->init_inputs (arg_count);
1029
1030 for (unsigned i = 0; i < arg_count; ++i)
1031 {
1032 tree arg = gimple_call_arg (stmt, i);
1033
1034 if (arg == name)
1035 {
1036 /* TODO: Allow &MEM_REF[name + offset] here,
1037 ipa_param_body_adjustments::modify_call_stmt has to be
1038 adjusted too. */
1039 csum->m_arg_flow[i].pointer_pass_through = true;
1040 set_single_param_flow_source (&csum->m_arg_flow[i], parm_num);
1041 pt_count++;
1042 uses_ok++;
1043 continue;
1044 }
1045
1046 if (!TREE_THIS_VOLATILE (arg))
1047 {
1048 while (handled_component_p (arg))
1049 arg = TREE_OPERAND (arg, 0);
1050 if (TREE_CODE (arg) == MEM_REF
1051 && TREE_OPERAND (arg, 0) == name
1052 && integer_zerop (TREE_OPERAND (arg, 1))
1053 && types_compatible_p (TREE_TYPE (arg),
1054 TREE_TYPE (TREE_TYPE (name))))
1055 uses_ok++;
1056 }
1057 }
1058 }
1059
1060 /* If the number of valid uses does not match the number of
1061 uses in this stmt there is an unhandled use. */
1062 unsigned all_uses = 0;
1063 FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
1064 all_uses++;
1065
1066 gcc_checking_assert (uses_ok <= all_uses);
1067 if (uses_ok != all_uses)
1068 {
1069 ret = true;
1070 break;
1071 }
1072 }
1073
1074 *pt_count_p = pt_count;
1075 return ret;
1076}
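
To make the nonarg-use distinction above concrete (an invented example): a pointer parameter that is only dereferenced or passed straight through stays analyzable, while one that escapes does not.

  extern int callee (int *q);

  /* P is only loaded from and passed directly as a call argument, so it
     has no disqualifying uses; the direct pass-through is recorded in
     the call summary.  */
  int
  fine (int *p)
  {
    return *p + callee (p);
  }

  /* Here P itself escapes by being returned, a use that is neither a
     plain dereference nor a call argument, so the function would report
     a nonarg use and the parameter would not be split.  */
  int *
  escapes (int *p)
  {
    return p;
  }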
1077
1078/* Initialize vector of parameter descriptors of NODE. Return true if there
1079 are any candidates for splitting or unused aggregate parameter removal (the
1080 function may return false if there are candidates for removal of register
1081 parameters) and function body must be scanned. */
1082
1083static bool
1084create_parameter_descriptors (cgraph_node *node,
1085 vec<gensum_param_desc> *param_descriptions)
1086{
1087 function *fun = DECL_STRUCT_FUNCTION (node->decl);
1088 bool ret = false;
1089
1090 int num = 0;
1091 for (tree parm = DECL_ARGUMENTS (node->decl);
1092 parm;
1093 parm = DECL_CHAIN (parm), num++)
1094 {
1095 const char *msg;
1096 gensum_param_desc *desc = &(*param_descriptions)[num];
1097 /* param_descriptions vector is grown cleared in the caller. */
1098 desc->param_number = num;
1099 decl2desc->put (parm, desc);
1100
1101 if (dump_file && (dump_flags & TDF_DETAILS))
1102 print_generic_expr (dump_file, parm, TDF_UID);
1103
1104 int scalar_call_uses;
1105 tree type = TREE_TYPE (parm);
1106 if (TREE_THIS_VOLATILE (parm))
1107 {
1108 if (dump_file && (dump_flags & TDF_DETAILS))
1109 fprintf (dump_file, " not a candidate, is volatile\n");
1110 continue;
1111 }
1112 if (!is_gimple_reg_type (type) && is_va_list_type (type))
1113 {
1114 if (dump_file && (dump_flags & TDF_DETAILS))
1115 fprintf (dump_file, " not a candidate, is a va_list type\n");
1116 continue;
1117 }
1118 if (TREE_ADDRESSABLE (parm))
1119 {
1120 if (dump_file && (dump_flags & TDF_DETAILS))
1121 fprintf (dump_file, " not a candidate, is addressable\n");
1122 continue;
1123 }
1124 if (TREE_ADDRESSABLE (type))
1125 {
1126 if (dump_file && (dump_flags & TDF_DETAILS))
1127 fprintf (dump_file, " not a candidate, type cannot be split\n");
1128 continue;
1129 }
1130
1131 if (is_gimple_reg (parm)
1132 && !isra_track_scalar_param_local_uses (fun, node, parm, num,
1133 &scalar_call_uses))
1134 {
1135 if (dump_file && (dump_flags & TDF_DETAILS))
1136 fprintf (dump_file, " is a scalar with only %i call uses\n",
1137 scalar_call_uses);
1138
1139 desc->locally_unused = true;
1140 desc->call_uses = scalar_call_uses;
1141 }
1142
1143 if (POINTER_TYPE_P (type))
1144 {
1145 type = TREE_TYPE (type);
1146
1147 if (TREE_CODE (type) == FUNCTION_TYPE
1148 || TREE_CODE (type) == METHOD_TYPE)
1149 {
1150 if (dump_file && (dump_flags & TDF_DETAILS))
1151 fprintf (dump_file, " not a candidate, reference to "
1152 "a function\n");
1153 continue;
1154 }
1155 if (TYPE_VOLATILE (type))
1156 {
1157 if (dump_file && (dump_flags & TDF_DETAILS))
1158 fprintf (dump_file, " not a candidate, reference to "
1159 "a volatile type\n");
1160 continue;
1161 }
1162 if (TREE_CODE (type) == ARRAY_TYPE
1163 && TYPE_NONALIASED_COMPONENT (type))
1164 {
1165 if (dump_file && (dump_flags & TDF_DETAILS))
1166 fprintf (dump_file, " not a candidate, reference to "
1167 "a nonaliased component array\n");
1168 continue;
1169 }
1170 if (!is_gimple_reg (parm))
1171 {
1172 if (dump_file && (dump_flags & TDF_DETAILS))
1173 fprintf (dump_file, " not a candidate, a reference which is "
1174 "not a gimple register (probably addressable)\n");
1175 continue;
1176 }
1177 if (is_va_list_type (type))
1178 {
1179 if (dump_file && (dump_flags & TDF_DETAILS))
1180 fprintf (dump_file, " not a candidate, reference to "
1181 "a va list\n");
1182 continue;
1183 }
1184 if (ptr_parm_has_nonarg_uses (node, fun, parm, num,
1185 &desc->ptr_pt_count))
1186 {
1187 if (dump_file && (dump_flags & TDF_DETAILS))
1188 fprintf (dump_file, " not a candidate, reference has "
1189 "nonarg uses\n");
1190 continue;
1191 }
1192 desc->by_ref = true;
1193 }
1194 else if (!AGGREGATE_TYPE_P (type))
1195 {
1196 /* This is in an else branch because scalars passed by reference are
1197 still candidates to be passed by value. */
1198 if (dump_file && (dump_flags & TDF_DETAILS))
1199 fprintf (dump_file, " not a candidate, not an aggregate\n");
1200 continue;
1201 }
1202
1203 if (!COMPLETE_TYPE_P (type))
1204 {
1205 if (dump_file && (dump_flags & TDF_DETAILS))
1206 fprintf (dump_file, " not a candidate, not a complete type\n");
1207 continue;
1208 }
1209 if (!tree_fits_uhwi_p (TYPE_SIZE (type)))
1210 {
1211 if (dump_file && (dump_flags & TDF_DETAILS))
1212 fprintf (dump_file, " not a candidate, size not representable\n");
1213 continue;
1214 }
1215 unsigned HOST_WIDE_INT type_size
1216 = tree_to_uhwi (TYPE_SIZE (type)) / BITS_PER_UNIT;
1217 if (type_size == 0
1218 || type_size >= ISRA_ARG_SIZE_LIMIT)
1219 {
1220 if (dump_file && (dump_flags & TDF_DETAILS))
1221 fprintf (dump_file, " not a candidate, has zero or huge size\n");
1222 continue;
1223 }
1224 if (type_internals_preclude_sra_p (type, &msg))
1225 {
1226 if (dump_file && (dump_flags & TDF_DETAILS))
1227 fprintf (dump_file, " not a candidate, %s\n", msg);
1228 continue;
1229 }
1230
1231 if (dump_file && (dump_flags & TDF_DETAILS))
1232 fprintf (dump_file, " is a candidate\n");
1233
1234 ret = true;
1235 desc->split_candidate = true;
1236 if (desc->by_ref)
1237 desc->deref_index = by_ref_count++;
1238 }
1239 return ret;
1240}
1241
1242/* Return pointer to descriptor of parameter DECL or NULL if it cannot be
1243 found, which happens if DECL is for a static chain. */
1244
1245static gensum_param_desc *
1246get_gensum_param_desc (tree decl)
1247{
1248 gcc_checking_assert (TREE_CODE (decl) == PARM_DECL);
1249 gensum_param_desc **slot = decl2desc->get (decl);
1250 if (!slot)
1251 /* This can happen for static chains which we cannot handle so far. */
1252 return NULL;
1253 gcc_checking_assert (*slot);
1254 return *slot;
1255}
1256
1257
1258/* Remove parameter described by DESC from candidates for IPA-SRA splitting and
1259 write REASON to the dump file if there is one. */
1260
1261static void
1262disqualify_split_candidate (gensum_param_desc *desc, const char *reason)
1263{
1264 if (!desc->split_candidate)
1265 return;
1266
1267 if (dump_file && (dump_flags & TDF_DETAILS))
1268 fprintf (dump_file, "! Disqualifying parameter number %i - %s\n",
1269 desc->param_number, reason);
1270
1271 desc->split_candidate = false;
1272}
1273
1274/* Remove DECL from candidates for IPA-SRA and write REASON to the dump file if
1275 there is one. */
1276
1277static void
1278disqualify_split_candidate (tree decl, const char *reason)
1279{
1280 gensum_param_desc *desc = get_gensum_param_desc (decl);
1281 if (desc)
1282 disqualify_split_candidate (desc, reason);
1283}
1284
1285/* Allocate a new access to DESC and fill it in with OFFSET and SIZE. But
1286 first, check that there are not too many of them already. If so, do not
1287 allocate anything and return NULL. */
1288
1289static gensum_param_access *
1290allocate_access (gensum_param_desc *desc,
1291 HOST_WIDE_INTlong offset, HOST_WIDE_INTlong size)
1292{
1293 if (desc->access_count
1294 == (unsigned) param_ipa_sra_max_replacements)
1295 {
1296 disqualify_split_candidate (desc, "Too many replacement candidates");
1297 return NULL;
1298 }
1299
1300 gensum_param_access *access
1301 = (gensum_param_access *) obstack_alloc (&gensum_obstack,
1302 sizeof (gensum_param_access));
1303 memset (access, 0, sizeof (*access));
1304 access->offset = offset;
1305 access->size = size;
1306 return access;
1307}
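
The call above uses the classic obstack pattern: many small gensum_param_access objects are carved out of larger chunks and all of them are released at once when summary generation is over. A minimal standalone sketch of that allocation pattern, using the glibc <obstack.h> interface directly (the struct acc type here is hypothetical, not GCC's):

#include <obstack.h>
#include <stdlib.h>

#define obstack_chunk_alloc malloc
#define obstack_chunk_free free

struct acc { long offset, size; };

int
main (void)
{
  struct obstack ob;
  obstack_init (&ob);
  /* Objects are carved out of the obstack's current chunk; a fresh chunk
     is requested from malloc only when the current one is exhausted.  */
  struct acc *a = (struct acc *) obstack_alloc (&ob, sizeof (struct acc));
  a->offset = 0;
  a->size = 32;
  /* Passing NULL releases every object allocated in the obstack.  */
  obstack_free (&ob, NULL);
  return 0;
}
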
1308
1309/* The context in which scan_expr_access has been called, i.e. whether it
1310 deals with a load, a function call argument, or a store. Please note that
1311 in rare circumstances when it is not clear if the access is a load or a
1312 store, ISRA_CTX_STORE is used too. */
1313
1314enum isra_scan_context {ISRA_CTX_LOAD, ISRA_CTX_ARG, ISRA_CTX_STORE};
1315
1316/* Return an access describing memory access to the variable described by DESC
1317 at OFFSET with SIZE in context CTX, starting at pointer to the linked list
1318 at a certain tree level FIRST. Attempt to create it and put it into the
1319 appropriate place in the access tree if it does not exist, but fail and
1320 return NULL if there are already too many accesses, if it would create a
1321 partially overlapping access or if an access would end up within a
1322 pre-existing non-call access. */
1323
1324static gensum_param_access *
1325get_access_1 (gensum_param_desc *desc, gensum_param_access **first,
1326 HOST_WIDE_INT offset, HOST_WIDE_INT size, isra_scan_context ctx)
1327{
1328 gensum_param_access *access = *first, **ptr = first;
1329
1330 if (!access)
1331 {
1332 /* No pre-existing access at this level, just create it. */
1333 gensum_param_access *a = allocate_access (desc, offset, size);
1334 if (!a)
1335 return NULL;
1336 *first = a;
1337 return *first;
1338 }
1339
1340 if (access->offset >= offset + size)
1341 {
1342 /* We want to squeeze it in front of the very first access, just do
1343 it. */
1344 gensum_param_access *r = allocate_access (desc, offset, size);
1345 if (!r)
1346 return NULL;
1347 r->next_sibling = access;
1348 *first = r;
1349 return r;
1350 }
1351
1352 /* Skip all accesses that have to come before us until the next sibling is
1353 already too far. */
1354 while (offset >= access->offset + access->size
1355 && access->next_sibling
1356 && access->next_sibling->offset < offset + size)
1357 {
1358 ptr = &access->next_sibling;
1359 access = access->next_sibling;
1360 }
1361
1362 /* At this point we know we do not belong before access. */
1363 gcc_assert (access->offset < offset + size);
1364
1365 if (access->offset == offset && access->size == size)
1366 /* We found what we were looking for. */
1367 return access;
1368
1369 if (access->offset <= offset
1370 && access->offset + access->size >= offset + size)
1371 {
1372 /* We fit into access which is larger than us. We need to find/create
1373 something below access. But we only allow nesting in call
1374 arguments. */
1375 if (access->nonarg)
1376 return NULL;
1377
1378 return get_access_1 (desc, &access->first_child, offset, size, ctx);
1379 }
1380
1381 if (offset <= access->offset
1382 && offset + size >= access->offset + access->size)
1383 /* We are actually bigger than access, which fully fits into us, take its
1384 place and make all accesses fitting into it its children. */
1385 {
1386 /* But first, we only allow nesting in call arguments so check if that is
1387 what we are trying to represent. */
1388 if (ctx != ISRA_CTX_ARG)
1389 return NULL;
1390
1391 gensum_param_access *r = allocate_access (desc, offset, size);
1392 if (!r)
1393 return NULL;
1394 r->first_child = access;
1395
1396 while (access->next_sibling
1397 && access->next_sibling->offset < offset + size)
1398 access = access->next_sibling;
1399 if (access->offset + access->size > offset + size)
1400 {
1401 /* This must be a different access; since accesses are sorted, the
1402 following must be true and this signals a partial overlap. */
1403 gcc_assert (access->offset > offset);
1404 return NULL;
1405 }
1406
1407 r->next_sibling = access->next_sibling;
1408 access->next_sibling = NULL;
1409 *ptr = r;
1410 return r;
1411 }
1412
1413 if (offset >= access->offset + access->size)
1414 {
1415 /* We belong after access. */
1416 gensum_param_access *r = allocate_access (desc, offset, size);
1417 if (!r)
1418 return NULL;
1419 r->next_sibling = access->next_sibling;
1420 access->next_sibling = r;
1421 return r;
1422 }
1423
1424 if (offset < access->offset)
1425 {
1426 /* We know the following, otherwise we would have created a
1427 super-access. */
1428 gcc_checking_assert (offset + size < access->offset + access->size);
1429 return NULL;
1430 }
1431
1432 if (offset + size > access->offset + access->size)
1433 {
1434 /* Likewise. */
1435 gcc_checking_assert (offset > access->offset);
1436 return NULL;
1437 }
1438
1439 gcc_unreachable ();
1440}
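
In other words, get_access_1 maintains a tree in which siblings are sorted by offset and disjoint, and children lie strictly inside their parent; partial overlaps disqualify the parameter. A simplified standalone sketch of just the classification step (hypothetical struct, not GCC's gensum_param_access):

struct acc { long offset, size; struct acc *first_child, *next_sibling; };

enum fit { FIT_SAME, FIT_CHILD, FIT_PARENT, FIT_DISJOINT, FIT_OVERLAP };

/* Classify how the interval [offset, offset + size) relates to an existing
   access A; the insertion code above correspondingly reuses A, recurses into
   its children, makes the new access A's parent, links a new sibling, or
   gives up.  */
static enum fit
classify (const struct acc *a, long offset, long size)
{
  if (a->offset == offset && a->size == size)
    return FIT_SAME;
  if (a->offset <= offset && a->offset + a->size >= offset + size)
    return FIT_CHILD;	/* Nests inside A.  */
  if (offset <= a->offset && offset + size >= a->offset + a->size)
    return FIT_PARENT;	/* A nests inside the new access.  */
  if (offset >= a->offset + a->size || offset + size <= a->offset)
    return FIT_DISJOINT;	/* Becomes a sibling.  */
  return FIT_OVERLAP;	/* Partial overlap: candidate is disqualified.  */
}
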
1441
1442/* Return an access describing memory access to the variable described by DESC
1443 at OFFSET with SIZE in context CTX, and mark it as used in that context.
1444 Attempt to create it if it does not exist, but fail and return NULL if there
1445 are already too many accesses, if it would create a partially overlapping
1446 access or if an access would end up in a non-call access. */
1447
1448static gensum_param_access *
1449get_access (gensum_param_desc *desc, HOST_WIDE_INT offset, HOST_WIDE_INT size,
1450 isra_scan_context ctx)
1451{
1452 gcc_checking_assert (desc->split_candidate);
1453
1454 gensum_param_access *access = get_access_1 (desc, &desc->accesses, offset,
1455 size, ctx);
1456 if (!access)
1457 {
1458 disqualify_split_candidate (desc,
1459 "Bad access overlap or too many accesses");
1460 return NULL;
1461 }
1462
1463 switch (ctx)
1464 {
1465 case ISRA_CTX_STORE:
1466 gcc_assert (!desc->by_ref);
1467 /* Fall-through */
1468 case ISRA_CTX_LOAD:
1469 access->nonarg = true;
1470 break;
1471 case ISRA_CTX_ARG:
1472 break;
1473 }
1474
1475 return access;
1476}
1477
1478/* Verify that parameter access tree starting with ACCESS is in good shape.
1479 PARENT_OFFSET and PARENT_SIZE are the corresponding fields of parent of
1480 ACCESS or zero if there is none. */
1481
1482static bool
1483verify_access_tree_1 (gensum_param_access *access, HOST_WIDE_INT parent_offset,
1484 HOST_WIDE_INT parent_size)
1485{
1486 while (access)
1487 {
1488 gcc_assert (access->offset >= 0 && access->size >= 0);
1489
1490 if (parent_size != 0)
1491 {
1492 if (access->offset < parent_offset)
1493 {
1494 error ("Access offset before parent offset");
1495 return true;
1496 }
1497 if (access->size >= parent_size)
1498 {
1499 error ("Access size greater or equal to its parent size");
1500 return true;
1501 }
1502 if (access->offset + access->size > parent_offset + parent_size)
1503 {
1504 error ("Access terminates outside of its parent");
1505 return true;
1506 }
1507 }
1508
1509 if (verify_access_tree_1 (access->first_child, access->offset,
1510 access->size))
1511 return true;
1512
1513 if (access->next_sibling
1514 && (access->next_sibling->offset < access->offset + access->size))
1515 {
1516 error ("Access overlaps with its sibling");
1517 return true;
1518 }
1519
1520 access = access->next_sibling;
1521 }
1522 return false;
1523}
1524
1525/* Verify that parameter access tree starting with ACCESS is in good shape,
1526 halt compilation and dump the tree to stderr if not. */
1527
1528DEBUG_FUNCTION void
1529isra_verify_access_tree (gensum_param_access *access)
1530{
1531 if (verify_access_tree_1 (access, 0, 0))
1532 {
1533 for (; access; access = access->next_sibling)
1534 dump_gensum_access (stderr, access, 2);
1535 internal_error ("IPA-SRA access verification failed");
1536 }
1537}
1538
1539
1540/* Callback of walk_stmt_load_store_addr_ops visit_addr used to determine
1541 GIMPLE_ASM operands with memory constraints which cannot be scalarized. */
1542
1543static bool
1544asm_visit_addr (gimple *, tree op, tree, void *)
1545{
1546 op = get_base_address (op);
1547 if (op
1548 && TREE_CODE (op) == PARM_DECL)
1549 disqualify_split_candidate (op, "Non-scalarizable GIMPLE_ASM operand.");
1550
1551 return false;
1552}
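
For illustration, in a hypothetical function like the one below the whole aggregate parameter is handed to an asm "m" operand, so the callback above sees its address and disqualifies it:

struct S { int a, b; };

int
f (struct S s)
{
  /* The "m" constraint exposes the address of s to the asm text, which
     IPA-SRA cannot rewrite, so s stops being a split candidate.  */
  __asm__ ("" : : "m" (s));
  return s.a;
}
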
1553
1554/* Mark a dereference of parameter identified by DESC of distance DIST in a
1555 basic block BB, unless the BB has already been marked as potentially
1556 final. */
1557
1558static void
1559mark_param_dereference (gensum_param_desc *desc, HOST_WIDE_INT dist,
1560 basic_block bb)
1561{
1562 gcc_assert (desc->by_ref);
1563 gcc_checking_assert (desc->split_candidate);
1564
1565 if (bitmap_bit_p (final_bbs, bb->index))
1566 return;
1567
1568 int idx = bb->index * by_ref_count + desc->deref_index;
1569 if (bb_dereferences[idx] < dist)
1570 bb_dereferences[idx] = dist;
1571}
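
bb_dereferences is a flat array used as a dense matrix: the row is the basic block index and the column is the deref_index assigned to each by-reference split candidate. A small sketch of the indexing scheme and of the keep-the-maximum update (hypothetical helper names; distances are in bits in the real code):

/* Index of (bb_index, deref_index) in the flattened matrix with
   by_ref_count columns.  */
static inline int
deref_idx (int bb_index, int by_ref_count, int deref_index)
{
  return bb_index * by_ref_count + deref_index;
}

/* Remember the largest distance up to which the candidate is known to be
   dereferenced whenever the block executes.  */
static void
record_deref (long *bb_dereferences, int bb_index, int by_ref_count,
	      int deref_index, long dist)
{
  int idx = deref_idx (bb_index, by_ref_count, deref_index);
  if (bb_dereferences[idx] < dist)
    bb_dereferences[idx] = dist;
}
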
1572
1573/* Return true if any potential replacements should use NEW_TYPE as opposed to
1574 the previously recorded OLD_TYPE. */
1575
1576static bool
1577type_prevails_p (tree old_type, tree new_type)
1578{
1579 if (old_type == new_type)
1580 return false;
1581
1582 /* Non-aggregates are always better. */
1583 if (!is_gimple_reg_type (old_type)
1584 && is_gimple_reg_type (new_type))
1585 return true;
1586 if (is_gimple_reg_type (old_type)
1587 && !is_gimple_reg_type (new_type))
1588 return false;
1589
1590 /* Prefer any complex or vector type over any other scalar type. */
1591 if (TREE_CODE (old_type) != COMPLEX_TYPE
1592 && TREE_CODE (old_type) != VECTOR_TYPE
1593 && (TREE_CODE (new_type) == COMPLEX_TYPE
1594 || TREE_CODE (new_type) == VECTOR_TYPE))
1595 return true;
1596 if ((TREE_CODE (old_type) == COMPLEX_TYPE
1597 || TREE_CODE (old_type) == VECTOR_TYPE)
1598 && TREE_CODE (new_type) != COMPLEX_TYPE
1599 && TREE_CODE (new_type) != VECTOR_TYPE)
1600 return false;
1601
1602 /* Use the integral type with the bigger precision. */
1603 if (INTEGRAL_TYPE_P (old_type)
1604 && INTEGRAL_TYPE_P (new_type))
1605 return (TYPE_PRECISION (new_type) > TYPE_PRECISION (old_type));
1606
1607 /* Attempt to disregard any integral type with non-full precision. */
1608 if (INTEGRAL_TYPE_P (old_type)
1609 && (TREE_INT_CST_LOW (TYPE_SIZE (old_type))
1610 != TYPE_PRECISION (old_type)))
1611 return true;
1612 if (INTEGRAL_TYPE_P (new_type)
1613 && (TREE_INT_CST_LOW (TYPE_SIZE (new_type))
1614 != TYPE_PRECISION (new_type)))
1615 return false;
1616 /* Stabilize the selection. */
1617 return TYPE_UID (old_type) < TYPE_UID (new_type);
1618}
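
The ordering above can be summarized as: scalar (register) types beat aggregates, complex and vector types beat other scalars, wider integers beat narrower ones, full-precision integers beat padded ones, and TYPE_UID breaks any remaining ties deterministically. A toy standalone analogue over a simplified type descriptor, just to make the ordering concrete (everything here is hypothetical, not the GCC data structures):

struct toy_type
{
  int is_scalar;		/* Analogue of is_gimple_reg_type.  */
  int is_complex_or_vector;
  int is_integral;
  int precision;		/* Meaningful for integral types only.  */
  int full_precision;		/* Analogue of TYPE_SIZE == TYPE_PRECISION.  */
  int uid;
};

/* Return nonzero iff N should prevail over O, mirroring the tie-break
   cascade of type_prevails_p above.  */
static int
toy_type_prevails (const struct toy_type *o, const struct toy_type *n)
{
  if (o == n)
    return 0;
  if (o->is_scalar != n->is_scalar)
    return n->is_scalar;		/* Non-aggregates always win.  */
  if (o->is_complex_or_vector != n->is_complex_or_vector)
    return n->is_complex_or_vector;	/* Complex/vector beats scalar.  */
  if (o->is_integral && n->is_integral)
    return n->precision > o->precision;	/* Bigger precision wins.  */
  if (o->is_integral && !o->full_precision)
    return 1;				/* Drop padded integral old type.  */
  if (n->is_integral && !n->full_precision)
    return 0;				/* Keep old over padded new type.  */
  return o->uid < n->uid;		/* Deterministic tie-break.  */
}
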
1619
1620/* When scanning an expression which is a call argument, this structure
1621 specifies the call and the position of the argument. */
1622
1623struct scan_call_info
1624{
1625 /* Call graph edge representing the call. */
1626 cgraph_edge *cs;
1627 /* Total number of arguments in the call. */
1628 unsigned argument_count;
1629 /* Number of the actual argument being scanned. */
1630 unsigned arg_idx;
1631};
1632
1633/* Record use of ACCESS which belongs to a parameter described by DESC in a
1634 call argument described by CALL_INFO. */
1635
1636static void
1637record_nonregister_call_use (gensum_param_desc *desc,
1638 scan_call_info *call_info,
1639 unsigned unit_offset, unsigned unit_size)
1640{
1641 isra_call_summary *csum = call_sums->get_create (call_info->cs);
1642 csum->init_inputs (call_info->argument_count);
1643
1644 isra_param_flow *param_flow = &csum->m_arg_flow[call_info->arg_idx];
1645 param_flow->aggregate_pass_through = true;
1646 set_single_param_flow_source (param_flow, desc->param_number);
1647 param_flow->unit_offset = unit_offset;
1648 param_flow->unit_size = unit_size;
1649 desc->call_uses++;
1650}
1651
1652/* Callback of walk_aliased_vdefs, just mark that there was a possible
1653 modification. */
1654
1655static bool
1656mark_maybe_modified (ao_ref *, tree, void *data)
1657{
1658 bool *maybe_modified = (bool *) data;
1659 *maybe_modified = true;
1660 return true;
1661}
1662
1663/* Analyze expression EXPR from GIMPLE for accesses to parameters. CTX
1664 specifies whether EXPR is used in a load, a store, or as a call argument. BB
1665 must be the basic block in which EXPR resides. If CTX specifies call
1666 argument context, CALL_INFO must describe that call and argument position,
1667 otherwise it is ignored. */
1668
1669static void
1670scan_expr_access (tree expr, gimple *stmt, isra_scan_context ctx,
1671 basic_block bb, scan_call_info *call_info = NULL)
1672{
1673 poly_int64 poffset, psize, pmax_size;
1674 HOST_WIDE_INT offset, size, max_size;
1675 tree base;
1676 bool deref = false;
1677 bool reverse;
1678
1679 if (TREE_CODE (expr) == BIT_FIELD_REF
1680 || TREE_CODE (expr) == IMAGPART_EXPR
1681 || TREE_CODE (expr) == REALPART_EXPR)
1682 expr = TREE_OPERAND (expr, 0);
1683
1684 base = get_ref_base_and_extent (expr, &poffset, &psize, &pmax_size, &reverse);
1685
1686 if (TREE_CODE (base) == MEM_REF)
1687 {
1688 tree op = TREE_OPERAND (base, 0);
1689 if (TREE_CODE (op) != SSA_NAME
1690 || !SSA_NAME_IS_DEFAULT_DEF (op))
1691 return;
1692 base = SSA_NAME_VAR (op);
1693 if (!base)
1694 return;
1695 deref = true;
1696 }
1697 if (TREE_CODE (base) != PARM_DECL)
1698 return;
1699
1700 gensum_param_desc *desc = get_gensum_param_desc (base);
1701 if (!desc || !desc->split_candidate)
1702 return;
1703
1704 if (!poffset.is_constant (&offset)
1705 || !psize.is_constant (&size)
1706 || !pmax_size.is_constant (&max_size))
1707 {
1708 disqualify_split_candidate (desc, "Encountered a polynomial-sized "
1709 "access.");
1710 return;
1711 }
1712 if (size < 0 || size != max_size)
1713 {
1714 disqualify_split_candidate (desc, "Encountered a variable sized access.");
1715 return;
1716 }
1717 if (TREE_CODE (expr) == COMPONENT_REF
1718 && DECL_BIT_FIELD (TREE_OPERAND (expr, 1)))
1719 {
1720 disqualify_split_candidate (desc, "Encountered a bit-field access.");
1721 return;
1722 }
1723 if (offset < 0)
1724 {
1725 disqualify_split_candidate (desc, "Encountered an access at a "
1726 "negative offset.");
1727 return;
1728 }
1729 gcc_assert ((offset % BITS_PER_UNIT) == 0);
1730 gcc_assert ((size % BITS_PER_UNIT) == 0);
1731 if ((offset / BITS_PER_UNIT) >= (UINT_MAX - ISRA_ARG_SIZE_LIMIT)
1732 || (size / BITS_PER_UNIT) >= ISRA_ARG_SIZE_LIMIT)
1733 {
1734 disqualify_split_candidate (desc, "Encountered an access with too big "
1735 "offset or size");
1736 return;
1737 }
1738
1739 tree type = TREE_TYPE (expr);
1740 unsigned int exp_align = get_object_alignment (expr);
1741
1742 if (exp_align < TYPE_ALIGN (type))
1743 {
1744 disqualify_split_candidate (desc, "Underaligned access.");
1745 return;
1746 }
1747
1748 if (deref)
1749 {
1750 if (!desc->by_ref)
1751 {
1752 disqualify_split_candidate (desc, "Dereferencing a non-reference.");
1753 return;
1754 }
1755 else if (ctx == ISRA_CTX_STORE)
1756 {
1757 disqualify_split_candidate (desc, "Storing to data passed by "
1758 "reference.");
1759 return;
1760 }
1761
1762 if (!aa_walking_limit)
1763 {
1764 disqualify_split_candidate (desc, "Out of alias analysis step "
1765 "limit.");
1766 return;
1767 }
1768
1769 gcc_checking_assert (gimple_vuse (stmt));
1770 bool maybe_modified = false;
1771 ao_ref ar;
1772
1773 ao_ref_init (&ar, expr);
1774 bitmap visited = BITMAP_ALLOC (NULL);
1775 int walked = walk_aliased_vdefs (&ar, gimple_vuse (stmt),
1776 mark_maybe_modified, &maybe_modified,
1777 &visited, NULL, aa_walking_limit);
1778 BITMAP_FREE (visited);
1779 if (walked > 0)
1780 {
1781 gcc_assert (aa_walking_limit > walked);
1782 aa_walking_limit = aa_walking_limit - walked;
1783 }
1784 if (walked < 0)
1785 aa_walking_limit = 0;
1786 if (maybe_modified || walked < 0)
1787 {
1788 disqualify_split_candidate (desc, "Data passed by reference possibly "
1789 "modified through an alias.");
1790 return;
1791 }
1792 else
1793 mark_param_dereference (desc, offset + size, bb);
1794 }
1795 else
1796 /* Pointer parameters with direct uses should have been ruled out by
1797 analyzing SSA default def when looking at the parameters. */
1798 gcc_assert (!desc->by_ref);
1799
1800 gensum_param_access *access = get_access (desc, offset, size, ctx);
1801 if (!access)
1802 return;
1803
1804 if (ctx == ISRA_CTX_ARG)
1805 {
1806 gcc_checking_assert (call_info);
1807
1808 if (!deref)
1809 record_nonregister_call_use (desc, call_info, offset / BITS_PER_UNIT,
1810 size / BITS_PER_UNIT);
1811 else
1812 /* This is not a pass-through of a pointer, this is a use like any
1813 other. */
1814 access->nonarg = true;
1815 }
1816
1817 if (!access->type)
1818 {
1819 access->type = type;
1820 access->alias_ptr_type = reference_alias_ptr_type (expr);
1821 access->reverse = reverse;
1822 }
1823 else
1824 {
1825 if (exp_align < TYPE_ALIGN (access->type))
1826 {
1827 disqualify_split_candidate (desc, "Reference has lower alignment "
1828 "than a previous one.");
1829 return;
1830 }
1831 if (access->alias_ptr_type != reference_alias_ptr_type (expr))
1832 {
1833 disqualify_split_candidate (desc, "Multiple alias pointer types.");
1834 return;
1835 }
1836 if (access->reverse != reverse)
1837 {
1838 disqualify_split_candidate (desc, "Both normal and reverse "
1839 "scalar storage order.");
1840 return;
1841 }
1842 if (!deref
1843 && (AGGREGATE_TYPE_P (type) || AGGREGATE_TYPE_P (access->type))
1844 && (TYPE_MAIN_VARIANT (access->type) != TYPE_MAIN_VARIANT (type)))
1845 {
1846 /* We need the same aggregate type on all accesses to be able to
1847 distinguish transformation spots from pass-through arguments in
1848 the transformation phase. */
1849 disqualify_split_candidate (desc, "We do not support aggregate "
1850 "type punning.");
1851 return;
1852 }
1853
1854 if (type_prevails_p (access->type, type))
1855 access->type = type;
1856 }
1857}
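
As a concrete illustration of one of the disqualifications above, a hypothetical function reading a bit-field member of an aggregate parameter trips the DECL_BIT_FIELD check and the parameter stops being a candidate:

struct B
{
  int x : 3;	/* Bit-field member.  */
  int y;
};

int
f (struct B b)
{
  return b.x;	/* "Encountered a bit-field access."  */
}
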
1858
1859/* Scan body function described by NODE and FUN and create access trees for
1860 parameters. */
1861
1862static void
1863scan_function (cgraph_node *node, struct function *fun)
1864{
1865 basic_block bb;
1866
1867 FOR_EACH_BB_FN (bb, fun)
1868 {
1869 gimple_stmt_iterator gsi;
1870 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1871 {
1872 gimple *stmt = gsi_stmt (gsi);
1873
1874 if (stmt_can_throw_external (fun, stmt))
1875 bitmap_set_bit (final_bbs, bb->index);
1876 switch (gimple_code (stmt))
1877 {
1878 case GIMPLE_RETURN:
1879 {
1880 tree t = gimple_return_retval (as_a <greturn *> (stmt));
1881 if (t != NULL_TREE)
1882 scan_expr_access (t, stmt, ISRA_CTX_LOAD, bb);
1883 bitmap_set_bit (final_bbs, bb->index);
1884 }
1885 break;
1886
1887 case GIMPLE_ASSIGN:
1888 if (gimple_assign_single_p (stmt)
1889 && !gimple_clobber_p (stmt))
1890 {
1891 tree rhs = gimple_assign_rhs1 (stmt);
1892 scan_expr_access (rhs, stmt, ISRA_CTX_LOAD, bb);
1893 tree lhs = gimple_assign_lhs (stmt);
1894 scan_expr_access (lhs, stmt, ISRA_CTX_STORE, bb);
1895 }
1896 break;
1897
1898 case GIMPLE_CALL:
1899 {
1900 unsigned argument_count = gimple_call_num_args (stmt);
1901 isra_scan_context ctx = ISRA_CTX_ARG;
1902 scan_call_info call_info, *call_info_p = &call_info;
1903 if (gimple_call_internal_p (stmt))
1904 {
1905 call_info_p = NULL;
1906 ctx = ISRA_CTX_LOAD;
1907 internal_fn ifn = gimple_call_internal_fn (stmt);
1908 if (internal_store_fn_p (ifn))
1909 ctx = ISRA_CTX_STORE;
1910 }
1911 else
1912 {
1913 call_info.cs = node->get_edge (stmt);
1914 call_info.argument_count = argument_count;
1915 }
1916
1917 for (unsigned i = 0; i < argument_count; i++)
1918 {
1919 call_info.arg_idx = i;
1920 scan_expr_access (gimple_call_arg (stmt, i), stmt,
1921 ctx, bb, call_info_p);
1922 }
1923
1924 tree lhs = gimple_call_lhs (stmt);
1925 if (lhs)
1926 scan_expr_access (lhs, stmt, ISRA_CTX_STORE, bb);
1927 int flags = gimple_call_flags (stmt);
1928 if ((flags & (ECF_CONST | ECF_PURE)) == 0)
1929 bitmap_set_bit (final_bbs, bb->index);
1930 }
1931 break;
1932
1933 case GIMPLE_ASM:
1934 {
1935 gasm *asm_stmt = as_a <gasm *> (stmt);
1936 walk_stmt_load_store_addr_ops (asm_stmt, NULL, NULL, NULL,
1937 asm_visit_addr);
1938 bitmap_set_bit (final_bbs, bb->index);
1939
1940 for (unsigned i = 0; i < gimple_asm_ninputs (asm_stmt); i++)
1941 {
1942 tree t = TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
1943 scan_expr_access (t, stmt, ISRA_CTX_LOAD, bb);
1944 }
1945 for (unsigned i = 0; i < gimple_asm_noutputs (asm_stmt); i++)
1946 {
1947 tree t = TREE_VALUE (gimple_asm_output_op (asm_stmt, i));
1948 scan_expr_access (t, stmt, ISRA_CTX_STORE, bb);
1949 }
1950 }
1951 break;
1952
1953 default:
1954 break;
1955 }
1956 }
1957 }
1958}
1959
1960/* Return true if SSA_NAME NAME of function described by FUN is only used in
1961 return statements, or if results of any operations it is involved in are
1962 only used in return statements. ANALYZED is a bitmap that tracks which SSA
1963 names we have already started investigating. */
1964
1965static bool
1966ssa_name_only_returned_p (function *fun, tree name, bitmap analyzed)
1967{
1968 bool res = true;
1969 imm_use_iterator imm_iter;
1970 gimple *stmt;
1971
1972 FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
1973 {
1974 if (is_gimple_debug (stmt))
1975 continue;
1976
1977 if (gimple_code (stmt) == GIMPLE_RETURN)
1978 {
1979 tree t = gimple_return_retval (as_a <greturn *> (stmt));
1980 if (t != name)
1981 {
1982 res = false;
1983 break;
1984 }
1985 }
1986 else if (!stmt_unremovable_because_of_non_call_eh_p (fun, stmt)
1987 && ((is_gimple_assign (stmt) && !gimple_has_volatile_ops (stmt))
1988 || gimple_code (stmt) == GIMPLE_PHI))
1989 {
1990 /* TODO: And perhaps for const function calls too? */
1991 tree lhs;
1992 if (gimple_code (stmt) == GIMPLE_PHI)
1993 lhs = gimple_phi_result (stmt);
1994 else
1995 lhs = gimple_assign_lhs (stmt);
1996
1997 if (TREE_CODE (lhs) != SSA_NAME)
1998 {
1999 res = false;
2000 break;
2001 }
2002 gcc_assert (!gimple_vdef (stmt));
2003 if (bitmap_set_bit (analyzed, SSA_NAME_VERSION (lhs))
2004 && !ssa_name_only_returned_p (fun, lhs, analyzed))
2005 {
2006 res = false;
2007 break;
2008 }
2009 }
2010 else
2011 {
2012 res = false;
2013 break;
2014 }
2015 }
2016 return res;
2017}
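
Expressed at the source level, the property looks like this (callee is hypothetical): in f the call result, even after flowing through an arithmetic operation, only feeds the return value; in g it also escapes into memory, so the predicate fails:

extern int callee (void);

int
f (void)
{
  int t = callee ();
  return t + 1;	/* Result only reaches return statements.  */
}

int
g (int *p)
{
  int t = callee ();
  *p = t;	/* Escapes through a store: not "only returned".  */
  return t;
}
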
2018
2019/* Inspect the uses of the return value of the call associated with CS, and if
2020 it is not used or if it is only used to construct the return value of the
2021 caller, mark it as such in call or caller summary. Also check for
2022 misaligned arguments. */
2023
2024static void
2025isra_analyze_call (cgraph_edge *cs)
2026{
2027 gcall *call_stmt = cs->call_stmt;
2028 unsigned count = gimple_call_num_args (call_stmt);
2029 isra_call_summary *csum = call_sums->get_create (cs);
2030
2031 for (unsigned i = 0; i < count; i++)
2032 {
2033 tree arg = gimple_call_arg (call_stmt, i);
2034 if (is_gimple_reg (arg))
2035 continue;
2036
2037 tree offset;
2038 poly_int64 bitsize, bitpos;
2039 machine_mode mode;
2040 int unsignedp, reversep, volatilep = 0;
2041 get_inner_reference (arg, &bitsize, &bitpos, &offset, &mode,
2042 &unsignedp, &reversep, &volatilep);
2043 if (!multiple_p (bitpos, BITS_PER_UNIT))
2044 {
2045 csum->m_bit_aligned_arg = true;
2046 break;
2047 }
2048 }
2049
2050 tree lhs = gimple_call_lhs (call_stmt);
2051 if (lhs)
2052 {
2053 /* TODO: Also detect aggregates on a LHS of a call that are only returned
2054 from this function (without being read anywhere). */
2055 if (TREE_CODE (lhs) == SSA_NAME)
2056 {
2057 bitmap analyzed = BITMAP_ALLOC (NULL);
2058 if (ssa_name_only_returned_p (DECL_STRUCT_FUNCTION (cs->caller->decl),
2059 lhs, analyzed))
2060 csum->m_return_returned = true;
2061 BITMAP_FREE (analyzed);
2062 }
2063 }
2064 else
2065 csum->m_return_ignored = true;
2066}
2067
2068/* Look at all calls going out of NODE, described also by IFS, and perform all
2069 analyses necessary for IPA-SRA that are not done at body scan time, or that
2070 must be done even when the body is not scanned because the function is not a candidate. */
2071
2072static void
2073isra_analyze_all_outgoing_calls (cgraph_node *node)
2074{
2075 for (cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
2076 isra_analyze_call (cs);
2077 for (cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
2078 isra_analyze_call (cs);
2079}
2080
2081/* Dump a dereferences table with heading STR to file F. */
2082
2083static void
2084dump_dereferences_table (FILE *f, struct function *fun, const char *str)
2085{
2086 basic_block bb;
2087
2088 fprintf (dump_file, "%s", str);
2089 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (fun),
2090 EXIT_BLOCK_PTR_FOR_FN (fun), next_bb)
2091 {
2092 fprintf (f, "%4i %i ", bb->index, bitmap_bit_p (final_bbs, bb->index));
2093 if (bb != EXIT_BLOCK_PTR_FOR_FN (fun))
2094 {
2095 int i;
2096 for (i = 0; i < by_ref_count; i++)
2097 {
2098 int idx = bb->index * by_ref_count + i;
2099 fprintf (f, " %4" HOST_WIDE_INT_PRINT"l" "d", bb_dereferences[idx]);
2100 }
2101 }
2102 fprintf (f, "\n");
2103 }
2104 fprintf (dump_file, "\n");
2105}
2106
2107/* Propagate distances in bb_dereferences in the opposite direction than the
2108 control flow edges, in each step storing the maximum of the current value
2109 and the minimum of all successors. These steps are repeated until the table
2110 stabilizes. Note that BBs which might terminate the function (according to
2111 the final_bbs bitmap) are never updated in this way. */
2112
2113static void
2114propagate_dereference_distances (struct function *fun)
2115{
2116 basic_block bb;
2117
2118 if (dump_file && (dump_flags & TDF_DETAILS))
2119 dump_dereferences_table (dump_file, fun,
2120 "Dereference table before propagation:\n");
2121
2122 auto_vec<basic_block> queue (last_basic_block_for_fn (fun));
2123 queue.quick_push (ENTRY_BLOCK_PTR_FOR_FN (fun));
2124 FOR_EACH_BB_FN (bb, fun)
2125 {
2126 queue.quick_push (bb);
2127 bb->aux = bb;
2128 }
2129
2130 while (!queue.is_empty ())
2131 {
2132 edge_iterator ei;
2133 edge e;
2134 bool change = false;
2135 int i;
2136
2137 bb = queue.pop ();
2138 bb->aux = NULL;
2139
2140 if (bitmap_bit_p (final_bbs, bb->index))
2141 continue;
2142
2143 for (i = 0; i < by_ref_count; i++)
2144 {
2145 int idx = bb->index * by_ref_count + i;
2146 bool first = true;
2147 HOST_WIDE_INT inh = 0;
2148
2149 FOR_EACH_EDGE (e, ei, bb->succs)
2150 {
2151 int succ_idx = e->dest->index * by_ref_count + i;
2152
2153 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (fun))
2154 continue;
2155
2156 if (first)
2157 {
2158 first = false;
2159 inh = bb_dereferences [succ_idx];
2160 }
2161 else if (bb_dereferences [succ_idx] < inh)
2162 inh = bb_dereferences [succ_idx];
2163 }
2164
2165 if (!first && bb_dereferences[idx] < inh)
2166 {
2167 bb_dereferences[idx] = inh;
2168 change = true;
2169 }
2170 }
2171
2172 if (change)
2173 FOR_EACH_EDGE (e, ei, bb->preds)
2174 {
2175 if (e->src->aux)
2176 continue;
2177
2178 e->src->aux = e->src;
2179 queue.quick_push (e->src);
2180 }
2181 }
2182
2183 if (dump_file && (dump_flags & TDF_DETAILS))
2184 dump_dereferences_table (dump_file, fun,
2185 "Dereference table after propagation:\n");
2186}
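
The propagation is a backward dataflow over the CFG: a block inherits the minimum of its successors' guaranteed dereference distances and keeps the maximum of that and its own value, until a fixed point. A compact standalone sketch on an adjacency-list CFG (hypothetical arrays; it iterates in rounds where the code above uses a worklist, and final/exit blocks are simply skipped):

#define MAX_BB 16

/* succs[b] lists the successors of block b, nsuccs[b] their count;
   dist[b] is the dereference distance guaranteed whenever b executes.  */
static void
propagate (long dist[MAX_BB], int nbb,
	   const int succs[MAX_BB][MAX_BB], const int nsuccs[MAX_BB],
	   const int is_final[MAX_BB])
{
  int changed = 1;
  while (changed)
    {
      changed = 0;
      for (int b = 0; b < nbb; b++)
	{
	  if (is_final[b] || nsuccs[b] == 0)
	    continue;
	  /* Minimum over all successors...  */
	  long inh = dist[succs[b][0]];
	  for (int i = 1; i < nsuccs[b]; i++)
	    if (dist[succs[b][i]] < inh)
	      inh = dist[succs[b][i]];
	  /* ...maximized with the block's own value.  */
	  if (dist[b] < inh)
	    {
	      dist[b] = inh;
	      changed = 1;
	    }
	}
    }
}
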
2187
2188/* Perform basic checks on ACCESS to PARM described by DESC and all its
2189 children, return true if the parameter cannot be split, otherwise return
2190 false and update *NONARG_ACC_SIZE and *ONLY_CALLS. ENTRY_BB_INDEX must be the
2191 index of the entry BB in the function of PARM. */
2192
2193static bool
2194check_gensum_access (tree parm, gensum_param_desc *desc,
2195 gensum_param_access *access,
2196 HOST_WIDE_INT *nonarg_acc_size, bool *only_calls,
2197 int entry_bb_index)
2198{
2199 if (access->nonarg)
2200 {
2201 *only_calls = false;
2202 *nonarg_acc_size += access->size;
2203
2204 if (access->first_child)
2205 {
2206 disqualify_split_candidate (desc, "Overlapping non-call uses.");
2207 return true;
2208 }
2209 }
2210 /* Do not decompose a non-BLKmode param in a way that would create
2211 BLKmode params. Especially for by-reference passing (thus,
2212 pointer-type param) this is hardly worthwhile. */
2213 if (DECL_MODE (parm) != BLKmode
2214 && TYPE_MODE (access->type) == BLKmode)
2215 {
2216 disqualify_split_candidate (desc, "Would convert a non-BLK to a BLK.");
2217 return true;
2218 }
2219
2220 if (desc->by_ref)
2221 {
2222 int idx = (entry_bb_index * by_ref_count + desc->deref_index);
2223 if ((access->offset + access->size) > bb_dereferences[idx])
2224 {
2225 disqualify_split_candidate (desc, "Would create a possibly "
2226 "illegal dereference in a caller.");
2227 return true;
2228 }
2229 }
2230
2231 for (gensum_param_access *ch = access->first_child;
2232 ch;
2233 ch = ch->next_sibling)
2234 if (check_gensum_access (parm, desc, ch, nonarg_acc_size, only_calls,
2235 entry_bb_index))
2236 return true;
2237
2238 return false;
2239}
2240
2241/* Copy data from FROM and all of its children to a vector of accesses in IPA
2242 descriptor DESC. */
2243
2244static void
2245copy_accesses_to_ipa_desc (gensum_param_access *from, isra_param_desc *desc)
2246{
2247 param_access *to = ggc_cleared_alloc<param_access> ();
2248 gcc_checking_assert ((from->offset % BITS_PER_UNIT) == 0);
2249 gcc_checking_assert ((from->size % BITS_PER_UNIT) == 0);
2250 to->unit_offset = from->offset / BITS_PER_UNIT;
2251 to->unit_size = from->size / BITS_PER_UNIT;
2252 to->type = from->type;
2253 to->alias_ptr_type = from->alias_ptr_type;
2254 to->certain = from->nonarg;
2255 to->reverse = from->reverse;
2256 vec_safe_push (desc->accesses, to);
2257
2258 for (gensum_param_access *ch = from->first_child;
2259 ch;
2260 ch = ch->next_sibling)
2261 copy_accesses_to_ipa_desc (ch, desc);
2262}
2263
2264/* Analyze function body scan results stored in PARAM_DESCRIPTIONS and the
2265 access trees hanging off them, detect possible transformations and store
2266 information about those in the function summary. NODE, FUN and IFS are all
2267 various structures describing the currently analyzed function. */
2268
2269static void
2270process_scan_results (cgraph_node *node, struct function *fun,
2271 isra_func_summary *ifs,
2272 vec<gensum_param_desc> *param_descriptions)
2273{
2274 bool check_pass_throughs = false;
2275 bool dereferences_propagated = false;
2276 tree parm = DECL_ARGUMENTS (node->decl);
2277 unsigned param_count = param_descriptions->length();
2278
2279 for (unsigned desc_index = 0;
2280 desc_index < param_count;
2281 desc_index++, parm = DECL_CHAIN (parm))
2283 gensum_param_desc *desc = &(*param_descriptions)[desc_index];
2284 if (!desc->split_candidate)
2285 continue;
2286
2287 if (flag_checking)
2288 isra_verify_access_tree (desc->accesses);
2289
2290 if (!dereferences_propagated
2291 && desc->by_ref
2292 && desc->accesses)
2293 {
2294 propagate_dereference_distances (fun);
2295 dereferences_propagated = true;
2296 }
2297
2298 HOST_WIDE_INT nonarg_acc_size = 0;
2299 bool only_calls = true;
2300 bool check_failed = false;
2301
2302 int entry_bb_index = ENTRY_BLOCK_PTR_FOR_FN (fun)->index;
2303 for (gensum_param_access *acc = desc->accesses;
2304 acc;
2305 acc = acc->next_sibling)
2306 if (check_gensum_access (parm, desc, acc, &nonarg_acc_size, &only_calls,
2307 entry_bb_index))
2308 {
2309 check_failed = true;
2310 break;
2311 }
2312 if (check_failed)
2313 continue;
2314
2315 if (only_calls)
2316 desc->locally_unused = true;
2317
2318 HOST_WIDE_INT cur_param_size
2319 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (parm)));
2320 HOST_WIDE_INT param_size_limit;
2321 if (!desc->by_ref || optimize_function_for_size_p (fun))
2322 param_size_limit = cur_param_size;
2323 else
2324 param_size_limit
2325 = opt_for_fn (node->decl,
2326 param_ipa_sra_ptr_growth_factor) * cur_param_size;
2327 if (nonarg_acc_size > param_size_limit
2328 || (!desc->by_ref && nonarg_acc_size == param_size_limit))
2329 {
2330 disqualify_split_candidate (desc, "Would result into a too big set "
2331 "of replacements.");
2332 }
2333 else
2334 {
2335 /* create_parameter_descriptors makes sure unit sizes of all
2336 candidate parameters fit unsigned integers restricted to
2337 ISRA_ARG_SIZE_LIMIT. */
2338 desc->param_size_limit = param_size_limit / BITS_PER_UNIT;
2339 desc->nonarg_acc_size = nonarg_acc_size / BITS_PER_UNIT;
2340 if (desc->split_candidate && desc->ptr_pt_count)
2341 {
2342 gcc_assert (desc->by_ref);
2343 check_pass_throughs = true;
2344 }
2345 }
2346 }
2347
2348 /* When a pointer parameter is passed-through to a callee, in which it is
2349 only used to read only one or a few items, we can attempt to transform it
2350 to obtaining and passing through the items instead of the pointer. But we
2351 must take extra care that 1) we do not introduce any segfault by moving
2352 dereferences above control flow and that 2) the data is not modified
2353 through an alias in this function. The IPA analysis must not introduce
2354 any access candidates unless it can prove both.
2355
2356 The current solution is very crude as it consists of ensuring that the
2357 call postdominates entry BB and that the definition of VUSE of the call is
2358 default definition. TODO: For non-recursive callees in the same
2359 compilation unit we could do better by doing analysis in topological order
2360 and looking into access candidates of callees, using their alias_ptr_types
2361 to attempt real AA. We could also use the maximum known dereferenced
2362 offset in this function at IPA level.
2363
2364 TODO: Measure the overhead and the effect of just being pessimistic.
2365 Maybe this is only -O3 material?
2366 */
2367 bool pdoms_calculated = false;
2368 if (check_pass_throughs)
2369 for (cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
2370 {
2371 gcall *call_stmt = cs->call_stmt;
2372 tree vuse = gimple_vuse (call_stmt);
2373
2374 /* If the callee is a const function, we don't get a VUSE. In such
2375 case there will be no memory accesses in the called function (or the
2376 const attribute is wrong) and then we just don't care. */
2377 bool uses_memory_as_obtained = vuse && SSA_NAME_IS_DEFAULT_DEF (vuse);
2378
2379 unsigned count = gimple_call_num_args (call_stmt);
2380 isra_call_summary *csum = call_sums->get_create (cs);
2381 csum->init_inputs (count);
2382 csum->m_before_any_store = uses_memory_as_obtained;
2383 for (unsigned argidx = 0; argidx < count; argidx++)
2384 {
2385 if (!csum->m_arg_flow[argidx].pointer_pass_through)
2386 continue;
2387 unsigned pidx
2388 = get_single_param_flow_source (&csum->m_arg_flow[argidx]);
2389 gensum_param_desc *desc = &(*param_descriptions)[pidx];
2390 if (!desc->split_candidate)
2391 {
2392 csum->m_arg_flow[argidx].pointer_pass_through = false;
2393 continue;
2394 }
2395 if (!uses_memory_as_obtained)
2396 continue;
2397
2398 /* Post-dominator check placed last, hoping that it usually won't
2399 be needed. */
2400 if (!pdoms_calculated)
2401 {
2402 gcc_checking_assert (cfun);
2403 connect_infinite_loops_to_exit ();
2404 calculate_dominance_info (CDI_POST_DOMINATORS);
2405 pdoms_calculated = true;
2406 }
2407 if (dominated_by_p (CDI_POST_DOMINATORS,
2408 gimple_bb (call_stmt),
2409 single_succ (ENTRY_BLOCK_PTR_FOR_FN (fun))))
2410 csum->m_arg_flow[argidx].safe_to_import_accesses = true;
2411 }
2412
2413 }
2414 if (pdoms_calculated)
2415 {
2416 free_dominance_info (CDI_POST_DOMINATORS);
2417 remove_fake_exit_edges ();
2418 }
2419
2420 /* TODO: Add early exit if we disqualified everything. This also requires
2421 that we either relax the restriction that
2422 ipa_param_adjustments.m_always_copy_start must be the number of PARM_DECLs
2423 or store the number of parameters to IPA-SRA function summary and use that
2424 when just removing params. */
2425
2426 vec_safe_reserve_exact (ifs->m_parameters, param_count);
2427 ifs->m_parameters->quick_grow_cleared (param_count);
2428 for (unsigned desc_index = 0; desc_index < param_count; desc_index++)
2429 {
2430 gensum_param_desc *s = &(*param_descriptions)[desc_index];
2431 isra_param_desc *d = &(*ifs->m_parameters)[desc_index];
2432
2433 d->param_size_limit = s->param_size_limit;
2434 d->size_reached = s->nonarg_acc_size;
2435 d->locally_unused = s->locally_unused;
2436 d->split_candidate = s->split_candidate;
2437 d->by_ref = s->by_ref;
2438
2439 for (gensum_param_access *acc = s->accesses;
2440 acc;
2441 acc = acc->next_sibling)
2442 copy_accesses_to_ipa_desc (acc, d);
2443 }
2444
2445 if (dump_file)
2446 dump_isra_param_descriptors (dump_file, node->decl, ifs);
2447}
2448
2449/* Return true if there are any overlaps among certain accesses of DESC. If
2450 non-NULL, set *CERTAIN_ACCESS_PRESENT_P upon encountering a certain access
2451 too. DESC is assumed to be a split candidate that is not locally
2452 unused. */
2453
2454static bool
2455overlapping_certain_accesses_p (isra_param_desc *desc,
2456 bool *certain_access_present_p)
2457{
2458 unsigned pclen = vec_safe_length (desc->accesses);
2459 for (unsigned i = 0; i < pclen; i++)
2460 {
2461 param_access *a1 = (*desc->accesses)[i];
2462
2463 if (!a1->certain)
2464 continue;
2465 if (certain_access_present_p)
2466 *certain_access_present_p = true;
2467 for (unsigned j = i + 1; j < pclen; j++)
2468 {
2469 param_access *a2 = (*desc->accesses)[j];
2470 if (a2->certain
2471 && a1->unit_offset < a2->unit_offset + a2->unit_size
2472 && a1->unit_offset + a1->unit_size > a2->unit_offset)
2473 return true;
2474 }
2475 }
2476 return false;
2477}
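
The inner test is the usual half-open interval intersection check: two accesses overlap exactly when each one starts before the other ends. As a one-function sketch:

/* Return 1 iff [o1, o1 + s1) and [o2, o2 + s2) intersect.  */
static int
units_overlap_p (unsigned o1, unsigned s1, unsigned o2, unsigned s2)
{
  return o1 < o2 + s2 && o2 < o1 + s1;
}
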
2478
2479/* Check for any overlaps of certain param accesses among splitting candidates
2480 and signal an ICE if there are any. If CERTAIN_MUST_EXIST is set, also
2481 check that used splitting candidates have at least one certain access. */
2482
2483static void
2484verify_splitting_accesses (cgraph_node *node, bool certain_must_exist)
2485{
2486 isra_func_summary *ifs = func_sums->get (node);
2487 if (!ifs || !ifs->m_candidate)
2488 return;
2489 unsigned param_count = vec_safe_length (ifs->m_parameters);
2490 for (unsigned pidx = 0; pidx < param_count; pidx++)
2491 {
2492 isra_param_desc *desc = &(*ifs->m_parameters)[pidx];
2493 if (!desc->split_candidate || desc->locally_unused)
2494 continue;
2495
2496 bool certain_access_present = !certain_must_exist;
2497 if (overlapping_certain_accesses_p (desc, &certain_access_present))
2498 internal_error ("Function %qs, parameter %u, has IPA-SRA accesses "
2499 "which overlap", node->dump_name (), pidx);
2500 if (!certain_access_present)
2501 internal_error ("Function %s, parameter %u, is used but does not "
2502 "have any certain IPA-SRA access",
2503 node->dump_name (), pidx);
2504 }
2505}
2506
2507/* Intraprocedural part of IPA-SRA analysis. Scan bodies of all functions in
2508 this compilation unit and create summary structures describing IPA-SRA
2509 opportunities and constraints in them. */
2510
2511static void
2512ipa_sra_generate_summary (void)
2513{
2514 struct cgraph_node *node;
2515
2516 gcc_checking_assert (!func_sums);
2517 gcc_checking_assert (!call_sums);
2518 func_sums
2519 = (new (ggc_alloc_no_dtor <ipa_sra_function_summaries> ())
2520 ipa_sra_function_summaries (symtab, true));
2521 call_sums = new ipa_sra_call_summaries (symtab);
2522
2523 FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
2524 ipa_sra_summarize_function (node);
2525 return;
2526}
2527
2528/* Write intraprocedural analysis information about the call graph edge E into
2529 a stream for LTO WPA. */
2530
2531static void
2532isra_write_edge_summary (output_block *ob, cgraph_edge *e)
2533{
2534 isra_call_summary *csum = call_sums->get (e);
2535 unsigned input_count = csum->m_arg_flow.length ();
2536 streamer_write_uhwi (ob, input_count);
2537 for (unsigned i = 0; i < input_count; i++)
2538 {
2539 isra_param_flow *ipf = &csum->m_arg_flow[i];
2540 streamer_write_hwi (ob, ipf->length);
2541 bitpack_d bp = bitpack_create (ob->main_stream);
2542 for (int j = 0; j < ipf->length; j++)
2543 bp_pack_value (&bp, ipf->inputs[j], 8);
2544 bp_pack_value (&bp, ipf->aggregate_pass_through, 1);
2545 bp_pack_value (&bp, ipf->pointer_pass_through, 1);
2546 bp_pack_value (&bp, ipf->safe_to_import_accesses, 1);
2547 streamer_write_bitpack (&bp);
2548 streamer_write_uhwi (ob, ipf->unit_offset);
2549 streamer_write_uhwi (ob, ipf->unit_size);
2550 }
2551 bitpack_d bp = bitpack_create (ob->main_stream);
2552 bp_pack_value (&bp, csum->m_return_ignored, 1);
2553 bp_pack_value (&bp, csum->m_return_returned, 1);
2554 bp_pack_value (&bp, csum->m_bit_aligned_arg, 1);
2555 bp_pack_value (&bp, csum->m_before_any_store, 1);
2556 streamer_write_bitpack (&bp);
2557}
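
The reader (isra_read_edge_summary below) must unpack these flags in exactly the order and with exactly the widths they were packed. A tiny standalone sketch of that discipline, using a plain 64-bit word in place of GCC's bitpack_d (hypothetical helpers):

#include <stdint.h>

/* Pack three 1-bit flags and one 8-bit field, least significant bit
   first.  */
static uint64_t
pack (int f1, int f2, int f3, unsigned v8)
{
  uint64_t w = 0;
  int pos = 0;
  w |= (uint64_t) (f1 & 1) << pos; pos += 1;
  w |= (uint64_t) (f2 & 1) << pos; pos += 1;
  w |= (uint64_t) (f3 & 1) << pos; pos += 1;
  w |= (uint64_t) (v8 & 0xff) << pos;
  return w;
}

/* Unpacking must mirror the packing order and widths exactly, otherwise
   every later field is misread.  */
static void
unpack (uint64_t w, int *f1, int *f2, int *f3, unsigned *v8)
{
  int pos = 0;
  *f1 = (int) ((w >> pos) & 1); pos += 1;
  *f2 = (int) ((w >> pos) & 1); pos += 1;
  *f3 = (int) ((w >> pos) & 1); pos += 1;
  *v8 = (unsigned) ((w >> pos) & 0xff);
}
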
2558
2559/* Write intraprocedural analysis information about NODE and all of its outgoing
2560 edges into a stream for LTO WPA. */
2561
2562static void
2563isra_write_node_summary (output_block *ob, cgraph_node *node)
2564{
2565 isra_func_summary *ifs = func_sums->get (node);
2566 lto_symtab_encoder_t encoder = ob->decl_state->symtab_node_encoder;
2567 int node_ref = lto_symtab_encoder_encode (encoder, node);
2568 streamer_write_uhwi (ob, node_ref);
2569
2570 unsigned param_desc_count = vec_safe_length (ifs->m_parameters);
2571 streamer_write_uhwi (ob, param_desc_count);
2572 for (unsigned i = 0; i < param_desc_count; i++)
2573 {
2574 isra_param_desc *desc = &(*ifs->m_parameters)[i];
2575 unsigned access_count = vec_safe_length (desc->accesses);
2576 streamer_write_uhwi (ob, access_count);
2577 for (unsigned j = 0; j < access_count; j++)
2578 {
2579 param_access *acc = (*desc->accesses)[j];
2580 stream_write_tree (ob, acc->type, true);
2581 stream_write_tree (ob, acc->alias_ptr_type, true);
2582 streamer_write_uhwi (ob, acc->unit_offset);
2583 streamer_write_uhwi (ob, acc->unit_size);
2584 bitpack_d bp = bitpack_create (ob->main_stream);
2585 bp_pack_value (&bp, acc->certain, 1);
2586 streamer_write_bitpack (&bp);
2587 }
2588 streamer_write_uhwi (ob, desc->param_size_limit);
2589 streamer_write_uhwi (ob, desc->size_reached);
2590 bitpack_d bp = bitpack_create (ob->main_stream);
2591 bp_pack_value (&bp, desc->locally_unused, 1);
2592 bp_pack_value (&bp, desc->split_candidate, 1);
2593 bp_pack_value (&bp, desc->by_ref, 1);
2594 streamer_write_bitpack (&bp);
2595 }
2596 bitpack_d bp = bitpack_create (ob->main_stream);
2597 bp_pack_value (&bp, ifs->m_candidate, 1);
2598 bp_pack_value (&bp, ifs->m_returns_value, 1);
2599 bp_pack_value (&bp, ifs->m_return_ignored, 1);
2600 gcc_assert (!ifs->m_queued);
2601 streamer_write_bitpack (&bp);
2602
2603 for (cgraph_edge *e = node->callees; e; e = e->next_callee)
2604 isra_write_edge_summary (ob, e);
2605 for (cgraph_edge *e = node->indirect_calls; e; e = e->next_callee)
2606 isra_write_edge_summary (ob, e);
2607}
2608
2609/* Write intraprocedural analysis information into a stream for LTO WPA. */
2610
2611static void
2612ipa_sra_write_summary (void)
2613{
2614 if (!func_sums || !call_sums)
2615 return;
2616
2617 struct output_block *ob = create_output_block (LTO_section_ipa_sra);
2618 lto_symtab_encoder_t encoder = ob->decl_state->symtab_node_encoder;
2619 ob->symbol = NULL;
2620
2621 unsigned int count = 0;
2622 lto_symtab_encoder_iterator lsei;
2623 for (lsei = lsei_start_function_in_partition (encoder);
2624 !lsei_end_p (lsei);
2625 lsei_next_function_in_partition (&lsei))
2626 {
2627 cgraph_node *node = lsei_cgraph_node (lsei);
2628 if (node->has_gimple_body_p ()
2629 && func_sums->get (node) != NULL)
2630 count++;
2631 }
2632 streamer_write_uhwi (ob, count);
2633
2634 /* Process all of the functions. */
2635 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
2636 lsei_next_function_in_partition (&lsei))
2637 {
2638 cgraph_node *node = lsei_cgraph_node (lsei);
2639 if (node->has_gimple_body_p ()
2640 && func_sums->get (node) != NULL)
2641 isra_write_node_summary (ob, node);
2642 }
2643 streamer_write_char_stream (ob->main_stream, 0);
2644 produce_asm (ob, NULL);
2645 destroy_output_block (ob);
2646}
2647
2648/* Read intraprocedural analysis information about the call edge CS from a
2649 stream for LTO WPA. */
2650
2651static void
2652isra_read_edge_summary (struct lto_input_block *ib, cgraph_edge *cs)
2653{
2654 isra_call_summary *csum = call_sums->get_create (cs);
2655 unsigned input_count = streamer_read_uhwi (ib);
2656 csum->init_inputs (input_count);
2657 for (unsigned i = 0; i < input_count; i++)
2658 {
2659 isra_param_flow *ipf = &csum->m_arg_flow[i];
2660 ipf->length = streamer_read_hwi (ib);
2661 bitpack_d bp = streamer_read_bitpack (ib);
2662 for (int j = 0; j < ipf->length; j++)
2663 ipf->inputs[j] = bp_unpack_value (&bp, 8);
2664 ipf->aggregate_pass_through = bp_unpack_value (&bp, 1);
2665 ipf->pointer_pass_through = bp_unpack_value (&bp, 1);
2666 ipf->safe_to_import_accesses = bp_unpack_value (&bp, 1);
2667 ipf->unit_offset = streamer_read_uhwi (ib);
2668 ipf->unit_size = streamer_read_uhwi (ib);
2669 }
2670 bitpack_d bp = streamer_read_bitpack (ib);
2671 csum->m_return_ignored = bp_unpack_value (&bp, 1);
2672 csum->m_return_returned = bp_unpack_value (&bp, 1);
2673 csum->m_bit_aligned_arg = bp_unpack_value (&bp, 1);
2674 csum->m_before_any_store = bp_unpack_value (&bp, 1);
2675}
2676
2677/* Read intraprocedural analysis information about NODE and all of its
2678 outgoing edges from a stream for LTO WPA. */
2679
2680static void
2681isra_read_node_info (struct lto_input_block *ib, cgraph_node *node,
2682 struct data_in *data_in)
2683{
2684 isra_func_summary *ifs = func_sums->get_create (node);
2685 unsigned param_desc_count = streamer_read_uhwi (ib);
2686 if (param_desc_count > 0)
2687 {
2688 vec_safe_reserve_exact (ifs->m_parameters, param_desc_count);
2689 ifs->m_parameters->quick_grow_cleared (param_desc_count);
2690 }
2691 for (unsigned i = 0; i < param_desc_count; i++)
2692 {
2693 isra_param_desc *desc = &(*ifs->m_parameters)[i];
2694 unsigned access_count = streamer_read_uhwi (ib);
2695 for (unsigned j = 0; j < access_count; j++)
2696 {
2697 param_access *acc = ggc_cleared_alloc<param_access> ();
2698 acc->type = stream_read_tree (ib, data_in);
2699 acc->alias_ptr_type = stream_read_tree (ib, data_in);
2700 acc->unit_offset = streamer_read_uhwi (ib);
2701 acc->unit_size = streamer_read_uhwi (ib);
2702 bitpack_d bp = streamer_read_bitpack (ib);
2703 acc->certain = bp_unpack_value (&bp, 1);
2704 vec_safe_push (desc->accesses, acc);
2705 }
2706 desc->param_size_limit = streamer_read_uhwi (ib);
2707 desc->size_reached = streamer_read_uhwi (ib);
2708 bitpack_d bp = streamer_read_bitpack (ib);
2709 desc->locally_unused = bp_unpack_value (&bp, 1);
2710 desc->split_candidate = bp_unpack_value (&bp, 1);
2711 desc->by_ref = bp_unpack_value (&bp, 1);
2712 }
2713 bitpack_d bp = streamer_read_bitpack (ib);
2714 ifs->m_candidate = bp_unpack_value (&bp, 1);
2715 ifs->m_returns_value = bp_unpack_value (&bp, 1);
2716 ifs->m_return_ignored = bp_unpack_value (&bp, 1);
2717 ifs->m_queued = 0;
2718
2719 for (cgraph_edge *e = node->callees; e; e = e->next_callee)
2720 isra_read_edge_summary (ib, e);
2721 for (cgraph_edge *e = node->indirect_calls; e; e = e->next_callee)
2722 isra_read_edge_summary (ib, e);
2723}
2724
2725/* Read IPA-SRA summaries from a section in file FILE_DATA of length LEN with
2726 data DATA. TODO: This function was copied almost verbatim from ipa-prop.c,
2727 it should be possible to unify them somehow. */
2728
2729static void
2730isra_read_summary_section (struct lto_file_decl_data *file_data,
2731 const char *data, size_t len)
2732{
2733 const struct lto_function_header *header =
2734 (const struct lto_function_header *) data;
2735 const int cfg_offset = sizeof (struct lto_function_header);
2736 const int main_offset = cfg_offset + header->cfg_size;
2737 const int string_offset = main_offset + header->main_size;
2738 struct data_in *data_in;
2739 unsigned int i;
2740 unsigned int count;
2741
2742 lto_input_block ib_main ((const char *) data + main_offset,
2743 header->main_size, file_data->mode_table);
2744
2745 data_in =
2746 lto_data_in_create (file_data, (const char *) data + string_offset,
2747 header->string_size, vNULL);
2748 count = streamer_read_uhwi (&ib_main);
2749
2750 for (i = 0; i < count; i++)
2751 {
2752 unsigned int index;
2753 struct cgraph_node *node;
2754 lto_symtab_encoder_t encoder;
2755
2756 index = streamer_read_uhwi (&ib_main);
2757 encoder = file_data->symtab_node_encoder;
2758 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
2759 index));
2760 gcc_assert (node->definition);
2761 isra_read_node_info (&ib_main, node, data_in);
2762 }
2763 lto_free_section_data (file_data, LTO_section_ipa_sra, NULL, data,
2764 len);
2765 lto_data_in_delete (data_in);
2766}
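
The function above relies on the standard LTO layout of a summary section: a
function header followed by the CFG stream, the main stream and the string
table laid out back to back, so each offset is just the running sum of the
preceding sizes. A standalone sketch of that arithmetic; toy_lto_header and
the sizes are hypothetical stand-ins for the real lto_function_header:

#include <cstdio>

struct toy_lto_header
{
  int cfg_size;
  int main_size;
  int string_size;
};

int main ()
{
  toy_lto_header header = { 16, 200, 40 };	/* hypothetical sizes */

  /* Same arithmetic as in isra_read_summary_section.  */
  const int cfg_offset = sizeof (toy_lto_header);
  const int main_offset = cfg_offset + header.cfg_size;
  const int string_offset = main_offset + header.main_size;

  printf ("cfg at %d, main at %d, strings at %d\n",
	  cfg_offset, main_offset, string_offset);
  return 0;
}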
2767
2768/* Read intraprocedural analysis information from a stream for LTO WPA. */
2769
2770static void
2771ipa_sra_read_summary (void)
2772{
2773 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
2774 struct lto_file_decl_data *file_data;
2775 unsigned int j = 0;
2776
2777 gcc_checking_assert (!func_sums);
2778 gcc_checking_assert (!call_sums);
2779 func_sums
2780 = (new (ggc_alloc_no_dtor <ipa_sra_function_summaries> ())
2781 ipa_sra_function_summaries (symtab, true));
2782 call_sums = new ipa_sra_call_summaries (symtab);
2783
2784 while ((file_data = file_data_vec[j++]))
2785 {
2786 size_t len;
2787 const char *data
2788 = lto_get_summary_section_data (file_data, LTO_section_ipa_sra, &len);
2789 if (data)
2790 isra_read_summary_section (file_data, data, len);
2791 }
2792}
2793
2794/* Dump all IPA-SRA summary data for all cgraph nodes and edges to file F. */
2795
2796static void
2797ipa_sra_dump_all_summaries (FILE *f)
2798{
2799 cgraph_node *node;
2800 FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
2801 {
2802 fprintf (f, "\nSummary for node %s:\n", node->dump_name ());
2803
2804 isra_func_summary *ifs = func_sums->get (node);
2805 if (!ifs)
2806 fprintf (f, " Function does not have any associated IPA-SRA "
2807 "summary\n");
2808 else
2809 {
2810 if (!ifs->m_candidate)
2811 {
2812 fprintf (f, " Not a candidate function\n");
2813 continue;
2814 }
2815 if (ifs->m_returns_value)
2816 fprintf (f, " Returns value\n");
2817 if (vec_safe_is_empty (ifs->m_parameters))
2818 fprintf (f, " No parameter information.\n");
2819 else
2820 for (unsigned i = 0; i < ifs->m_parameters->length (); ++i)
2821 {
2822 fprintf (f, " Descriptor for parameter %i:\n", i);
2823 dump_isra_param_descriptor (f, &(*ifs->m_parameters)[i]);
2824 }
2825 fprintf (f, "\n");
2826 }
2827
2828 struct cgraph_edge *cs;
2829 for (cs = node->callees; cs; cs = cs->next_callee)
2830 {
2831 fprintf (f, " Summary for edge %s->%s:\n", cs->caller->dump_name (),
2832 cs->callee->dump_name ());
2833 isra_call_summary *csum = call_sums->get (cs);
2834 if (csum)
2835 csum->dump (f);
2836 else
2837 fprintf (f, " Call summary is MISSING!\n");
2838 }
2839
2840 }
2841 fprintf (f, "\n\n");
2842}
2843
3844/* Perform function-scope viability tests that can only be made at IPA level
2845 and return false if the function is deemed unsuitable for IPA-SRA. */
2846
2847static bool
2848ipa_sra_ipa_function_checks (cgraph_node *node)
2849{
2850 if (!node->can_be_local_p ())
2851 {
2852 if (dump_file)
2853 fprintf (dump_file, "Function %s disqualified because it cannot be "
2854 "made local.\n", node->dump_name ());
2855 return false;
2856 }
2857 if (!node->can_change_signature)
2858 {
2859 if (dump_file)
2860 fprintf (dump_file, "Function cannot change signature.\n");
2861 return false;
2862 }
2863
2864 return true;
2865}
2866
2867/* Issues found by check_for_caller_issues. */
2868
2869struct caller_issues
2870{
2871 /* The candidate being considered. */
2872 cgraph_node *candidate;
2873 /* There is a thunk among callers. */
2874 bool thunk;
2875 /* Call site with no available information. */
2876 bool unknown_callsite;
2877 /* Call from outside the candidate's comdat group. */
2878 bool call_from_outside_comdat;
2879 /* There is a bit-aligned load into one of non-gimple-typed arguments. */
2880 bool bit_aligned_aggregate_argument;
2881};
2882
2883/* Worker for call_for_symbol_and_aliases, set any flags of passed caller_issues
2884 that apply. */
2885
2886static bool
2887check_for_caller_issues (struct cgraph_node *node, void *data)
2888{
2889 struct caller_issues *issues = (struct caller_issues *) data;
2890
2891 for (cgraph_edge *cs = node->callers; cs; cs = cs->next_caller)
2892 {
2893 if (cs->caller->thunk)
2894 {
2895 issues->thunk = true;
2896 /* TODO: We should be able to process at least some types of
2897 thunks. */
2898 return true;
2899 }
2900 if (issues->candidate->calls_comdat_local
2901 && issues->candidate->same_comdat_group
2902 && !issues->candidate->in_same_comdat_group_p (cs->caller))
2903 {
2904 issues->call_from_outside_comdat = true;
2905 return true;
2906 }
2907
2908 isra_call_summary *csum = call_sums->get (cs);
2909 if (!csum)
2910 {
2911 issues->unknown_callsite = true;
2912 return true;
2913 }
2914
2915 if (csum->m_bit_aligned_arg)
2916 issues->bit_aligned_aggregate_argument = true;
2917 }
2918 return false;
2919}
2920
2921/* Look at all incoming edges to NODE, including aliases and thunks, and look
2922 for problems. Return true if the type of NODE should not be modified. */
2923
2924static bool
2925check_all_callers_for_issues (cgraph_node *node)
2926{
2927 struct caller_issues issues;
2928 memset (&issues, 0, sizeof (issues));
2929 issues.candidate = node;
2930
2931 node->call_for_symbol_and_aliases (check_for_caller_issues, &issues, true);
2932 if (issues.unknown_callsite)
2933 {
2934 if (dump_file && (dump_flags & TDF_DETAILS))
2935 fprintf (dump_file, "A call of %s has not been analyzed. Disabling "
2936 "all modifications.\n", node->dump_name ());
2937 return true;
2938 }
2939 /* TODO: We should be able to process at least some types of thunks. */
2940 if (issues.thunk)
2941 {
2942 if (dump_file && (dump_flags & TDF_DETAILS))
2943 fprintf (dump_file, "A call of %s is through a thunk, which is not"
2944 " handled yet. Disabling all modifications.\n",
2945 node->dump_name ());
2946 return true;
2947 }
2948 if (issues.call_from_outside_comdat)
2949 {
2950 if (dump_file)
2951 fprintf (dump_file, "Function would become private comdat called "
2952 "outside of its comdat group.\n");
2953 return true;
2954 }
2955
2956 if (issues.bit_aligned_aggregate_argument)
2957 {
2958 /* Let's only remove parameters/return values from such functions.
2959 TODO: We could only prevent splitting the problematic parameters if
2960 anybody thinks it is worth it. */
2961 if (dump_file && (dump_flags & TDF_DETAILS))
2962 fprintf (dump_file, "A call of %s has a bit-aligned aggregate argument,"
2963 " disabling parameter splitting.\n", node->dump_name ());
2964
2965 isra_func_summary *ifs = func_sums->get (node);
2966 gcc_checking_assert (ifs);
2967 unsigned param_count = vec_safe_length (ifs->m_parameters);
2968 for (unsigned i = 0; i < param_count; i++)
2969 (*ifs->m_parameters)[i].split_candidate = false;
2970 }
2971 return false;
2972}
2973
2974/* Find the access with corresponding OFFSET and SIZE among accesses in
2975 PARAM_DESC and return it or NULL if such an access is not there. */
2976
2977static param_access *
2978find_param_access (isra_param_desc *param_desc, unsigned offset, unsigned size)
2979{
2980 unsigned pclen = vec_safe_length (param_desc->accesses);
2981
2982 /* The search is linear but the number of stored accesses is bound by
2983 PARAM_IPA_SRA_MAX_REPLACEMENTS, so most probably 8. */
2984
2985 for (unsigned i = 0; i < pclen; i++)
2986 if ((*param_desc->accesses)[i]->unit_offset == offset
2987 && (*param_desc->accesses)[i]->unit_size == size)
2988 return (*param_desc->accesses)[i];
2989
2990 return NULL;
2991}
2992
2993/* Return true iff a total definite replacement size of SIZE would violate
2994 the limit set for the parameter in DESC. */
2995
2996static bool
2997size_would_violate_limit_p (isra_param_desc *desc, unsigned size)
2998{
2999 unsigned limit = desc->param_size_limit;
3000 if (size > limit
3001 || (!desc->by_ref && size == limit))
3002 return true;
3003 return false;
3004}
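
The predicate is deliberately asymmetric. A by-value parameter whose definite
replacements add up to exactly the limit is rejected, because replacing the
aggregate with pieces of the same total size gains nothing, whereas a
by-reference parameter may use the whole budget, since splitting it also
removes a pointer dereference. A standalone sketch with hypothetical sizes
(toy_desc mirrors just the two fields the predicate reads):

#include <cassert>

struct toy_desc
{
  unsigned param_size_limit;
  bool by_ref;
};

/* Same logic as size_would_violate_limit_p above.  */
static bool
would_violate (const toy_desc &d, unsigned size)
{
  return size > d.param_size_limit
	 || (!d.by_ref && size == d.param_size_limit);
}

int main ()
{
  toy_desc by_val = { 16, false };
  toy_desc by_ref = { 16, true };

  assert (!would_violate (by_val, 15));	/* strictly smaller: OK */
  assert (would_violate (by_val, 16));	/* equal: no gain, rejected */
  assert (!would_violate (by_ref, 16));	/* by-ref may use the full budget */
  assert (would_violate (by_ref, 17));
  return 0;
}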
3005
3006/* Increase reached size of DESC by SIZE or disqualify it if it would violate
3007 the set limit. IDX is the parameter number which is dumped when
3008 disqualifying. */
3009
3010static void
3011bump_reached_size (isra_param_desc *desc, unsigned size, unsigned idx)
3012{
3013 unsigned after = desc->size_reached + size;
3014 if (size_would_violate_limit_p (desc, after))
3015 {
3016 if (dump_file && (dump_flags & TDF_DETAILS))
3017 fprintf (dump_file, " ...size limit reached, disqualifying "
3018 "candidate parameter %u\n", idx);
3019 desc->split_candidate = false;
3020 return;
3021 }
3022 desc->size_reached = after;
3023}
3024
3025/* Take all actions required to deal with an edge CS that represents a call to
3026 an unknown or un-analyzed function, for both parameter removal and
3027 splitting. */
3028
3029static void
3030process_edge_to_unknown_caller (cgraph_edge *cs)
3031{
3032 isra_func_summary *from_ifs = func_sums->get (cs->caller);
3033 gcc_checking_assert (from_ifs);
3034 isra_call_summary *csum = call_sums->get (cs);
3035
3036 if (dump_file && (dump_flags & TDF_DETAILS))
3037 fprintf (dump_file, "Processing an edge to an unknown callee from %s:\n",
3038 cs->caller->dump_name ());
3039
3040 unsigned args_count = csum->m_arg_flow.length ();
3041 for (unsigned i = 0; i < args_count; i++)
3042 {
3043 isra_param_flow *ipf = &csum->m_arg_flow[i];
3044
3045 if (ipf->pointer_pass_through)
3046 {
3047 isra_param_desc *param_desc
3048 = &(*from_ifs->m_parameters)[get_single_param_flow_source (ipf)];
3049 param_desc->locally_unused = false;
3050 param_desc->split_candidate = false;
3051 continue;
3052 }
3053 if (ipf->aggregate_pass_through)
3054 {
3055 unsigned idx = get_single_param_flow_source (ipf);
3056 isra_param_desc *param_desc = &(*from_ifs->m_parameters)[idx];
3057
3058 param_desc->locally_unused = false;
3059 if (!param_desc->split_candidate)
3060 continue;
3061 gcc_assert (!param_desc->by_ref);
3062 param_access *pacc = find_param_access (param_desc, ipf->unit_offset,
3063 ipf->unit_size);
3064 gcc_checking_assert (pacc);
3065 pacc->certain = true;
3066 if (overlapping_certain_accesses_p (param_desc, NULL))
3067 {
3068 if (dump_file && (dump_flags & TDF_DETAILS))
3069 fprintf (dump_file, " ...leading to overlap, "
3070 " disqualifying candidate parameter %u\n",
3071 idx);
3072 param_desc->split_candidate = false;
3073 }
3074 else
3075 bump_reached_size (param_desc, pacc->unit_size, idx);
3076 ipf->aggregate_pass_through = false;
3077 continue;
3078 }
3079
3080 for (int j = 0; j < ipf->length; j++)
3081 {
3082 int input_idx = ipf->inputs[j];
3083 (*from_ifs->m_parameters)[input_idx].locally_unused = false;
3084 }
3085 }
3086}
3087
3088/* Propagate parameter removal information through cross-SCC edge CS,
3089 i.e. mark a caller parameter as used whenever it feeds an argument that
3090 the callee actually uses. */
3091
3092static void
3093param_removal_cross_scc_edge (cgraph_edge *cs)
3094{
3095 enum availability availability;
3096 cgraph_node *callee = cs->callee->function_symbol (&availability);
3097 isra_func_summary *to_ifs = func_sums->get (callee);
3098 if (!to_ifs || !to_ifs->m_candidate
3099 || (availability < AVAIL_AVAILABLE)
3100 || vec_safe_is_empty (to_ifs->m_parameters))
3101 {
3102 process_edge_to_unknown_caller (cs);
3103 return;
3104 }
3105 isra_func_summary *from_ifs = func_sums->get (cs->caller);
3106 gcc_checking_assert (from_ifs);
3107
3108 isra_call_summary *csum = call_sums->get (cs);
3109 unsigned args_count = csum->m_arg_flow.length ();
3110 unsigned param_count = vec_safe_length (to_ifs->m_parameters);
3111
3112 for (unsigned i = 0; i < args_count; i++)
3113 {
3114 bool unused_in_callee;
3115 if (i < param_count)
3116 unused_in_callee = (*to_ifs->m_parameters)[i].locally_unused;
3117 else
3118 unused_in_callee = false;
3119
3120 if (!unused_in_callee)
3121 {
3122 isra_param_flow *ipf = &csum->m_arg_flow[i];
3123 for (int j = 0; j < ipf->length; j++)
3124 {
3125 int input_idx = ipf->inputs[j];
3126 (*from_ifs->m_parameters)[input_idx].locally_unused = false;
3127 }
3128 }
3129 }
3130}
3131
3132/* Unless it is already there, push NODE which is also described by IFS to
3133 STACK. */
3134
3135static void
3136isra_push_node_to_stack (cgraph_node *node, isra_func_summary *ifs,
3137 vec<cgraph_node *> *stack)
3138{
3139 if (!ifs->m_queued)
3140 {
3141 ifs->m_queued = true;
3142 stack->safe_push (node);
3143 }
3144}
3145
3146/* If parameter with index INPUT_IDX is marked as locally unused, mark it as
3147 used and push CALLER on STACK. */
3148
3149static void
3150isra_mark_caller_param_used (isra_func_summary *from_ifs, int input_idx,
3151 cgraph_node *caller, vec<cgraph_node *> *stack)
3152{
3153 if ((*from_ifs->m_parameters)[input_idx].locally_unused)
3154 {
3155 (*from_ifs->m_parameters)[input_idx].locally_unused = false;
3156 isra_push_node_to_stack (caller, from_ifs, stack);
3157 }
3158}
3159
3160
3161/* Propagate information that any parameter is not used only locally within a
3162 SCC across CS to the caller, which must be in the same SCC as the
3163 callee. Push any callers that need to be re-processed to STACK. */
3164
3165static void
3166propagate_used_across_scc_edge (cgraph_edge *cs, vec<cgraph_node *> *stack)
3167{
3168 isra_func_summary *from_ifs = func_sums->get (cs->caller);
3169 if (!from_ifs || vec_safe_is_empty (from_ifs->m_parameters))
3170 return;
3171
3172 isra_call_summary *csum = call_sums->get (cs);
3173 gcc_checking_assert (csum);
3174 unsigned args_count = csum->m_arg_flow.length ();
3175 enum availability availability;
3176 cgraph_node *callee = cs->callee->function_symbol (&availability);
3177 isra_func_summary *to_ifs = func_sums->get (callee);
3178
3179 unsigned param_count
3180 = (to_ifs && (availability >= AVAIL_AVAILABLE))
3181 ? vec_safe_length (to_ifs->m_parameters) : 0;
3182 for (unsigned i = 0; i < args_count; i++)
3183 {
3184 if (i < param_count
3185 && (*to_ifs->m_parameters)[i].locally_unused)
3186 continue;
3187
3188 /* The argument is needed in the callee, so we must mark the parameter as
3189 used also in the caller and its callers within this SCC. */
3190 isra_param_flow *ipf = &csum->m_arg_flow[i];
3191 for (int j = 0; j < ipf->length; j++)
3192 {
3193 int input_idx = ipf->inputs[j];
3194 isra_mark_caller_param_used (from_ifs, input_idx, cs->caller, stack);
3195 }
3196 }
3197}
3198
3199/* Propagate information that any parameter is not used only locally within a
3200 SCC (i.e. is used also elsewhere) to all callers of NODE that are in the
3201 same SCC. Push any callers that need to be re-processed to STACK. */
3202
3203static bool
3204propagate_used_to_scc_callers (cgraph_node *node, void *data)
3205{
3206 vec<cgraph_node *> *stack = (vec<cgraph_node *> *) data;
3207 cgraph_edge *cs;
3208 for (cs = node->callers; cs; cs = cs->next_caller)
3209 if (ipa_edge_within_scc (cs))
3210 propagate_used_across_scc_edge (cs, stack);
3211 return false;
3212}
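
Together with isra_push_node_to_stack, this implements a classic worklist
fixed point: within an SCC, marking one parameter as used can make caller
parameters used in turn, so affected nodes are re-queued (at most once at a
time, guarded by m_queued) until nothing changes. A generic standalone sketch
of the same pattern on a hypothetical three-node graph; toy_node and its
"used" flag are illustrative, not cgraph:

#include <cassert>
#include <vector>

struct toy_node
{
  std::vector<int> callers;	/* indices of calling nodes */
  bool used = false;
  bool queued = false;
};

/* Counterpart of isra_push_node_to_stack: queue a node at most once.  */
static void
push (std::vector<toy_node> &g, std::vector<int> &stack, int n)
{
  if (!g[n].queued)
    {
      g[n].queued = true;
      stack.push_back (n);
    }
}

int main ()
{
  /* Node 0 is called by 1 and 2, node 1 is called by 2.  Marking 0 as
     used must eventually mark 1 and 2 as well.  */
  std::vector<toy_node> g (3);
  g[0].callers = { 1, 2 };
  g[1].callers = { 2 };

  std::vector<int> stack;
  g[0].used = true;
  push (g, stack, 0);

  while (!stack.empty ())
    {
      int n = stack.back ();
      stack.pop_back ();
      g[n].queued = false;
      for (int c : g[n].callers)
	if (!g[c].used)
	  {
	    g[c].used = true;
	    push (g, stack, c);
	  }
    }

  assert (g[1].used && g[2].used);
  return 0;
}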
3213
3214/* Return true iff all certain accesses in ARG_DESC are also present as
3215 certain accesses in PARAM_DESC. */
3216
3217static bool
3218all_callee_accesses_present_p (isra_param_desc *param_desc,
3219 isra_param_desc *arg_desc)
3220{
3221 unsigned aclen = vec_safe_length (arg_desc->accesses);
3222 for (unsigned j = 0; j < aclen; j++)
3223 {
3224 param_access *argacc = (*arg_desc->accesses)[j];
3225 if (!argacc->certain)
3226 continue;
3227 param_access *pacc = find_param_access (param_desc, argacc->unit_offset,
3228 argacc->unit_size);
3229 if (!pacc
3230 || !pacc->certain
3231 || !types_compatible_p (argacc->type, pacc->type))
3232 return false;
3233 }
3234 return true;
3235}
3236
3237/* Type internal to function pull_accesses_from_callee. Unfortunately gcc 4.8
3238 does not allow instantiating an auto_vec with a type defined within a
3239 function so it is a global type. */
3240enum acc_prop_kind {ACC_PROP_DONT, ACC_PROP_COPY, ACC_PROP_CERTAIN};
3241
3242
3243/* Attempt to propagate all definite accesses from ARG_DESC to PARAM_DESC,
3244 (which belongs to CALLER) if they would not violate some constraint there.
3245 If successful, return NULL, otherwise return the string reason for failure
3246 (which can be written to the dump file). DELTA_OFFSET is the known offset
3247 of the actual argument within the formal parameter (so of ARG_DESC within
3248 PARAM_DESC), ARG_SIZE is the size of the actual argument or zero, if not
3249 known. In case of success, set *CHANGE_P to true if propagation actually
3250 changed anything. */
3251
3252static const char *
3253pull_accesses_from_callee (cgraph_node *caller, isra_param_desc *param_desc,
3254 isra_param_desc *arg_desc,
3255 unsigned delta_offset, unsigned arg_size,
3256 bool *change_p)
3257{
3258 unsigned pclen = vec_safe_length (param_desc->accesses);
3259 unsigned aclen = vec_safe_length (arg_desc->accesses);
3260 unsigned prop_count = 0;
3261 unsigned prop_size = 0;
3262 bool change = false;
3263
3264 auto_vec <enum acc_prop_kind, 8> prop_kinds (aclen);
3265 for (unsigned j = 0; j < aclen; j++)
3266 {
3267 param_access *argacc = (*arg_desc->accesses)[j];
3268 prop_kinds.safe_push (ACC_PROP_DONT);
3269
3270 if (arg_size > 0
3271 && argacc->unit_offset + argacc->unit_size > arg_size)
3272 return "callee access outside size boundary";
3273
3274 if (!argacc->certain)
3275 continue;
3276
3277 unsigned offset = argacc->unit_offset + delta_offset;
3278 /* Given that accesses are initially stored according to increasing
3279 offset and decreasing size in case of equal offsets, the following
3280 searches could be written more efficiently if we kept the ordering
3281 when copying. But the number of accesses is capped at
3282 PARAM_IPA_SRA_MAX_REPLACEMENTS (so most likely 8) and the code gets
3283 messy quickly, so let's improve on that only if necessary. */
3284
3285 bool exact_match = false;
3286 for (unsigned i = 0; i < pclen; i++)
3287 {
3288 /* Check for overlaps. */
3289 param_access *pacc = (*param_desc->accesses)[i];
3290 if (pacc->unit_offset == offset
3291 && pacc->unit_size == argacc->unit_size)
3292 {
3293 if (argacc->alias_ptr_type != pacc->alias_ptr_type
3294 || !types_compatible_p (argacc->type, pacc->type))
3295 return "propagated access types would not match existing ones";
3296
3297 exact_match = true;
3298 if (!pacc->certain)
3299 {
3300 prop_kinds[j] = ACC_PROP_CERTAIN;
3301 prop_size += argacc->unit_size;
3302 change = true;
3303 }
3304 continue;
3305 }
3306
3307 if (offset < pacc->unit_offset + pacc->unit_size
3308 && offset + argacc->unit_size > pacc->unit_offset)
3309 {
3310 /* No overlap is permissible with certain accesses; an overlap
3311 may only fit entirely inside a not-yet-certain one. */
3312 if (pacc->certain
3313 || offset < pacc->unit_offset
3314 || (offset + argacc->unit_size
3315 > pacc->unit_offset + pacc->unit_size))
3316 return "a propagated access would conflict in caller";
3317 }
3318 }
3319
3320 if (!exact_match)
3321 {
3322 prop_kinds[j] = ACC_PROP_COPY;
3323 prop_count++;
3324 prop_size += argacc->unit_size;
3325 change = true;
3326 }
3327 }
3328
3329 if (!change)
3330 return NULL;
3331
3332 if ((prop_count + pclen
3333 > (unsigned) opt_for_fn (caller->decl, param_ipa_sra_max_replacements))
3334 || size_would_violate_limit_p (param_desc,
3335 param_desc->size_reached + prop_size))
3336 return "propagating accesses would violate the count or size limit";
3337
3338 *change_p = true;
3339 for (unsigned j = 0; j < aclen; j++)
3340 {
3341 if (prop_kinds[j] == ACC_PROP_COPY)
3342 {
3343 param_access *argacc = (*arg_desc->accesses)[j];
3344
3345 param_access *copy = ggc_cleared_alloc<param_access> ();
3346 copy->unit_offset = argacc->unit_offset + delta_offset;
3347 copy->unit_size = argacc->unit_size;
3348 copy->type = argacc->type;
3349 copy->alias_ptr_type = argacc->alias_ptr_type;
3350 copy->certain = true;
3351 vec_safe_push (param_desc->accesses, copy);
3352 }
3353 else if (prop_kinds[j] == ACC_PROP_CERTAIN)
3354 {
3355 param_access *argacc = (*arg_desc->accesses)[j];
3356 param_access *csp
3357 = find_param_access (param_desc, argacc->unit_offset + delta_offset,
3358 argacc->unit_size);
3359 csp->certain = true;
3360 }
3361 }
3362
3363 param_desc->size_reached += prop_size;
3364
3365 return NULL;
3366}
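
The classification loop above accepts a callee access only when it matches an
existing caller access exactly (checked first, together with the types) or
fits entirely inside an existing not-yet-certain access; any partial overlap,
or any overlap with a certain access, aborts the whole propagation. A
standalone sketch of just the interval part of that rule; toy_acc is a
hypothetical offset/size pair and exact matches are assumed to have been
filtered out already:

#include <cassert>

struct toy_acc
{
  unsigned off, size;
  bool certain;
};

/* Does a callee access [off, off + size) conflict with caller access PACC?  */
static bool
conflicts (const toy_acc &pacc, unsigned off, unsigned size)
{
  if (off >= pacc.off + pacc.size || off + size <= pacc.off)
    return false;		/* disjoint intervals never conflict */
  /* Overlap is tolerable only when fully nested in a non-certain access.  */
  return pacc.certain
	 || off < pacc.off
	 || off + size > pacc.off + pacc.size;
}

int main ()
{
  toy_acc loose = { 8, 8, false };	/* [8, 16), not certain */
  toy_acc sure = { 8, 8, true };	/* [8, 16), certain */

  assert (!conflicts (loose, 0, 8));	/* disjoint */
  assert (!conflicts (loose, 10, 4));	/* nested in non-certain: OK */
  assert (conflicts (loose, 4, 8));	/* partial overlap: fails */
  assert (conflicts (sure, 10, 4));	/* any overlap with certain: fails */
  return 0;
}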
3367
3368/* Propagate parameter splitting information through call graph edge CS.
3369 Return true if any changes that might need to be propagated within SCCs have
3370 been made. The function also clears the aggregate_pass_through and
3371 pointer_pass_through in call summaries which do not need to be processed
3372 again if this CS is revisited when iterating while changes are propagated
3373 within an SCC. */
3374
3375static bool
3376param_splitting_across_edge (cgraph_edge *cs)
3377{
3378 bool res = false;
3379 bool cross_scc = !ipa_edge_within_scc (cs);
3380 enum availability availability;
3381 cgraph_node *callee = cs->callee->function_symbol (&availability);
3382 isra_func_summary *from_ifs = func_sums->get (cs->caller);
3383 gcc_checking_assert (from_ifs && from_ifs->m_parameters);
3384
3385 isra_call_summary *csum = call_sums->get (cs);
3386 gcc_checking_assert (csum);
3387 unsigned args_count = csum->m_arg_flow.length ();
3388 isra_func_summary *to_ifs = func_sums->get (callee);
3389 unsigned param_count
3390 = ((to_ifs && to_ifs->m_candidate && (availability >= AVAIL_AVAILABLE))
3391 ? vec_safe_length (to_ifs->m_parameters)
3392 : 0);
3393
3394 if (dump_file && (dump_flags & TDF_DETAILS))
3395 fprintf (dump_file, "Splitting across %s->%s:\n",
3396 cs->caller->dump_name (), callee->dump_name ());
3397
3398 unsigned i;
3399 for (i = 0; (i < args_count) && (i < param_count); i++)
3400 {
3401 isra_param_desc *arg_desc = &(*to_ifs->m_parameters)[i];
3402 isra_param_flow *ipf = &csum->m_arg_flow[i];
3403
3404 if (arg_desc->locally_unused)
3405 {
3406 if (dump_file && (dump_flags & TDF_DETAILS))
3407 fprintf (dump_file, " ->%u: unused in callee\n", i);
3408 ipf->pointer_pass_through = false;
3409 continue;
3410 }
3411
3412 if (ipf->pointer_pass_through)
3413 {
3414 int idx = get_single_param_flow_source (ipf);
3415 isra_param_desc *param_desc = &(*from_ifs->m_parameters)[idx];
3416 if (!param_desc->split_candidate)
3417 continue;
3418 gcc_assert (param_desc->by_ref);
3419
3420 if (!arg_desc->split_candidate || !arg_desc->by_ref)
3421 {
3422 if (dump_file && (dump_flags & TDF_DETAILS))
3423 fprintf (dump_file, " %u->%u: not candidate or not by "
3424 "reference in callee\n", idx, i);
3425 param_desc->split_candidate = false;
3426 ipf->pointer_pass_through = false;
3427 res = true;
3428 }
3429 else if (!ipf->safe_to_import_accesses)
3430 {
3431 if (!csum->m_before_any_store
3432 || !all_callee_accesses_present_p (param_desc, arg_desc))
3433 {
3434 if (dump_file && (dump_flags & TDF_DETAILS))
3435 fprintf (dump_file, " %u->%u: cannot import accesses.\n",
3436 idx, i);
3437 param_desc->split_candidate = false;
3438 ipf->pointer_pass_through = false;
3439 res = true;
3440
3441 }
3442 else
3443 {
3444 if (dump_file && (dump_flags & TDF_DETAILS))
3445 fprintf (dump_file, " %u->%u: verified callee accesses "
3446 "present.\n", idx, i);
3447 if (cross_scc)
3448 ipf->pointer_pass_through = false;
3449 }
3450 }
3451 else
3452 {
3453 const char *pull_failure
3454 = pull_accesses_from_callee (cs->caller, param_desc, arg_desc,
3455 0, 0, &res);
3456 if (pull_failure)
3457 {
3458 if (dump_file && (dump_flags & TDF_DETAILS))
3459 fprintf (dump_file, " %u->%u: by_ref access pull "
3460 "failed: %s.\n", idx, i, pull_failure);
3461 param_desc->split_candidate = false;
3462 ipf->pointer_pass_through = false;
3463 res = true;
3464 }
3465 else
3466 {
3467 if (dump_file && (dump_flags & TDF_DETAILS))
3468 fprintf (dump_file, " %u->%u: by_ref access pull "
3469 "succeeded.\n", idx, i);
3470 if (cross_scc)
3471 ipf->pointer_pass_through = false;
3472 }
3473 }
3474 }
3475 else if (ipf->aggregate_pass_through)
3476 {
3477 int idx = get_single_param_flow_source (ipf);
3478 isra_param_desc *param_desc = &(*from_ifs->m_parameters)[idx];
3479 if (!param_desc->split_candidate)
3480 continue;
3481 gcc_assert (!param_desc->by_ref);
3482 param_access *pacc = find_param_access (param_desc, ipf->unit_offset,
3483 ipf->unit_size);
3484 gcc_checking_assert (pacc);
3485
3486 if (pacc->certain)
3487 {
3488 if (dump_file && (dump_flags & TDF_DETAILS))
3489 fprintf (dump_file, " %u->%u: already certain\n", idx, i);
3490 ipf->aggregate_pass_through = false;
3491 }
3492 else if (!arg_desc->split_candidate || arg_desc->by_ref)
3493 {
3494 if (dump_file && (dump_flags & TDF_DETAILS))
3495 fprintf (dump_file, " %u->%u: not candidate or by "
3496 "reference in callee\n", idx, i);
3497
3498 pacc->certain = true;
3499 if (overlapping_certain_accesses_p (param_desc, NULL))
3500 {
3501 if (dump_file && (dump_flags & TDF_DETAILS))
3502 fprintf (dump_file, " ...leading to overlap, "
3503 " disqualifying candidate parameter %u\n",
3504 idx);
3505 param_desc->split_candidate = false;
3506 }
3507 else
3508 bump_reached_size (param_desc, pacc->unit_size, idx);
3509
3510 ipf->aggregate_pass_through = false;
3511 res = true;
3512 }
3513 else
3514 {
3515 const char *pull_failure
3516 = pull_accesses_from_callee (cs->caller, param_desc, arg_desc,
3517 ipf->unit_offset,
3518 ipf->unit_size, &res);
3519 if (pull_failure)
3520 {
3521 if (dump_file && (dump_flags & TDF_DETAILS))
3522 fprintf (dump_file, " %u->%u: arg access pull "
3523 "failed: %s.\n", idx, i, pull_failure);
3524
3525 ipf->aggregate_pass_through = false;
3526 pacc->certain = true;
3527
3528 if (overlapping_certain_accesses_p (param_desc, NULL))
3529 {
3530 if (dump_file && (dump_flags & TDF_DETAILS))
3531 fprintf (dump_file, " ...leading to overlap, "
3532 " disqualifying candidate parameter %u\n",
3533 idx);
3534 param_desc->split_candidate = false;
3535 }
3536 else
3537 bump_reached_size (param_desc, pacc->unit_size, idx);
3538
3539 res = true;
3540 }
3541 else
3542 {
3543 if (dump_file && (dump_flags & TDF_DETAILS))
3544 fprintf (dump_file, " %u->%u: arg access pull "
3545 "succeeded.\n", idx, i);
3546 if (cross_scc)
3547 ipf->aggregate_pass_through = false;
3548 }
3549 }
3550 }
3551 }
3552
3553 /* Handle argument-parameter count mismatches. */
3554 for (; (i < args_count); i++)
3555 {
3556 isra_param_flow *ipf = &csum->m_arg_flow[i];
3557
3558 if (ipf->pointer_pass_through || ipf->aggregate_pass_through)
3559 {
3560 int idx = get_single_param_flow_source (ipf);
3561 ipf->pointer_pass_through = false;
3562 ipf->aggregate_pass_through = false;
3563 isra_param_desc *param_desc = &(*from_ifs->m_parameters)[idx];
3564 if (!param_desc->split_candidate)
3565 continue;
3566
3567 if (dump_file && (dump_flags & TDF_DETAILS))
3568 fprintf (dump_file, " %u->%u: no corresponding formal parameter\n",
3569 idx, i);
3570 param_desc->split_candidate = false;
3571 res = true;
3572 }
3573 }
3574 return res;
3575}
3576
3577/* Worker for call_for_symbol_and_aliases. Look at all callers of NODE and
3578 return false if all of them either ignore the return value, or come from
3579 the same SCC and use the return value only to compute their own return
3580 value; otherwise return true. */
3581
3582static bool
3583retval_used_p (cgraph_node *node, void *)
3584{
3585 for (cgraph_edge *cs = node->callers; cs; cs = cs->next_caller)
3586 {
3587 isra_call_summary *csum = call_sums->get (cs);
3588 gcc_checking_assert (csum)((void)(!(csum) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-sra.c"
, 3588, __FUNCTION__), 0 : 0))
;
3589 if (csum->m_return_ignored)
3590 continue;
3591 if (!csum->m_return_returned)
3592 return true;
3593
3594 isra_func_summary *from_ifs = func_sums->get (cs->caller);
3595 if (!from_ifs || !from_ifs->m_candidate)
3596 return true;
3597
3598 if (!ipa_edge_within_scc (cs)
3599 && !from_ifs->m_return_ignored)
3600 return true;
3601 }
3602
3603 return false;
3604}
3605
3606/* Push into NEW_PARAMS all required parameter adjustment entries to copy or
3607 modify the parameter which originally had index BASE_INDEX, had index
3608 PREV_CLONE_INDEX in the adjustment vector of the parent clone (if any) and
3609 was described by PREV_ADJUSTMENT. If the parent clone is the original
3610 function, PREV_ADJUSTMENT is NULL and PREV_CLONE_INDEX equals BASE_INDEX. */
3611
3612
3613static void
3614push_param_adjustments_for_index (isra_func_summary *ifs, unsigned base_index,
3615 unsigned prev_clone_index,
3616 ipa_adjusted_param *prev_adjustment,
3617 vec<ipa_adjusted_param, va_gc> **new_params)
3618{
3619 isra_param_desc *desc = &(*ifs->m_parameters)[base_index];
3620 if (desc->locally_unused)
3621 {
3622 if (dump_file)
3623 fprintf (dump_file, " Will remove parameter %u\n", base_index);
3624 return;
3625 }
3626
3627 if (!desc->split_candidate)
3628 {
3629 ipa_adjusted_param adj;
3630 if (prev_adjustment)
3631 {
3632 adj = *prev_adjustment;
3633 adj.prev_clone_adjustment = true;
3634 adj.prev_clone_index = prev_clone_index;
3635 }
3636 else
3637 {
3638 memset (&adj, 0, sizeof (adj));
3639 adj.op = IPA_PARAM_OP_COPY;
3640 adj.base_index = base_index;
3641 adj.prev_clone_index = prev_clone_index;
3642 }
3643 vec_safe_push ((*new_params), adj);
3644 return;
3645 }
3646
3647 if (dump_file)
3648 fprintf (dump_file, " Will split parameter %u\n", base_index);
3649
3650 gcc_assert (!prev_adjustment || prev_adjustment->op == IPA_PARAM_OP_COPY);
3651 unsigned aclen = vec_safe_length (desc->accesses);
3652 for (unsigned j = 0; j < aclen; j++)
3653 {
3654 param_access *pa = (*desc->accesses)[j];
3655 if (!pa->certain)
3656 continue;
3657 if (dump_file)
3658 fprintf (dump_file, " - component at byte offset %u, "
3659 "size %u\n", pa->unit_offset, pa->unit_size);
3660
3661 ipa_adjusted_param adj;
3662 memset (&adj, 0, sizeof (adj));
3663 adj.op = IPA_PARAM_OP_SPLIT;
3664 adj.base_index = base_index;
3665 adj.prev_clone_index = prev_clone_index;
3666 adj.param_prefix_index = IPA_PARAM_PREFIX_ISRA;
3667 adj.reverse = pa->reverse;
3668 adj.type = pa->type;
3669 adj.alias_ptr_type = pa->alias_ptr_type;
3670 adj.unit_offset = pa->unit_offset;
3671 vec_safe_push ((*new_params), adj);
3672 }
3673}
3674
3675/* Worker for call_for_symbol_thunks_and_aliases. Set the calls_comdat_local
3676 flag of all callers of NODE. */
3677
3678static bool
3679mark_callers_calls_comdat_local (struct cgraph_node *node, void *)
3680{
3681 for (cgraph_edge *cs = node->callers; cs; cs = cs->next_caller)
3682 cs->caller->calls_comdat_local = true;
3683 return false;
3684}
3685
3686
3687/* Do final processing of results of IPA propagation regarding NODE, clone it
3688 if appropriate. */
3689
3690static void
3691process_isra_node_results (cgraph_node *node,
3692 hash_map<const char *, unsigned> *clone_num_suffixes)
3693{
3694 isra_func_summary *ifs = func_sums->get (node);
3695 if (!ifs || !ifs->m_candidate)
3696 return;
3697
3698 auto_vec<bool, 16> surviving_params;
3699 bool check_surviving = false;
3700 clone_info *cinfo = clone_info::get (node);
3701 if (cinfo && cinfo->param_adjustments)
3702 {
3703 check_surviving = true;
3704 cinfo->param_adjustments->get_surviving_params (&surviving_params);
3705 }
3706
3707 unsigned param_count = vec_safe_length (ifs->m_parameters);
3708 bool will_change_function = false;
3709 if (ifs->m_returns_value && ifs->m_return_ignored)
3710 will_change_function = true;
3711 else
3712 for (unsigned i = 0; i < param_count; i++)
3713 {
3714 isra_param_desc *desc = &(*ifs->m_parameters)[i];
3715 if ((desc->locally_unused || desc->split_candidate)
3716 /* Make sure we do not clone just to attempt to remove an already
3717 removed unused argument. */
3718 && (!check_surviving
3719 || (i < surviving_params.length ()
3720 && surviving_params[i])))
3721 {
3722 will_change_function = true;
3723 break;
3724 }
3725 }
3726 if (!will_change_function)
3727 return;
3728
3729 if (dump_file)
3730 {
3731 fprintf (dump_file, "\nEvaluating analysis results for %s\n",
3732 node->dump_name ());
3733 if (ifs->m_returns_value && ifs->m_return_ignored)
3734 fprintf (dump_file, " Will remove return value.\n");
3735 }
3736
3737 vec<ipa_adjusted_param, va_gc> *new_params = NULL;
3738 if (ipa_param_adjustments *old_adjustments
3739 = cinfo ? cinfo->param_adjustments : NULL)
3740 {
3741 unsigned old_adj_len = vec_safe_length (old_adjustments->m_adj_params);
3742 for (unsigned i = 0; i < old_adj_len; i++)
3743 {
3744 ipa_adjusted_param *old_adj = &(*old_adjustments->m_adj_params)[i];
3745 push_param_adjustments_for_index (ifs, old_adj->base_index, i,
3746 old_adj, &new_params);
3747 }
3748 }
3749 else
3750 for (unsigned i = 0; i < param_count; i++)
3751 push_param_adjustments_for_index (ifs, i, i, NULL, &new_params);
3752
3753 ipa_param_adjustments *new_adjustments
3754 = (new (ggc_alloc <ipa_param_adjustments> ())
3755 ipa_param_adjustments (new_params, param_count,
3756 ifs->m_returns_value && ifs->m_return_ignored));
3757
3758 if (dump_file && (dump_flags & TDF_DETAILS))
3759 {
3760 fprintf (dump_file, "\n Created adjustments:\n");
3761 new_adjustments->dump (dump_file);
3762 }
3763
3764 unsigned &suffix_counter = clone_num_suffixes->get_or_insert (
3765 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (
3766 node->decl)));
3767 auto_vec<cgraph_edge *> callers = node->collect_callers ();
3768 cgraph_node *new_node
3769 = node->create_virtual_clone (callers, NULL, new_adjustments, "isra",
3770 suffix_counter);
3771 suffix_counter++;
3772 if (node->calls_comdat_local && node->same_comdat_group)
3773 {
3774 new_node->add_to_same_comdat_group (node);
3775 new_node->call_for_symbol_and_aliases (mark_callers_calls_comdat_local,
3776 NULL, true);
3777 }
3778 new_node->calls_comdat_local = node->calls_comdat_local;
3779
3780 if (dump_file)
3781 fprintf (dump_file, " Created new node %s\n", new_node->dump_name ());
3782 callers.release ();
3783}
3784
3785/* Check which parameters of NODE described by IFS have survived until IPA-SRA
3786 and disable transformations for those which have not or which should not be
3787 transformed because the associated debug counter reached its limit. Return
3788 true if none survived or if there were no candidates to begin with. */
3789
3790static bool
3791disable_unavailable_parameters (cgraph_node *node, isra_func_summary *ifs)
3792{
3793 bool ret = true;
3794 unsigned len = vec_safe_length (ifs->m_parameters);
3795 if (!len)
3796 return true;
3797
3798 auto_vec<bool, 16> surviving_params;
3799 bool check_surviving = false;
3800 clone_info *cinfo = clone_info::get (node);
3801 if (cinfo && cinfo->param_adjustments)
3802 {
3803 check_surviving = true;
3804 cinfo->param_adjustments->get_surviving_params (&surviving_params);
3805 }
3806 bool dumped_first = false;
3807 for (unsigned i = 0; i < len; i++)
3808 {
3809 isra_param_desc *desc = &(*ifs->m_parameters)[i];
3810 if (!dbg_cnt (ipa_sra_params))
3811 {
3812 desc->locally_unused = false;
3813 desc->split_candidate = false;
3814 }
3815 else if (check_surviving
3816 && (i >= surviving_params.length ()
3817 || !surviving_params[i]))
3818 {
3819 /* Even if the parameter was removed by a previous IPA pass, we do
3820 not clear locally_unused because if it really is unused, this
3821 information might be useful in callers. */
3822 desc->split_candidate = false;
3823
3824 if (dump_file && (dump_flags & TDF_DETAILS))
3825 {
3826 if (!dumped_first)
3827 {
3828 fprintf (dump_file,
3829 "The following parameters of %s are dead on "
3830 "arrival:", node->dump_name ());
3831 dumped_first = true;
3832 }
3833 fprintf (dump_file, " %u", i);
3834 }
3835 }
3836 else if (desc->locally_unused || desc->split_candidate)
3837 ret = false;
3838 }
3839
3840 if (dumped_first)
3841 fprintf (dump_file, "\n");
3842
3843 return ret;
3844}
3845
3846
3847/* Run the interprocedural part of IPA-SRA. */
3848
3849static unsigned int
3850ipa_sra_analysis (void)
3851{
3852 if (dump_file)
3853 {
3854 fprintf (dump_file, "\n========== IPA-SRA IPA stage ==========\n");
3855 ipa_sra_dump_all_summaries (dump_file);
3856 }
3857
3858 gcc_checking_assert (func_sums);
3859 gcc_checking_assert (call_sums);
3860 cgraph_node **order = XCNEWVEC (cgraph_node *, symtab->cgraph_count);
3861 auto_vec <cgraph_node *, 16> stack;
3862 int node_scc_count = ipa_reduced_postorder (order, true, NULL);
3863
3864 /* One sweep from callees to callers for parameter removal and splitting. */
3865 for (int i = 0; i < node_scc_count; i++)
3866 {
3867 cgraph_node *scc_rep = order[i];
3868 vec<cgraph_node *> cycle_nodes = ipa_get_nodes_in_cycle (scc_rep);
3869 unsigned j;
3870
3871 /* Preliminary IPA function level checks and first step of parameter
3872 removal. */
3873 cgraph_node *v;
3874 FOR_EACH_VEC_ELT (cycle_nodes, j, v)
3875 {
3876 isra_func_summary *ifs = func_sums->get (v);
3877 if (!ifs || !ifs->m_candidate)
3878 continue;
3879 if (!ipa_sra_ipa_function_checks (v)
3880 || check_all_callers_for_issues (v))
3881 {
3882 ifs->zap ();
3883 continue;
3884 }
3885 if (disable_unavailable_parameters (v, ifs))
3886 continue;
3887 for (cgraph_edge *cs = v->indirect_calls; cs; cs = cs->next_callee)
3888 process_edge_to_unknown_caller (cs);
3889 for (cgraph_edge *cs = v->callees; cs; cs = cs->next_callee)
3890 if (!ipa_edge_within_scc (cs))
3891 param_removal_cross_scc_edge (cs);
3892 }
3893
3894 /* Look at edges within the current SCC and propagate used-ness across
3895 them, pushing onto the stack all nodes which might need to be
3896 revisited. */
3897 FOR_EACH_VEC_ELT (cycle_nodes, j, v)
3898 v->call_for_symbol_thunks_and_aliases (propagate_used_to_scc_callers,
3899 &stack, true);
3900
3901 /* Keep revisiting and pushing until nothing changes. */
3902 while (!stack.is_empty ())
3903 {
3904 cgraph_node *v = stack.pop ();
3905 isra_func_summary *ifs = func_sums->get (v);
3906 gcc_checking_assert (ifs && ifs->m_queued);
3907 ifs->m_queued = false;
3908
3909 v->call_for_symbol_thunks_and_aliases (propagate_used_to_scc_callers,
3910 &stack, true);
3911 }
3912
3913 /* Parameter splitting. */
3914 bool repeat_scc_access_propagation;
3915 do
3916 {
3917 repeat_scc_access_propagation = false;
3918 FOR_EACH_VEC_ELT (cycle_nodes, j, v)
3919 {
3920 isra_func_summary *ifs = func_sums->get (v);
3921 if (!ifs
3922 || !ifs->m_candidate
3923 || vec_safe_is_empty (ifs->m_parameters))
3924 continue;
3925 for (cgraph_edge *cs = v->callees; cs; cs = cs->next_callee)
3926 if (param_splitting_across_edge (cs))
3927 repeat_scc_access_propagation = true;
3928 }
3929 }
3930 while (repeat_scc_access_propagation);
3931
3932 if (flag_checking)
3933 FOR_EACH_VEC_ELT (cycle_nodes, j, v)
3934 verify_splitting_accesses (v, true);
3935
3936 cycle_nodes.release ();
3937 }
3938
3939 /* One sweep from callers to callees for result removal. */
3940 for (int i = node_scc_count - 1; i >= 0 ; i--)
3941 {
3942 cgraph_node *scc_rep = order[i];
3943 vec<cgraph_node *> cycle_nodes = ipa_get_nodes_in_cycle (scc_rep);
3944 unsigned j;
3945
3946 cgraph_node *v;
3947 FOR_EACH_VEC_ELT (cycle_nodes, j, v)
3948 {
3949 isra_func_summary *ifs = func_sums->get (v);
3950 if (!ifs || !ifs->m_candidate)
3951 continue;
3952
3953 bool return_needed
3954 = (ifs->m_returns_value
3955 && (!dbg_cnt (ipa_sra_retvalues)
3956 || v->call_for_symbol_and_aliases (retval_used_p,
3957 NULL, true)));
3958 ifs->m_return_ignored = !return_needed;
3959 if (return_needed)
3960 isra_push_node_to_stack (v, ifs, &stack);
3961 }
3962
3963 while (!stack.is_empty ())
3964 {
3965 cgraph_node *node = stack.pop ();
3966 isra_func_summary *ifs = func_sums->get (node);
3967 gcc_checking_assert (ifs && ifs->m_queued);
3968 ifs->m_queued = false;
3969
3970 for (cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
3971 if (ipa_edge_within_scc (cs)
3972 && call_sums->get (cs)->m_return_returned)
3973 {
3974 enum availability av;
3975 cgraph_node *callee = cs->callee->function_symbol (&av);
3976 isra_func_summary *to_ifs = func_sums->get (callee);
3977 if (to_ifs && to_ifs->m_return_ignored)
3978 {
3979 to_ifs->m_return_ignored = false;
3980 isra_push_node_to_stack (callee, to_ifs, &stack);
3981 }
3982 }
3983 }
3984 cycle_nodes.release ();
3985 }
3986
3987 ipa_free_postorder_info ();
3988 free (order);
3989
3990 if (dump_file)
3991 {
3992 if (dump_flags & TDF_DETAILS)
3993 {
3994 fprintf (dump_file, "\n========== IPA-SRA propagation final state "
3995 " ==========\n");
3996 ipa_sra_dump_all_summaries (dump_file);
3997 }
3998 fprintf (dump_file, "\n========== IPA-SRA decisions ==========\n");
3999 }
4000
4001 hash_map<const char *, unsigned> *clone_num_suffixes
4002 = new hash_map<const char *, unsigned>;
4003
4004 cgraph_node *node;
4005 FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
4006 process_isra_node_results (node, clone_num_suffixes);
4007
4008 delete clone_num_suffixes;
4009 ggc_delete (func_sums);
4010 func_sums = NULL;
4011 delete call_sums;
4012 call_sums = NULL;
4013
4014 if (dump_file)
4015 fprintf (dump_file, "\n========== IPA SRA IPA analysis done "
4016 "==========\n\n");
4017 return 0;
4018}
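
The two sweeps follow from the direction each property flows: whether a
parameter can be removed or split depends on what callees do with it, so SCCs
are visited in reduced postorder (callees before callers), while whether a
return value is needed depends on callers, so the second sweep walks the same
order backwards. A trivial standalone sketch over a hypothetical postorder
array:

#include <cstdio>

int main ()
{
  /* Hypothetical SCC representatives, callees before callers.  */
  const char *order[] = { "leaf_a", "leaf_b", "middle", "main" };
  const int count = 4;

  /* Callees -> callers: parameter removal and splitting.  */
  for (int i = 0; i < count; i++)
    printf ("parameter sweep visits %s\n", order[i]);

  /* Callers -> callees: return-value removal.  */
  for (int i = count - 1; i >= 0; i--)
    printf ("return-value sweep visits %s\n", order[i]);
  return 0;
}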
4019
4020
4021const pass_data pass_data_ipa_sra =
4022{
4023 IPA_PASS, /* type */
4024 "sra", /* name */
4025 OPTGROUP_NONE, /* optinfo_flags */
4026 TV_IPA_SRA, /* tv_id */
4027 0, /* properties_required */
4028 0, /* properties_provided */
4029 0, /* properties_destroyed */
4030 0, /* todo_flags_start */
4031 ( TODO_dump_symtab | TODO_remove_functions ), /* todo_flags_finish */
4032};
4033
4034class pass_ipa_sra : public ipa_opt_pass_d
4035{
4036public:
4037 pass_ipa_sra (gcc::context *ctxt)
4038 : ipa_opt_pass_d (pass_data_ipa_sra, ctxt,
4039 ipa_sra_generate_summary, /* generate_summary */
4040 ipa_sra_write_summary, /* write_summary */
4041 ipa_sra_read_summary, /* read_summary */
4042 NULL, /* write_optimization_summary */
4043 NULL, /* read_optimization_summary */
4044 NULL, /* stmt_fixup */
4045 0, /* function_transform_todo_flags_start */
4046 NULL, /* function_transform */
4047 NULL) /* variable_transform */
4048 {}
4049
4050 /* opt_pass methods: */
4051 virtual bool gate (function *)
4052 {
4053 /* TODO: We should remove the optimize check after we ensure we never run
4054 IPA passes when not optimizing. */
4055 return (flag_ipa_sra && optimize);
4056 }
4057
4058 virtual unsigned int execute (function *) { return ipa_sra_analysis (); }
4059
4060}; // class pass_ipa_sra
4061
4062} // anon namespace
4063
4064/* Intraprocedural part of IPA-SRA analysis. Scan function body of NODE and
4065 create a summary structure describing IPA-SRA opportunities and constraints
4066 in it. */
4067
4068static void
4069ipa_sra_summarize_function (cgraph_node *node)
4070{
4071 if (dump_file)
4072 fprintf (dump_file, "Creating summary for %s/%i:\n", node->name (),
4073 node->order);
4074 if (!ipa_sra_preliminary_function_checks (node))
4075 {
4076 isra_analyze_all_outgoing_calls (node);
4077 return;
4078 }
4079 gcc_obstack_init (&gensum_obstack);
4080 isra_func_summary *ifs = func_sums->get_create (node);
4081 ifs->m_candidate = true;
4082 tree ret = TREE_TYPE (TREE_TYPE (node->decl));
4083 ifs->m_returns_value = (TREE_CODE (ret) != VOID_TYPE);
4084
4085 decl2desc = new hash_map<tree, gensum_param_desc *>;
4086 unsigned count = 0;
4087 for (tree parm = DECL_ARGUMENTS (node->decl); parm; parm = DECL_CHAIN (parm))
4088 count++;
4089
4090 if (count > 0)
4091 {
4092 auto_vec<gensum_param_desc, 16> param_descriptions (count);
4093 param_descriptions.reserve_exact (count);
4094 param_descriptions.quick_grow_cleared (count);
4095
4096 bool cfun_pushed = false;
4097 struct function *fun = DECL_STRUCT_FUNCTION (node->decl);
4098 if (create_parameter_descriptors (node, &param_descriptions))
4099 {
4100 push_cfun (fun);
4101 cfun_pushed = true;
4102 final_bbs = BITMAP_ALLOC (NULL);
4103 bb_dereferences = XCNEWVEC (HOST_WIDE_INT,
4104 by_ref_count
4105 * last_basic_block_for_fn (fun));
4106 aa_walking_limit = opt_for_fn (node->decl, param_ipa_max_aa_steps);
4107 scan_function (node, fun);
4108
4109 if (dump_file)
4110 {
4111 dump_gensum_param_descriptors (dump_file, node->decl,
4112 &param_descriptions);
4113 fprintf (dump_file, "----------------------------------------\n");
4114 }
4115 }
4116 process_scan_results (node, fun, ifs, &param_descriptions);
4117
4118 if (cfun_pushed)
4119 pop_cfun ();
4120 if (bb_dereferences)
4121 {
4122 free (bb_dereferences);
4123 bb_dereferences = NULL;
4124 BITMAP_FREE (final_bbs);
4125 final_bbs = NULL;
4126 }
4127 }
4128 isra_analyze_all_outgoing_calls (node);
4129
4130 delete decl2desc;
4131 decl2desc = NULL;
4132 obstack_free (&gensum_obstack, NULL);
4133 if (dump_file)
4134 fprintf (dump_file, "\n\n");
4135 if (flag_checking)
4136 verify_splitting_accesses (node, false);
4137 return;
4138}
4139
4140ipa_opt_pass_d *
4141make_pass_ipa_sra (gcc::context *ctxt)
4142{
4143 return new pass_ipa_sra (ctxt);
4144}
4145
4146
4147#include "gt-ipa-sra.h"

/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/vec.h

1/* Vector API for GNU compiler.
2 Copyright (C) 2004-2021 Free Software Foundation, Inc.
3 Contributed by Nathan Sidwell <nathan@codesourcery.com>
4 Re-implemented in C++ by Diego Novillo <dnovillo@google.com>
5
6This file is part of GCC.
7
8GCC is free software; you can redistribute it and/or modify it under
9the terms of the GNU General Public License as published by the Free
10Software Foundation; either version 3, or (at your option) any later
11version.
12
13GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14WARRANTY; without even the implied warranty of MERCHANTABILITY or
15FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16for more details.
17
18You should have received a copy of the GNU General Public License
19along with GCC; see the file COPYING3. If not see
20<http://www.gnu.org/licenses/>. */
21
22#ifndef GCC_VEC_H
23#define GCC_VEC_H
24
25/* Some gen* files have no ggc support as the header file gtype-desc.h is
26 missing. Provide these definitions in case ggc.h has not been included.
27 This is not a problem because any code that runs before gengtype is built
28 will never need to use GC vectors. */
29
30extern void ggc_free (void *);
31extern size_t ggc_round_alloc_size (size_t requested_size);
32extern void *ggc_realloc (void *, size_t MEM_STAT_DECL);
33
34/* Templated vector type and associated interfaces.
35
36 The interface functions are typesafe and use inline functions,
37 sometimes backed by out-of-line generic functions. The vectors are
38 designed to interoperate with the GTY machinery.
39
40 There are both 'index' and 'iterate' accessors. The index accessor
41 is implemented by operator[]. The iterator returns a boolean
42 iteration condition and updates the iteration variable passed by
43 reference. Because the iterator will be inlined, the address-of
44 can be optimized away.
45
46 Each operation that increases the number of active elements is
47 available in 'quick' and 'safe' variants. The former presumes that
48 there is sufficient allocated space for the operation to succeed
49 (it dies if there is not). The latter will reallocate the
50 vector, if needed. Reallocation causes an exponential increase in
51 vector size. If you know you will be adding N elements, it would
52 be more efficient to use the reserve operation before adding the
53 elements with the 'quick' operation. This will ensure there are at
54 least as many elements as you ask for; it will increase
55 exponentially if there are too few spare slots. If you want to reserve a
56 specific number of slots, but do not want the exponential increase
57 (for instance, you know this is the last allocation), use the
58 reserve_exact operation. You can also create a vector of a
59 specific size from the get go.
60
61 You should prefer the push and pop operations, as they append and
62 remove from the end of the vector. If you need to remove several
63 items in one go, use the truncate operation. The insert and remove
64 operations allow you to change elements in the middle of the
65 vector. There are two remove operations, one which preserves the
66 element ordering 'ordered_remove', and one which does not
67 'unordered_remove'. The latter function copies the end element
68 into the removed slot, rather than invoke a memmove operation. The
69 'lower_bound' function will determine where to place an item in the
70 array using insert that will maintain sorted order.
71
72 Vectors are template types with three arguments: the type of the
73 elements in the vector, the allocation strategy, and the physical
74 layout to use
75
76 Four allocation strategies are supported:
77
78 - Heap: allocation is done using malloc/free. This is the
79 default allocation strategy.
80
81 - GC: allocation is done using ggc_alloc/ggc_free.
82
83 - GC atomic: same as GC with the exception that the elements
84 themselves are assumed to be of an atomic type that does
85 not need to be garbage collected. This means that marking
86 routines do not need to traverse the array marking the
87 individual elements. This increases the performance of
88 GC activities.
89
90 Two physical layouts are supported:
91
92 - Embedded: The vector is structured using the trailing array
93 idiom. The last member of the structure is an array of size
94 1. When the vector is initially allocated, a single memory
95 block is created to hold the vector's control data and the
96 array of elements. These vectors cannot grow without
97 reallocation (see discussion on embeddable vectors below).
98
99 - Space efficient: The vector is structured as a pointer to an
100 embedded vector. This is the default layout. It means that
101 vectors occupy a single word of storage before initial
102 allocation. Vectors are allowed to grow (the internal
103 pointer is reallocated but the main vector instance does not
104 need to relocate).
105
106 The type, allocation and layout are specified when the vector is
107 declared.
108
109 If you need to directly manipulate a vector, then the 'address'
110 accessor will return the address of the start of the vector. Also
111 the 'space' predicate will tell you whether there is spare capacity
112 in the vector. You will not normally need to use these two functions.
113
114 Notes on the different layout strategies
115
116 * Embeddable vectors (vec<T, A, vl_embed>)
117
118 These vectors are suitable to be embedded in other data
119 structures so that they can be pre-allocated in a contiguous
120 memory block.
121
122 Embeddable vectors are implemented using the trailing array
123 idiom, thus they are not resizeable without changing the address
124 of the vector object itself. This means you cannot have
125 variables or fields of embeddable vector type -- always use a
126 pointer to a vector. The one exception is the final field of a
127 structure, which could be a vector type.
128
129 You will have to use the embedded_size & embedded_init calls to
130 create such objects, and they will not be resizeable (so the
131 'safe' allocation variants are not available).
132
133 Properties of embeddable vectors:
134
135 - The whole vector and control data are allocated in a single
136 contiguous block. It uses the trailing-vector idiom, so
137 allocation must reserve enough space for all the elements
138 in the vector plus its control data.
139 - The vector cannot be re-allocated.
140 - The vector cannot grow nor shrink.
141 - No indirections needed for access/manipulation.
142 - It requires 2 words of storage (prior to vector allocation).
143
144
145 * Space efficient vector (vec<T, A, vl_ptr>)
146
147 These vectors can grow dynamically and are allocated together
148 with their control data. They are suited to be included in data
149 structures. Prior to initial allocation, they only take a single
150 word of storage.
151
152 These vectors are implemented as a pointer to embeddable vectors.
153 The semantics allow for this pointer to be NULL to represent
154 empty vectors. This way, empty vectors occupy minimal space in
155 the structure containing them.
156
157 Properties:
158
159 - The whole vector and control data are allocated in a single
160 contiguous block.
161 - The whole vector may be re-allocated.
162 - Vector data may grow and shrink.
163 - Access and manipulation requires a pointer test and
164 indirection.
165 - It requires 1 word of storage (prior to vector allocation).
166
167 An example of their use would be,
168
169 struct my_struct {
170 // A space-efficient vector of tree pointers in GC memory.
171 vec<tree, va_gc, vl_ptr> v;
172 };
173
174 struct my_struct *s;
175
176 if (s->v.length ()) { we have some contents }
177 s->v.safe_push (decl); // append some decl onto the end
178 for (ix = 0; s->v.iterate (ix, &elt); ix++)
179 { do something with elt }
180*/
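A minimal sketch of the quick/safe distinction described above (the helper name fill_with_iota is hypothetical; it assumes a GCC translation unit that already includes vec.h):

/* Reserve up front, then use the 'quick' variant; safe_push would
   instead grow the vector on demand with exponential reallocation.  */
static void
fill_with_iota (vec<int> &v, unsigned n)
{
  v.reserve_exact (n);   /* exactly n free slots, no exponential slack */
  for (unsigned i = 0; i < n; i++)
    v.quick_push (i);    /* presumes space is available */
}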
181
182/* Support function for statistics. */
183extern void dump_vec_loc_statistics (void);
184
185/* Hashtable mapping vec addresses to descriptors. */
186extern htab_t vec_mem_usage_hash;
187
188/* Control data for vectors. This contains the number of allocated
189 and used slots inside a vector. */
190
191struct vec_prefix
192{
193 /* FIXME - These fields should be private, but we need to cater to
194 compilers that have stricter notions of PODness for types. */
195
196 /* Memory allocation support routines in vec.c. */
197 void register_overhead (void *, size_t, size_t CXX_MEM_STAT_INFO);
198 void release_overhead (void *, size_t, size_t, bool CXX_MEM_STAT_INFO);
199 static unsigned calculate_allocation (vec_prefix *, unsigned, bool);
200 static unsigned calculate_allocation_1 (unsigned, unsigned);
201
202 /* Note that vec_prefix should be a base class for vec, but we use
203 offsetof() on vector fields of tree structures (e.g.,
204 tree_binfo::base_binfos), and offsetof only supports base types.
205
206 To compensate, we make vec_prefix a field inside vec and make
207 vec a friend class of vec_prefix so it can access its fields. */
208 template <typename, typename, typename> friend struct vec;
209
210 /* The allocator types also need access to our internals. */
211 friend struct va_gc;
212 friend struct va_gc_atomic;
213 friend struct va_heap;
214
215 unsigned m_alloc : 31;
216 unsigned m_using_auto_storage : 1;
217 unsigned m_num;
218};
219
220/* Calculate the number of slots to reserve a vector, making sure that
221 RESERVE slots are free. If EXACT grow exactly, otherwise grow
222 exponentially. PFX is the control data for the vector. */
223
224inline unsigned
225vec_prefix::calculate_allocation (vec_prefix *pfx, unsigned reserve,
226 bool exact)
227{
228 if (exact)
229 return (pfx ? pfx->m_num : 0) + reserve;
230 else if (!pfx)
231 return MAX (4, reserve);
232 return calculate_allocation_1 (pfx->m_alloc, pfx->m_num + reserve);
233}
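A minimal sketch of the policy above; the values follow directly from the code, while the exponential case is handled out of line by calculate_allocation_1:

/* For a not-yet-allocated vector (pfx == NULL):  */
unsigned a = vec_prefix::calculate_allocation (NULL, 1, false); /* MAX (4, 1) == 4 */
unsigned b = vec_prefix::calculate_allocation (NULL, 1, true);  /* 0 + 1 == 1 */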
234
235template<typename, typename, typename> struct vec;
236
237/* Valid vector layouts
238
239 vl_embed - Embeddable vector that uses the trailing array idiom.
240 vl_ptr - Space efficient vector that uses a pointer to an
241 embeddable vector. */
242struct vl_embed { };
243struct vl_ptr { };
244
245
246/* Types of supported allocations
247
248 va_heap - Allocation uses malloc/free.
249 va_gc - Allocation uses ggc_alloc.
250 va_gc_atomic - Same as GC, but individual elements of the array
251 do not need to be marked during collection. */
252
253/* Allocator type for heap vectors. */
254struct va_heap
255{
256 /* Heap vectors are frequently regular instances, so use the vl_ptr
257 layout for them. */
258 typedef vl_ptr default_layout;
259
260 template<typename T>
261 static void reserve (vec<T, va_heap, vl_embed> *&, unsigned, bool
262 CXX_MEM_STAT_INFO);
263
264 template<typename T>
265 static void release (vec<T, va_heap, vl_embed> *&);
266};
267
268
269/* Allocator for heap memory. Ensure there are at least RESERVE free
270 slots in V. If EXACT is true, grow exactly, else grow
271 exponentially. As a special case, if the vector had not been
272 allocated and RESERVE is 0, no vector will be created. */
273
274template<typename T>
275inline void
276va_heap::reserve (vec<T, va_heap, vl_embed> *&v, unsigned reserve, bool exact
277 MEM_STAT_DECL)
278{
279 size_t elt_size = sizeof (T);
280 unsigned alloc
281 = vec_prefix::calculate_allocation (v ? &v->m_vecpfx : 0, reserve, exact);
282 gcc_checking_assert (alloc);
283
284 if (GATHER_STATISTICS && v)
285 v->m_vecpfx.release_overhead (v, elt_size * v->allocated (),
286 v->allocated (), false);
287
288 size_t size = vec<T, va_heap, vl_embed>::embedded_size (alloc);
289 unsigned nelem = v ? v->length () : 0;
290 v = static_cast <vec<T, va_heap, vl_embed> *> (xrealloc (v, size));
291 v->embedded_init (alloc, nelem);
292
293 if (GATHER_STATISTICS)
294 v->m_vecpfx.register_overhead (v, alloc, elt_size PASS_MEM_STAT);
295}
296
297
298#if GCC_VERSION >= 4007
299#pragma GCC diagnostic push
300#pragma GCC diagnostic ignored "-Wfree-nonheap-object"
301#endif
302
303/* Free the heap space allocated for vector V. */
304
305template<typename T>
306void
307va_heap::release (vec<T, va_heap, vl_embed> *&v)
308{
309 size_t elt_size = sizeof (T);
310 if (v == NULL)
311 return;
312
313 if (GATHER_STATISTICS)
314 v->m_vecpfx.release_overhead (v, elt_size * v->allocated (),
315 v->allocated (), true);
316 ::free (v);
317 v = NULL;
318}
319
320#if GCC_VERSION >= 4007
321#pragma GCC diagnostic pop
322#endif
323
324/* Allocator type for GC vectors. Notice that we need the structure
325 declaration even if GC is not enabled. */
326
327struct va_gc
328{
329 /* Use vl_embed as the default layout for GC vectors. Due to GTY
330 limitations, GC vectors must always be pointers, so it is more
331 efficient to use a pointer to the vl_embed layout, rather than
332 using a pointer to a pointer as would be the case with vl_ptr. */
333 typedef vl_embed default_layout;
334
335 template<typename T, typename A>
336 static void reserve (vec<T, A, vl_embed> *&, unsigned, bool
337 CXX_MEM_STAT_INFO);
338
339 template<typename T, typename A>
340 static void release (vec<T, A, vl_embed> *&v);
341};
342
343
344/* Free GC memory used by V and reset V to NULL. */
345
346template<typename T, typename A>
347inline void
348va_gc::release (vec<T, A, vl_embed> *&v)
349{
350 if (v)
351 ::ggc_free (v);
352 v = NULL;
353}
354
355
356/* Allocator for GC memory. Ensure there are at least RESERVE free
357 slots in V. If EXACT is true, grow exactly, else grow
358 exponentially. As a special case, if the vector had not been
359 allocated and RESERVE is 0, no vector will be created. */
360
361template<typename T, typename A>
362void
363va_gc::reserve (vec<T, A, vl_embed> *&v, unsigned reserve, bool exact
364 MEM_STAT_DECL)
365{
366 unsigned alloc
367 = vec_prefix::calculate_allocation (v
15.1
'v' is non-null
? &v->m_vecpfx : 0, reserve, exact);
16
'?' condition is true
368 if (!alloc)
17
Assuming 'alloc' is 0
18
Taking true branch
369 {
370 ::ggc_free (v);
371 v = NULL;
19
Null pointer value stored to field 'm_parameters'
372 return;
373 }
374
375 /* Calculate the amount of space we want. */
376 size_t size = vec<T, A, vl_embed>::embedded_size (alloc);
377
378 /* Ask the allocator how much space it will really give us. */
379 size = ::ggc_round_alloc_size (size);
380
381 /* Adjust the number of slots accordingly. */
382 size_t vec_offset = sizeof (vec_prefix);
383 size_t elt_size = sizeof (T);
384 alloc = (size - vec_offset) / elt_size;
385
386 /* And finally, recalculate the amount of space we ask for. */
387 size = vec_offset + alloc * elt_size;
388
389 unsigned nelem = v ? v->length () : 0;
390 v = static_cast <vec<T, A, vl_embed> *> (::ggc_realloc (v, size
391 PASS_MEM_STAT));
392 v->embedded_init (alloc, nelem);
393}
394
395
396/* Allocator type for GC vectors. This is for vectors of types that
397 are atomic w.r.t. collection, so allocation and deallocation are
398 completely inherited from va_gc. */
399struct va_gc_atomic : va_gc
400{
401};
402
403
404/* Generic vector template. Default values for A and L indicate the
405 most commonly used strategies.
406
407 FIXME - Ideally, they would all be vl_ptr to encourage using regular
408 instances for vectors, but the existing GTY machinery is limited
409 in that it can only deal with GC objects that are pointers
410 themselves.
411
412 This means that vector operations that need to deal with
413 potentially NULL pointers, must be provided as free
414 functions (see the vec_safe_* functions below). */
415template<typename T,
416 typename A = va_heap,
417 typename L = typename A::default_layout>
418struct GTY((user)) vec
419{
420};
421
422/* Allow C++11 range-based 'for' to work directly on vec<T>*. */
423template<typename T, typename A, typename L>
424T* begin (vec<T,A,L> *v) { return v ? v->begin () : nullptr; }
425template<typename T, typename A, typename L>
426T* end (vec<T,A,L> *v) { return v ? v->end () : nullptr; }
427template<typename T, typename A, typename L>
428const T* begin (const vec<T,A,L> *v) { return v ? v->begin () : nullptr; }
429template<typename T, typename A, typename L>
430const T* end (const vec<T,A,L> *v) { return v ? v->end () : nullptr; }
431
432/* Generic vec<> debug helpers.
433
434 These need to be instantiated for each vec<TYPE> used throughout
435 the compiler like this:
436
437 DEFINE_DEBUG_VEC (TYPE)
438
439 The reason we have a debug_helper() is because GDB can't
440 disambiguate a plain call to debug(some_vec), and it must be called
441 like debug<TYPE>(some_vec). */
442
443template<typename T>
444void
445debug_helper (vec<T> &ref)
446{
447 unsigned i;
448 for (i = 0; i < ref.length (); ++i)
449 {
450 fprintf (stderr, "[%d] = ", i);
451 debug_slim (ref[i]);
452 fputc ('\n', stderr);
453 }
454}
455
456/* We need a separate va_gc variant here because default template
457 arguments for functions cannot be used in C++98. Once this
458 restriction is removed, those variants should be folded into the
459 debug_helper above. */
460
461template<typename T>
462void
463debug_helper (vec<T, va_gc> &ref)
464{
465 unsigned i;
466 for (i = 0; i < ref.length (); ++i)
467 {
468 fprintf (stderr, "[%d] = ", i);
469 debug_slim (ref[i]);
470 fputc ('\n', stderr);
471 }
472}
473
474/* Macro to define debug(vec<T>) and debug(vec<T, va_gc>) helper
475 functions for a type T. */
476
477#define DEFINE_DEBUG_VEC(T) \
478 template void debug_helper (vec<T> &); \
479 template void debug_helper (vec<T, va_gc> &); \
480 /* Define the vec<T> debug functions. */ \
481 DEBUG_FUNCTION void \
482 debug (vec<T> &ref) \
483 { \
484 debug_helper <T> (ref); \
485 } \
486 DEBUG_FUNCTION void \
487 debug (vec<T> *ptr) \
488 { \
489 if (ptr) \
490 debug (*ptr); \
491 else \
492 fprintf (stderr, "<nil>\n"); \
493 } \
494 /* Define the vec<T, va_gc> debug functions. */ \
495 DEBUG_FUNCTION void \
496 debug (vec<T, va_gc> &ref) \
497 { \
498 debug_helper <T> (ref); \
499 } \
500 DEBUG_FUNCTION void \
501 debug (vec<T, va_gc> *ptr) \
502 { \
503 if (ptr) \
504 debug (*ptr); \
505 else \
506 fprintf (stderr, "<nil>\n"); \
507 }
508
509/* Default-construct N elements in DST. */
510
511template <typename T>
512inline void
513vec_default_construct (T *dst, unsigned n)
514{
515#ifdef BROKEN_VALUE_INITIALIZATION
516 /* Versions of GCC before 4.4 sometimes leave certain objects
517 uninitialized when value initialized, though if the type has
518 user defined default ctor, that ctor is invoked. As a workaround
519 perform clearing first and then the value initialization, which
520 fixes the case when value initialization doesn't initialize due to
521 the bugs and should initialize to all zeros, but still allows
522 vectors for types with user defined default ctor that initializes
523 some or all elements to non-zero. If T has no user defined
524 default ctor and some non-static data members have user defined
525 default ctors that initialize to non-zero the workaround will
526 still not work properly; in that case we just need to provide
527 user defined default ctor. */
528 memset (dst, '\0', sizeof (T) * n);
529#endif
530 for ( ; n; ++dst, --n)
531 ::new (static_cast<void*>(dst)) T ();
532}
533
534/* Copy-construct N elements in DST from *SRC. */
535
536template <typename T>
537inline void
538vec_copy_construct (T *dst, const T *src, unsigned n)
539{
540 for ( ; n; ++dst, ++src, --n)
541 ::new (static_cast<void*>(dst)) T (*src);
542}
543
544/* Type to provide zero-initialized values for vec<T, A, L>. This is
545 used to provide nil initializers for vec instances. Since vec must
546 be a trivially copyable type that can be copied by memcpy and zeroed
547 out by memset, it must have defaulted default and copy ctor and copy
548 assignment. To initialize a vec either use value initialization
549 (e.g., vec() or vec v{ };) or assign it the value vNULL. This isn't
550 needed for file-scope and function-local static vectors, which are
551 zero-initialized by default. */
552struct vnull { };
553constexpr vnull vNULL{ };
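A minimal sketch of the initialization idioms this comment describes, assuming vec.h is in scope:

vec<int> a = vNULL;   /* nil-initialized from the generic vNULL */
vec<int> b{};         /* value initialization, equivalent result */
static vec<int> c;    /* file-scope statics are zeroed by default */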
554
555
556/* Embeddable vector. These vectors are suitable to be embedded
557 in other data structures so that they can be pre-allocated in a
558 contiguous memory block.
559
560 Embeddable vectors are implemented using the trailing array idiom,
561 thus they are not resizeable without changing the address of the
562 vector object itself. This means you cannot have variables or
563 fields of embeddable vector type -- always use a pointer to a
564 vector. The one exception is the final field of a structure, which
565 could be a vector type.
566
567 You will have to use the embedded_size & embedded_init calls to
568 create such objects, and they will not be resizeable (so the 'safe'
569 allocation variants are not available).
570
571 Properties:
572
573 - The whole vector and control data are allocated in a single
574 contiguous block. It uses the trailing-vector idiom, so
575 allocation must reserve enough space for all the elements
576 in the vector plus its control data.
577 - The vector cannot be re-allocated.
578 - The vector cannot grow nor shrink.
579 - No indirections needed for access/manipulation.
580 - It requires 2 words of storage (prior to vector allocation). */
581
582template<typename T, typename A>
583struct GTY((user)) vec<T, A, vl_embed>
584{
585public:
586 unsigned allocated (void) const { return m_vecpfx.m_alloc; }
587 unsigned length (void) const { return m_vecpfx.m_num; }
588 bool is_empty (void) const { return m_vecpfx.m_num == 0; }
589 T *address (void) { return m_vecdata; }
590 const T *address (void) const { return m_vecdata; }
591 T *begin () { return address (); }
592 const T *begin () const { return address (); }
593 T *end () { return address () + length (); }
594 const T *end () const { return address () + length (); }
595 const T &operator[] (unsigned) const;
596 T &operator[] (unsigned);
597 T &last (void);
598 bool space (unsigned) const;
599 bool iterate (unsigned, T *) const;
600 bool iterate (unsigned, T **) const;
601 vec *copy (ALONE_CXX_MEM_STAT_INFO) const;
602 void splice (const vec &);
603 void splice (const vec *src);
604 T *quick_push (const T &);
605 T &pop (void);
606 void truncate (unsigned);
607 void quick_insert (unsigned, const T &);
608 void ordered_remove (unsigned);
609 void unordered_remove (unsigned);
610 void block_remove (unsigned, unsigned);
611 void qsort (int (*) (const void *, const void *));
612 void sort (int (*) (const void *, const void *, void *), void *);
613 void stablesort (int (*) (const void *, const void *, void *), void *);
614 T *bsearch (const void *key, int (*compar)(const void *, const void *));
615 T *bsearch (const void *key,
616 int (*compar)(const void *, const void *, void *), void *);
617 unsigned lower_bound (T, bool (*)(const T &, const T &)) const;
618 bool contains (const T &search) const;
619 static size_t embedded_size (unsigned);
620 void embedded_init (unsigned, unsigned = 0, unsigned = 0);
621 void quick_grow (unsigned len);
622 void quick_grow_cleared (unsigned len);
623
624 /* vec class can access our internal data and functions. */
625 template <typename, typename, typename> friend struct vec;
626
627 /* The allocator types also need access to our internals. */
628 friend struct va_gc;
629 friend struct va_gc_atomic;
630 friend struct va_heap;
631
632 /* FIXME - These fields should be private, but we need to cater to
633 compilers that have stricter notions of PODness for types. */
634 vec_prefix m_vecpfx;
635 T m_vecdata[1];
636};
637
638
639/* Convenience wrapper functions to use when dealing with pointers to
640 embedded vectors. Some functionality for these vectors must be
641 provided via free functions for these reasons:
642
643 1- The pointer may be NULL (e.g., before initial allocation).
644
645 2- When the vector needs to grow, it must be reallocated, so
646 the pointer will change its value.
647
648 Because of limitations with the current GC machinery, all vectors
649 in GC memory *must* be pointers. */
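A minimal sketch of the free-function pattern just described, for a heap-allocated embedded vector; the reference-to-pointer parameters let each call update V when the storage is reallocated:

vec<int, va_heap, vl_embed> *v = NULL;
vec_safe_push (v, 1);             /* tolerates NULL; allocates on first use */
unsigned n = vec_safe_length (v); /* now 1 */
vec_free (v);                     /* releases storage and resets v to NULL */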
650
651
652/* If V contains no room for NELEMS elements, return false. Otherwise,
653 return true. */
654template<typename T, typename A>
655inline bool
656vec_safe_space (const vec<T, A, vl_embed> *v, unsigned nelems)
657{
658 return v ? v->space (nelems) : nelems == 0;
659}
660
661
662/* If V is NULL, return 0. Otherwise, return V->length(). */
663template<typename T, typename A>
664inline unsigned
665vec_safe_length (const vec<T, A, vl_embed> *v)
666{
667 return v ? v->length () : 0;
4
Assuming 'v' is non-null
5
'?' condition is true
6
Returning value, which participates in a condition later
668}
669
670
671/* If V is NULL, return NULL. Otherwise, return V->address(). */
672template<typename T, typename A>
673inline T *
674vec_safe_address (vec<T, A, vl_embed> *v)
675{
676 return v ? v->address () : NULL;
677}
678
679
680/* If V is NULL, return true. Otherwise, return V->is_empty(). */
681template<typename T, typename A>
682inline bool
683vec_safe_is_empty (vec<T, A, vl_embed> *v)
684{
685 return v ? v->is_empty () : true;
686}
687
688/* If V does not have space for NELEMS elements, call
689 V->reserve(NELEMS, EXACT). */
690template<typename T, typename A>
691inline bool
692vec_safe_reserve (vec<T, A, vl_embed> *&v, unsigned nelems, bool exact = false
693 CXX_MEM_STAT_INFO)
694{
695 bool extend = nelems
11.1
'nelems' is not equal to 0
? !vec_safe_space (v, nelems) : false;
12
'?' condition is true
13
Assuming the condition is true
696 if (extend
13.1
'extend' is true
)
14
Taking true branch
697 A::reserve (v, nelems, exact PASS_MEM_STAT);
15
Calling 'va_gc::reserve'
20
Returning from 'va_gc::reserve'
698 return extend;
699}
700
701template<typename T, typename A>
702inline bool
703vec_safe_reserve_exact (vec<T, A, vl_embed> *&v, unsigned nelems
704 CXX_MEM_STAT_INFO)
705{
706 return vec_safe_reserve (v, nelems, true PASS_MEM_STAT);
11
Calling 'vec_safe_reserve<isra_param_desc, va_gc>'
21
Returning from 'vec_safe_reserve<isra_param_desc, va_gc>'
707}
708
709
710/* Allocate GC memory for V with space for NELEMS slots. If NELEMS
711 is 0, V is initialized to NULL. */
712
713template<typename T, typename A>
714inline void
715vec_alloc (vec<T, A, vl_embed> *&v, unsigned nelems CXX_MEM_STAT_INFO)
716{
717 v = NULL;
718 vec_safe_reserve (v, nelems, false PASS_MEM_STAT);
719}
720
721
722/* Free the GC memory allocated by vector V and set it to NULL. */
723
724template<typename T, typename A>
725inline void
726vec_free (vec<T, A, vl_embed> *&v)
727{
728 A::release (v);
729}
730
731
732/* Grow V to length LEN. Allocate it, if necessary. */
733template<typename T, typename A>
734inline void
735vec_safe_grow (vec<T, A, vl_embed> *&v, unsigned len,
736 bool exact = false CXX_MEM_STAT_INFO)
737{
738 unsigned oldlen = vec_safe_length (v);
739 gcc_checking_assert (len >= oldlen);
740 vec_safe_reserve (v, len - oldlen, exact PASS_MEM_STAT);
741 v->quick_grow (len);
742}
743
744
745/* If V is NULL, allocate it. Call V->safe_grow_cleared(LEN). */
746template<typename T, typename A>
747inline void
748vec_safe_grow_cleared (vec<T, A, vl_embed> *&v, unsigned len,
749 bool exact = false CXX_MEM_STAT_INFO)
750{
751 unsigned oldlen = vec_safe_length (v);
752 vec_safe_grow (v, len, exact PASS_MEM_STAT);
753 vec_default_construct (v->address () + oldlen, len - oldlen);
754}
755
756
757/* Assume V is not NULL. */
758
759template<typename T>
760inline void
761vec_safe_grow_cleared (vec<T, va_heap, vl_ptr> *&v,
762 unsigned len, bool exact = false CXX_MEM_STAT_INFO)
763{
764 v->safe_grow_cleared (len, exact PASS_MEM_STAT);
765}
766
767/* If V does not have space for NELEMS elements, call
768 V->reserve(NELEMS, EXACT). */
769
770template<typename T>
771inline bool
772vec_safe_reserve (vec<T, va_heap, vl_ptr> *&v, unsigned nelems, bool exact = false
773 CXX_MEM_STAT_INFO)
774{
775 return v->reserve (nelems, exact);
776}
777
778
779/* If V is NULL return false, otherwise return V->iterate(IX, PTR). */
780template<typename T, typename A>
781inline bool
782vec_safe_iterate (const vec<T, A, vl_embed> *v, unsigned ix, T **ptr)
783{
784 if (v)
785 return v->iterate (ix, ptr);
786 else
787 {
788 *ptr = 0;
789 return false;
790 }
791}
792
793template<typename T, typename A>
794inline bool
795vec_safe_iterate (const vec<T, A, vl_embed> *v, unsigned ix, T *ptr)
796{
797 if (v)
798 return v->iterate (ix, ptr);
799 else
800 {
801 *ptr = 0;
802 return false;
803 }
804}
805
806
807/* If V has no room for one more element, reallocate it. Then call
808 V->quick_push(OBJ). */
809template<typename T, typename A>
810inline T *
811vec_safe_push (vec<T, A, vl_embed> *&v, const T &obj CXX_MEM_STAT_INFO)
812{
813 vec_safe_reserve (v, 1, false PASS_MEM_STAT);
814 return v->quick_push (obj);
815}
816
817
818/* if V has no room for one more element, reallocate it. Then call
819 V->quick_insert(IX, OBJ). */
820template<typename T, typename A>
821inline void
822vec_safe_insert (vec<T, A, vl_embed> *&v, unsigned ix, const T &obj
823 CXX_MEM_STAT_INFO)
824{
825 vec_safe_reserve (v, 1, false PASS_MEM_STAT);
826 v->quick_insert (ix, obj);
827}
828
829
830/* If V is NULL, do nothing. Otherwise, call V->truncate(SIZE). */
831template<typename T, typename A>
832inline void
833vec_safe_truncate (vec<T, A, vl_embed> *v, unsigned size)
834{
835 if (v)
836 v->truncate (size);
837}
838
839
840/* If SRC is not NULL, return a pointer to a copy of it. */
841template<typename T, typename A>
842inline vec<T, A, vl_embed> *
843vec_safe_copy (vec<T, A, vl_embed> *src CXX_MEM_STAT_INFO)
844{
845 return src ? src->copy (ALONE_PASS_MEM_STAT) : NULL;
846}
847
848/* Copy the elements from SRC to the end of DST as if by memcpy.
849 Reallocate DST, if necessary. */
850template<typename T, typename A>
851inline void
852vec_safe_splice (vec<T, A, vl_embed> *&dst, const vec<T, A, vl_embed> *src
853 CXX_MEM_STAT_INFO)
854{
855 unsigned src_len = vec_safe_length (src);
856 if (src_len)
857 {
858 vec_safe_reserve_exact (dst, vec_safe_length (dst) + src_len
859 PASS_MEM_STAT);
860 dst->splice (*src);
861 }
862}
863
864/* Return true if SEARCH is an element of V. Note that this is O(N) in the
865 size of the vector and so should be used with care. */
866
867template<typename T, typename A>
868inline bool
869vec_safe_contains (vec<T, A, vl_embed> *v, const T &search)
870{
871 return v ? v->contains (search) : false;
872}
873
874/* Index into vector. Return the IX'th element. IX must be in the
875 domain of the vector. */
876
877template<typename T, typename A>
878inline const T &
879vec<T, A, vl_embed>::operator[] (unsigned ix) const
880{
881 gcc_checking_assert (ix < m_vecpfx.m_num);
882 return m_vecdata[ix];
883}
884
885template<typename T, typename A>
886inline T &
887vec<T, A, vl_embed>::operator[] (unsigned ix)
888{
889 gcc_checking_assert (ix < m_vecpfx.m_num);
890 return m_vecdata[ix];
891}
892
893
894/* Get the final element of the vector, which must not be empty. */
895
896template<typename T, typename A>
897inline T &
898vec<T, A, vl_embed>::last (void)
899{
900 gcc_checking_assert (m_vecpfx.m_num > 0);
901 return (*this)[m_vecpfx.m_num - 1];
902}
903
904
905/* If this vector has space for NELEMS additional entries, return
906 true. You usually only need to use this if you are doing your
907 own vector reallocation, for instance on an embedded vector. This
908 returns true in exactly the same circumstances that vec::reserve
909 will. */
910
911template<typename T, typename A>
912inline bool
913vec<T, A, vl_embed>::space (unsigned nelems) const
914{
915 return m_vecpfx.m_alloc - m_vecpfx.m_num >= nelems;
916}
917
918
919/* Return iteration condition and update PTR to point to the IX'th
920 element of this vector. Use this to iterate over the elements of a
921 vector as follows,
922
923 for (ix = 0; vec<T, A>::iterate (v, ix, &ptr); ix++)
924 continue; */
925
926template<typename T, typename A>
927inline bool
928vec<T, A, vl_embed>::iterate (unsigned ix, T *ptr) const
929{
930 if (ix < m_vecpfx.m_num)
931 {
932 *ptr = m_vecdata[ix];
933 return true;
934 }
935 else
936 {
937 *ptr = 0;
938 return false;
939 }
940}
941
942
943/* Return iteration condition and update *PTR to point to the
944 IX'th element of this vector. Use this to iterate over the
945 elements of a vector as follows,
946
947 for (ix = 0; v->iterate (ix, &ptr); ix++)
948 continue;
949
950 This variant is for vectors of objects. */
951
952template<typename T, typename A>
953inline bool
954vec<T, A, vl_embed>::iterate (unsigned ix, T **ptr) const
955{
956 if (ix < m_vecpfx.m_num)
957 {
958 *ptr = CONST_CAST (T *, &m_vecdata[ix]);
959 return true;
960 }
961 else
962 {
963 *ptr = 0;
964 return false;
965 }
966}
967
968
969/* Return a pointer to a copy of this vector. */
970
971template<typename T, typename A>
972inline vec<T, A, vl_embed> *
973vec<T, A, vl_embed>::copy (ALONE_MEM_STAT_DECL) const
974{
975 vec<T, A, vl_embed> *new_vec = NULL;
976 unsigned len = length ();
977 if (len)
978 {
979 vec_alloc (new_vec, len PASS_MEM_STAT);
980 new_vec->embedded_init (len, len);
981 vec_copy_construct (new_vec->address (), m_vecdata, len);
982 }
983 return new_vec;
984}
985
986
987/* Copy the elements from SRC to the end of this vector as if by memcpy.
988 The vector must have sufficient headroom available. */
989
990template<typename T, typename A>
991inline void
992vec<T, A, vl_embed>::splice (const vec<T, A, vl_embed> &src)
993{
994 unsigned len = src.length ();
995 if (len)
996 {
997 gcc_checking_assert (space (len));
998 vec_copy_construct (end (), src.address (), len);
999 m_vecpfx.m_num += len;
1000 }
1001}
1002
1003template<typename T, typename A>
1004inline void
1005vec<T, A, vl_embed>::splice (const vec<T, A, vl_embed> *src)
1006{
1007 if (src)
1008 splice (*src);
1009}
1010
1011
1012/* Push OBJ (a new element) onto the end of the vector. There must be
1013 sufficient space in the vector. Return a pointer to the slot
1014 where OBJ was inserted. */
1015
1016template<typename T, typename A>
1017inline T *
1018vec<T, A, vl_embed>::quick_push (const T &obj)
1019{
1020 gcc_checking_assert (space (1));
1021 T *slot = &m_vecdata[m_vecpfx.m_num++];
1022 *slot = obj;
1023 return slot;
1024}
1025
1026
1027/* Pop and return the last element off the end of the vector. */
1028
1029template<typename T, typename A>
1030inline T &
1031vec<T, A, vl_embed>::pop (void)
1032{
1033 gcc_checking_assert (length () > 0);
1034 return m_vecdata[--m_vecpfx.m_num];
1035}
1036
1037
1038/* Set the length of the vector to SIZE. The new length must be less
1039 than or equal to the current length. This is an O(1) operation. */
1040
1041template<typename T, typename A>
1042inline void
1043vec<T, A, vl_embed>::truncate (unsigned size)
1044{
1045 gcc_checking_assert (length () >= size);
1046 m_vecpfx.m_num = size;
1047}
1048
1049
1050/* Insert an element, OBJ, at the IXth position of this vector. There
1051 must be sufficient space. */
1052
1053template<typename T, typename A>
1054inline void
1055vec<T, A, vl_embed>::quick_insert (unsigned ix, const T &obj)
1056{
1057 gcc_checking_assert (length () < allocated ());
1058 gcc_checking_assert (ix <= length ());
1059 T *slot = &m_vecdata[ix];
1060 memmove (slot + 1, slot, (m_vecpfx.m_num++ - ix) * sizeof (T));
1061 *slot = obj;
1062}
1063
1064
1065/* Remove an element from the IXth position of this vector. Ordering of
1066 remaining elements is preserved. This is an O(N) operation due to
1067 memmove. */
1068
1069template<typename T, typename A>
1070inline void
1071vec<T, A, vl_embed>::ordered_remove (unsigned ix)
1072{
1073 gcc_checking_assert (ix < length ());
1074 T *slot = &m_vecdata[ix];
1075 memmove (slot, slot + 1, (--m_vecpfx.m_num - ix) * sizeof (T));
1076}
1077
1078
1079/* Remove elements in [START, END) from VEC for which COND holds. Ordering of
1080 remaining elements is preserved. This is an O(N) operation. */
1081
1082#define VEC_ORDERED_REMOVE_IF_FROM_TO(vec, read_index, write_index, \
1083 elem_ptr, start, end, cond) \
1084 { \
1085 gcc_assert ((end) <= (vec).length ()); \
1086 for (read_index = write_index = (start); read_index < (end); \
1087 ++read_index) \
1088 { \
1089 elem_ptr = &(vec)[read_index]; \
1090 bool remove_p = (cond); \
1091 if (remove_p) \
1092 continue; \
1093 \
1094 if (read_index != write_index) \
1095 (vec)[write_index] = (vec)[read_index]; \
1096 \
1097 write_index++; \
1098 } \
1099 \
1100 if (read_index - write_index > 0) \
1101 (vec).block_remove (write_index, read_index - write_index); \
1102 }
1103
1104
1105/* Remove elements from VEC for which COND holds. Ordering of remaining
1106 elements is preserved. This is an O(N) operation. */
1107
1108#define VEC_ORDERED_REMOVE_IF(vec, read_index, write_index, elem_ptr, \
1109 cond) \
1110 VEC_ORDERED_REMOVE_IF_FROM_TO ((vec), read_index, write_index, \
1111 elem_ptr, 0, (vec).length (), (cond))
1112
1113/* Remove an element from the IXth position of this vector. Ordering of
1114 remaining elements is destroyed. This is an O(1) operation. */
1115
1116template<typename T, typename A>
1117inline void
1118vec<T, A, vl_embed>::unordered_remove (unsigned ix)
1119{
1120 gcc_checking_assert (ix < length ());
1121 m_vecdata[ix] = m_vecdata[--m_vecpfx.m_num];
1122}
1123
1124
1125/* Remove LEN elements starting at the IXth. Ordering is retained.
1126 This is an O(N) operation due to memmove. */
1127
1128template<typename T, typename A>
1129inline void
1130vec<T, A, vl_embed>::block_remove (unsigned ix, unsigned len)
1131{
1132 gcc_checking_assert (ix + len <= length ());
1133 T *slot = &m_vecdata[ix];
1134 m_vecpfx.m_num -= len;
1135 memmove (slot, slot + len, (m_vecpfx.m_num - ix) * sizeof (T));
1136}
1137
1138
1139/* Sort the contents of this vector with qsort. CMP is the comparison
1140 function to pass to qsort. */
1141
1142template<typename T, typename A>
1143inline void
1144vec<T, A, vl_embed>::qsort (int (*cmp) (const void *, const void *))
1145{
1146 if (length () > 1)
1147 gcc_qsort (address (), length (), sizeof (T), cmp);
1148}
1149
1150/* Sort the contents of this vector with qsort. CMP is the comparison
1151 function to pass to qsort. */
1152
1153template<typename T, typename A>
1154inline void
1155vec<T, A, vl_embed>::sort (int (*cmp) (const void *, const void *, void *),
1156 void *data)
1157{
1158 if (length () > 1)
1159 gcc_sort_r (address (), length (), sizeof (T), cmp, data);
1160}
1161
1162/* Sort the contents of this vector with gcc_stablesort_r. CMP is the
1163 comparison function to pass to qsort. */
1164
1165template<typename T, typename A>
1166inline void
1167vec<T, A, vl_embed>::stablesort (int (*cmp) (const void *, const void *,
1168 void *), void *data)
1169{
1170 if (length () > 1)
1171 gcc_stablesort_r (address (), length (), sizeof (T), cmp, data);
1172}
1173
1174/* Search the contents of the sorted vector with a binary search.
1175 CMP is the comparison function to pass to bsearch. */
1176
1177template<typename T, typename A>
1178inline T *
1179vec<T, A, vl_embed>::bsearch (const void *key,
1180 int (*compar) (const void *, const void *))
1181{
1182 const void *base = this->address ();
1183 size_t nmemb = this->length ();
1184 size_t size = sizeof (T);
1185 /* The following is a copy of glibc stdlib-bsearch.h. */
1186 size_t l, u, idx;
1187 const void *p;
1188 int comparison;
1189
1190 l = 0;
1191 u = nmemb;
1192 while (l < u)
1193 {
1194 idx = (l + u) / 2;
1195 p = (const void *) (((const char *) base) + (idx * size));
1196 comparison = (*compar) (key, p);
1197 if (comparison < 0)
1198 u = idx;
1199 else if (comparison > 0)
1200 l = idx + 1;
1201 else
1202 return (T *)const_cast<void *>(p);
1203 }
1204
1205 return NULL;
1206}
1207
1208/* Search the contents of the sorted vector with a binary search.
1209 CMP is the comparison function to pass to bsearch. */
1210
1211template<typename T, typename A>
1212inline T *
1213vec<T, A, vl_embed>::bsearch (const void *key,
1214 int (*compar) (const void *, const void *,
1215 void *), void *data)
1216{
1217 const void *base = this->address ();
1218 size_t nmemb = this->length ();
1219 size_t size = sizeof (T);
1220 /* The following is a copy of glibc stdlib-bsearch.h. */
1221 size_t l, u, idx;
1222 const void *p;
1223 int comparison;
1224
1225 l = 0;
1226 u = nmemb;
1227 while (l < u)
1228 {
1229 idx = (l + u) / 2;
1230 p = (const void *) (((const char *) base) + (idx * size));
1231 comparison = (*compar) (key, p, data);
1232 if (comparison < 0)
1233 u = idx;
1234 else if (comparison > 0)
1235 l = idx + 1;
1236 else
1237 return (T *)const_cast<void *>(p);
1238 }
1239
1240 return NULL;
1241}
1242
1243/* Return true if SEARCH is an element of V. Note that this is O(N) in the
1244 size of the vector and so should be used with care. */
1245
1246template<typename T, typename A>
1247inline bool
1248vec<T, A, vl_embed>::contains (const T &search) const
1249{
1250 unsigned int len = length ();
1251 for (unsigned int i = 0; i < len; i++)
1252 if ((*this)[i] == search)
1253 return true;
1254
1255 return false;
1256}
1257
1258/* Find and return the first position in which OBJ could be inserted
1259 without changing the ordering of this vector. LESSTHAN is a
1260 function that returns true if the first argument is strictly less
1261 than the second. */
1262
1263template<typename T, typename A>
1264unsigned
1265vec<T, A, vl_embed>::lower_bound (T obj, bool (*lessthan)(const T &, const T &))
1266 const
1267{
1268 unsigned int len = length ();
1269 unsigned int half, middle;
1270 unsigned int first = 0;
1271 while (len > 0)
1272 {
1273 half = len / 2;
1274 middle = first;
1275 middle += half;
1276 T middle_elem = (*this)[middle];
1277 if (lessthan (middle_elem, obj))
1278 {
1279 first = middle;
1280 ++first;
1281 len = len - half - 1;
1282 }
1283 else
1284 len = half;
1285 }
1286 return first;
1287}
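A minimal sketch of the sorted-insert idiom the comment above alludes to (the helpers int_less and sorted_insert are hypothetical; V is assumed to have headroom for one more element):

static bool
int_less (const int &a, const int &b) { return a < b; }

static void
sorted_insert (vec<int, va_heap, vl_embed> *v, int x)
{
  unsigned pos = v->lower_bound (x, int_less); /* first position keeping order */
  v->quick_insert (pos, x);                    /* requires spare capacity */
}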
1288
1289
1290/* Return the number of bytes needed to embed an instance of an
1291 embeddable vec inside another data structure.
1292
1293 Use these methods to determine the required size and initialization
1294 of a vector V of type T embedded within another structure (as the
1295 final member):
1296
1297 size_t vec<T, A, vl_embed>::embedded_size (unsigned alloc);
1298 void v->embedded_init (unsigned alloc, unsigned num);
1299
1300 These allow the caller to perform the memory allocation. */
1301
1302template<typename T, typename A>
1303inline size_t
1304vec<T, A, vl_embed>::embedded_size (unsigned alloc)
1305{
1306 struct alignas (T) U { char data[sizeof (T)]; };
1307 typedef vec<U, A, vl_embed> vec_embedded;
1308 typedef typename std::conditional<std::is_standard_layout<T>::value,
1309 vec, vec_embedded>::type vec_stdlayout;
1310 static_assert (sizeof (vec_stdlayout) == sizeof (vec), "");
1311 static_assert (alignof (vec_stdlayout) == alignof (vec), "");
1312 return offsetof (vec_stdlayout, m_vecdata) + alloc * sizeof (T);
1313}
1314
1315
1316/* Initialize the vector to contain room for ALLOC elements and
1317 NUM active elements. */
1318
1319template<typename T, typename A>
1320inline void
1321vec<T, A, vl_embed>::embedded_init (unsigned alloc, unsigned num, unsigned aut)
1322{
1323 m_vecpfx.m_alloc = alloc;
1324 m_vecpfx.m_using_auto_storage = aut;
1325 m_vecpfx.m_num = num;
1326}
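A minimal sketch of caller-managed allocation with these two entry points; xmalloc stands in for however the embedding structure really obtains its memory block:

size_t sz = vec<int, va_heap, vl_embed>::embedded_size (8);
vec<int, va_heap, vl_embed> *v
  = static_cast <vec<int, va_heap, vl_embed> *> (xmalloc (sz));
v->embedded_init (8);   /* room for 8 elements, 0 currently active */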
1327
1328
1329/* Grow the vector to a specific length. LEN must be as long or longer than
1330 the current length. The new elements are uninitialized. */
1331
1332template<typename T, typename A>
1333inline void
1334vec<T, A, vl_embed>::quick_grow (unsigned len)
1335{
1336 gcc_checking_assert (length () <= len && len <= m_vecpfx.m_alloc);
1337 m_vecpfx.m_num = len;
1338}
1339
1340
1341/* Grow the vector to a specific length. LEN must be as long or longer than
1342 the current length. The new elements are initialized to zero. */
1343
1344template<typename T, typename A>
1345inline void
1346vec<T, A, vl_embed>::quick_grow_cleared (unsigned len)
1347{
1348 unsigned oldlen = length ();
1349 size_t growby = len - oldlen;
1350 quick_grow (len);
1351 if (growby != 0)
1352 vec_default_construct (address () + oldlen, growby);
1353}
1354
1355/* Garbage collection support for vec<T, A, vl_embed>. */
1356
1357template<typename T>
1358void
1359gt_ggc_mx (vec<T, va_gc> *v)
1360{
1361 extern void gt_ggc_mx (T &);
1362 for (unsigned i = 0; i < v->length (); i++)
1363 gt_ggc_mx ((*v)[i]);
1364}
1365
1366template<typename T>
1367void
1368gt_ggc_mx (vec<T, va_gc_atomic, vl_embed> *v ATTRIBUTE_UNUSED)
1369{
1370 /* Nothing to do. Vectors of atomic types wrt GC do not need to
1371 be traversed. */
1372}
1373
1374
1375/* PCH support for vec<T, A, vl_embed>. */
1376
1377template<typename T, typename A>
1378void
1379gt_pch_nx (vec<T, A, vl_embed> *v)
1380{
1381 extern void gt_pch_nx (T &);
1382 for (unsigned i = 0; i < v->length (); i++)
1383 gt_pch_nx ((*v)[i]);
1384}
1385
1386template<typename T, typename A>
1387void
1388gt_pch_nx (vec<T *, A, vl_embed> *v, gt_pointer_operator op, void *cookie)
1389{
1390 for (unsigned i = 0; i < v->length (); i++)
1391 op (&((*v)[i]), cookie);
1392}
1393
1394template<typename T, typename A>
1395void
1396gt_pch_nx (vec<T, A, vl_embed> *v, gt_pointer_operator op, void *cookie)
1397{
1398 extern void gt_pch_nx (T *, gt_pointer_operator, void *);
1399 for (unsigned i = 0; i < v->length (); i++)
1400 gt_pch_nx (&((*v)[i]), op, cookie);
1401}
1402
1403
1404/* Space efficient vector. These vectors can grow dynamically and are
1405 allocated together with their control data. They are suited to be
1406 included in data structures. Prior to initial allocation, they
1407 only take a single word of storage.
1408
1409 These vectors are implemented as a pointer to an embeddable vector.
1410 The semantics allow for this pointer to be NULL to represent empty
1411 vectors. This way, empty vectors occupy minimal space in the
1412 structure containing them.
1413
1414 Properties:
1415
1416 - The whole vector and control data are allocated in a single
1417 contiguous block.
1418 - The whole vector may be re-allocated.
1419 - Vector data may grow and shrink.
1420 - Access and manipulation requires a pointer test and
1421 indirection.
1422 - It requires 1 word of storage (prior to vector allocation).
1423
1424
1425 Limitations:
1426
1427 These vectors must be PODs because they are stored in unions.
1428 (http://en.wikipedia.org/wiki/Plain_old_data_structures).
1429 As long as we use C++03, we cannot have constructors nor
1430 destructors in classes that are stored in unions. */
1431
1432template<typename T, size_t N = 0>
1433class auto_vec;
1434
1435template<typename T>
1436struct vec<T, va_heap, vl_ptr>
1437{
1438public:
1439 /* Default ctors to ensure triviality. Use value-initialization
1440 (e.g., vec() or vec v{ };) or vNULL to create a zero-initialized
1441 instance. */
1442 vec () = default;
1443 vec (const vec &) = default;
1444 /* Initialization from the generic vNULL. */
1445 vec (vnull): m_vec () { }
1446 /* Same as default ctor: vec storage must be released manually. */
1447 ~vec () = default;
1448
1449 /* Defaulted same as copy ctor. */
1450 vec& operator= (const vec &) = default;
1451
1452 /* Prevent implicit conversion from auto_vec. Use auto_vec::to_vec()
1453 instead. */
1454 template <size_t N>
1455 vec (auto_vec<T, N> &) = delete;
1456
1457 template <size_t N>
1458 void operator= (auto_vec<T, N> &) = delete;
1459
1460 /* Memory allocation and deallocation for the embedded vector.
1461 Needed because we cannot have proper ctors/dtors defined. */
1462 void create (unsigned nelems CXX_MEM_STAT_INFO);
1463 void release (void);
1464
1465 /* Vector operations. */
1466 bool exists (void) const
1467 { return m_vec != NULL; }
1468
1469 bool is_empty (void) const
1470 { return m_vec ? m_vec->is_empty () : true; }
1471
1472 unsigned length (void) const
1473 { return m_vec ? m_vec->length () : 0; }
1474
1475 T *address (void)
1476 { return m_vec ? m_vec->m_vecdata : NULL; }
1477
1478 const T *address (void) const
1479 { return m_vec ? m_vec->m_vecdata : NULL; }
1480
1481 T *begin () { return address (); }
1482 const T *begin () const { return address (); }
1483 T *end () { return begin () + length (); }
1484 const T *end () const { return begin () + length (); }
1485 const T &operator[] (unsigned ix) const
1486 { return (*m_vec)[ix]; }
1487
1488 bool operator!=(const vec &other) const
1489 { return !(*this == other); }
1490
1491 bool operator==(const vec &other) const
1492 { return address () == other.address (); }
1493
1494 T &operator[] (unsigned ix)
1495 { return (*m_vec)[ix]; }
1496
1497 T &last (void)
1498 { return m_vec->last (); }
1499
1500 bool space (int nelems) const
1501 { return m_vec ? m_vec->space (nelems) : nelems == 0; }
1502
1503 bool iterate (unsigned ix, T *p) const;
1504 bool iterate (unsigned ix, T **p) const;
1505 vec copy (ALONE_CXX_MEM_STAT_INFO) const;
1506 bool reserve (unsigned, bool = false CXX_MEM_STAT_INFO);
1507 bool reserve_exact (unsigned CXX_MEM_STAT_INFO);
1508 void splice (const vec &);
1509 void safe_splice (const vec & CXX_MEM_STAT_INFO);
1510 T *quick_push (const T &);
1511 T *safe_push (const T &CXX_MEM_STAT_INFO);
1512 T &pop (void);
1513 void truncate (unsigned);
1514 void safe_grow (unsigned, bool = false CXX_MEM_STAT_INFO);
1515 void safe_grow_cleared (unsigned, bool = false CXX_MEM_STAT_INFO);
1516 void quick_grow (unsigned);
1517 void quick_grow_cleared (unsigned);
1518 void quick_insert (unsigned, const T &);
1519 void safe_insert (unsigned, const T & CXX_MEM_STAT_INFO);
1520 void ordered_remove (unsigned);
1521 void unordered_remove (unsigned);
1522 void block_remove (unsigned, unsigned);
1523 void qsort (int (*) (const void *, const void *));
1524 void sort (int (*) (const void *, const void *, void *), void *);
1525 void stablesort (int (*) (const void *, const void *, void *), void *);
1526 T *bsearch (const void *key, int (*compar)(const void *, const void *));
1527 T *bsearch (const void *key,
1528 int (*compar)(const void *, const void *, void *), void *);
1529 unsigned lower_bound (T, bool (*)(const T &, const T &)) const;
1530 bool contains (const T &search) const;
1531 void reverse (void);
1532
1533 bool using_auto_storage () const;
1534
1535 /* FIXME - This field should be private, but we need to cater to
1536 compilers that have stricter notions of PODness for types. */
1537 vec<T, va_heap, vl_embed> *m_vec;
1538};
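
/* Illustrative sketch (editorial addition, not part of vec.h): a minimal
   lifecycle for the space-efficient vector declared above.  Because the
   class has no real ctor/dtor, storage must be created and released by
   hand.  The function name is hypothetical.  */

static void
example_vec_lifecycle (void)
{
  vec<int> v = vNULL;		/* Zero-initialized; no storage yet.  */
  v.safe_push (1);		/* safe_push grows the vector as needed.  */
  v.safe_push (2);
  int first = v[0];		/* Unchecked element access.  */
  (void) first;
  v.release ();			/* Storage is never freed automatically.  */
}
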
1539
1540
1541/* auto_vec is a subclass of vec that automatically manages creating and
1542 releasing the internal vector. If N is non-zero then it has N elements of
1543 internal storage. The default is no internal storage; you probably only
1544 want to ask for internal storage for vectors that live on the stack,
1545 because if the vector outgrows the internal storage, that space is wasted.
1546 */
1547template<typename T, size_t N /* = 0 */>
1548class auto_vec : public vec<T, va_heap>
1549{
1550public:
1551 auto_vec ()
1552 {
1553 m_auto.embedded_init (MAX (N, 2), 0, 1);
1554 this->m_vec = &m_auto;
1555 }
1556
1557 auto_vec (size_t s CXX_MEM_STAT_INFO)
1558 {
1559 if (s > N)
1560 {
1561 this->create (s PASS_MEM_STAT);
1562 return;
1563 }
1564
1565 m_auto.embedded_init (MAX (N, 2), 0, 1);
1566 this->m_vec = &m_auto;
1567 }
1568
1569 ~auto_vec ()
1570 {
1571 this->release ();
1572 }
1573
1574 /* Explicitly convert to the base class. There is no conversion
1575 from a const auto_vec because a copy of the returned vec can
1576 be used to modify *THIS.
1577 This is a legacy function not to be used in new code. */
1578 vec<T, va_heap> to_vec_legacy () {
1579 return *static_cast<vec<T, va_heap> *>(this);
1580 }
1581
1582private:
1583 vec<T, va_heap, vl_embed> m_auto;
1584 T m_data[MAX (N - 1, 1)];
1585};
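
/* Illustrative sketch (editorial addition, not part of vec.h): an
   auto_vec with internal storage keeps small, stack-local vectors off
   the heap.  The function name is hypothetical.  */

static void
example_auto_vec_storage (void)
{
  auto_vec<int, 8> v;	/* First 8 elements live in the object itself.  */
  for (int i = 0; i < 8; i++)
    v.quick_push (i);	/* quick_push assumes space is already there.  */
  v.safe_push (8);	/* A ninth element spills to heap storage.  */
}			/* Dtor releases the heap storage, if any.  */
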
1586
1587/* auto_vec is a subclass of vec whose storage is released when it is
1588 destroyed. */
1589template<typename T>
1590class auto_vec<T, 0> : public vec<T, va_heap>
1591{
1592public:
1593 auto_vec () { this->m_vec = NULL; }
1594 auto_vec (size_t n CXX_MEM_STAT_INFO) { this->create (n PASS_MEM_STAT); }
1595 ~auto_vec () { this->release (); }
1596
1597 auto_vec (vec<T, va_heap>&& r)
1598 {
1599 gcc_assert (!r.using_auto_storage ());
1600 this->m_vec = r.m_vec;
1601 r.m_vec = NULL;
1602 }
1603
1604 auto_vec (auto_vec<T> &&r)
1605 {
1606 gcc_assert (!r.using_auto_storage ());
1607 this->m_vec = r.m_vec;
1608 r.m_vec = NULL;
1609 }
1610
1611 auto_vec& operator= (vec<T, va_heap>&& r)
1612 {
1613 if (this == &r)
1614 return *this;
1615
1616 gcc_assert (!r.using_auto_storage ());
1617 this->release ();
1618 this->m_vec = r.m_vec;
1619 r.m_vec = NULL;
1620 return *this;
1621 }
1622
1623 auto_vec& operator= (auto_vec<T> &&r)
1624 {
1625 if (this == &r)
1626 return *this;
1627
1628 gcc_assert (!r.using_auto_storage ());
1629 this->release ();
1630 this->m_vec = r.m_vec;
1631 r.m_vec = NULL;
1632 return *this;
1633 }
1634
1635 /* Explicitly convert to the base class. There is no conversion
1636 from a const auto_vec because a copy of the returned vec can
1637 be used to modify *THIS.
1638 This is a legacy function not to be used in new code. */
1639 vec<T, va_heap> to_vec_legacy () {
1640 return *static_cast<vec<T, va_heap> *>(this);
1641 }
1642
1643 // You probably don't want to copy a vector, so these are deleted to prevent
1644 // unintentional use. If you really need a copy of the vector's contents,
1645 // you can use copy ().
1646 auto_vec(const auto_vec &) = delete;
1647 auto_vec &operator= (const auto_vec &) = delete;
1648};
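
/* Illustrative sketch (editorial addition, not part of vec.h): the move
   operations above let ownership of the heap storage transfer without a
   deep copy.  The function name is hypothetical.  */

static auto_vec<int>
example_make_squares (unsigned n)
{
  auto_vec<int> v (n);		/* Reserve exactly N slots up front.  */
  for (unsigned i = 0; i < n; i++)
    v.quick_push (i * i);
  return v;			/* Moved (or elided); no double free.  */
}
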
1649
1650
1651/* Allocate heap memory for pointer V and create the internal vector
1652 with space for NELEMS elements. If NELEMS is 0, the internal
1653 vector is initialized to empty. */
1654
1655template<typename T>
1656inline void
1657vec_alloc (vec<T> *&v, unsigned nelems CXX_MEM_STAT_INFO)
1658{
1659 v = new vec<T>;
1660 v->create (nelems PASS_MEM_STAT);
1661}
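
/* Illustrative sketch (editorial addition, not part of vec.h): vec_alloc
   pairs with vec_free (defined further below) for the pointer-to-vec
   idiom.  The function name is hypothetical.  */

static void
example_vec_alloc (void)
{
  vec<int> *v;
  vec_alloc (v, 16);	/* Heap-allocate the wrapper, reserve 16 slots.  */
  v->quick_push (42);
  vec_free (v);		/* Releases the elements and deletes V.  */
}
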
1662
1663
1664/* A subclass of auto_vec <char *> that frees all of its elements on
1665 deletion. */
1666
1667class auto_string_vec : public auto_vec <char *>
1668{
1669 public:
1670 ~auto_string_vec ();
1671};
1672
1673/* A subclass of auto_vec <T *> that deletes all of its elements on
1674 destruction.
1675
1676 This is a crude way for a vec to "own" the objects it points to
1677 and clean up automatically.
1678
1679 For example, no attempt is made to delete elements when an item
1680 within the vec is overwritten.
1681
1682 We can't rely on gnu::unique_ptr within a container,
1683 since we can't rely on move semantics in C++98. */
1684
1685template <typename T>
1686class auto_delete_vec : public auto_vec <T *>
1687{
1688 public:
1689 auto_delete_vec () {}
1690 auto_delete_vec (size_t s) : auto_vec <T *> (s) {}
1691
1692 ~auto_delete_vec ();
1693
1694private:
1695 DISABLE_COPY_AND_ASSIGN(auto_delete_vec);
1696};
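
/* Illustrative sketch (editorial addition, not part of vec.h): an
   auto_delete_vec owning heap-allocated objects.  The type and function
   names are hypothetical.  */

struct example_node { int id; };

static void
example_auto_delete_vec (void)
{
  auto_delete_vec<example_node> nodes;
  example_node *n = new example_node;
  n->id = 1;
  nodes.safe_push (n);	/* The vector now owns N...  */
}			/* ...so the dtor deletes it and frees the buffer.  */
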
1697
1698/* Conditionally allocate heap memory for VEC and its internal vector. */
1699
1700template<typename T>
1701inline void
1702vec_check_alloc (vec<T, va_heap> *&vec, unsigned nelems CXX_MEM_STAT_INFO)
1703{
1704 if (!vec)
1705 vec_alloc (vec, nelems PASS_MEM_STAT);
1706}
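
/* Illustrative sketch (editorial addition, not part of vec.h):
   vec_check_alloc suits lazily created vectors that are often left
   unallocated.  The function name is hypothetical.  */

static void
example_lazy_append (vec<int, va_heap> *&cache, int value)
{
  vec_check_alloc (cache, 4);	/* Allocate only on first use.  */
  cache->safe_push (value);
}
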
1707
1708
1709/* Free the heap memory allocated by vector V and set it to NULL. */
1710
1711template<typename T>
1712inline void
1713vec_free (vec<T> *&v)
1714{
1715 if (v == NULL)
1716 return;
1717
1718 v->release ();
1719 delete v;
1720 v = NULL;
1721}
1722
1723
1724/* Return iteration condition and update PTR to point to the IX'th
1725 element of this vector. Use this to iterate over the elements of a
1726 vector as follows,
1727
1728 for (ix = 0; v.iterate (ix, &ptr); ix++)
1729 continue; */
1730
1731template<typename T>
1732inline bool
1733vec<T, va_heap, vl_ptr>::iterate (unsigned ix, T *ptr) const
1734{
1735 if (m_vec)
1736 return m_vec->iterate (ix, ptr);
1737 else
1738 {
1739 *ptr = 0;
1740 return false;
1741 }
1742}
1743
1744
1745/* Return iteration condition and update *PTR to point to the
1746 IX'th element of this vector. Use this to iterate over the
1747 elements of a vector as follows,
1748
1749 for (ix = 0; v->iterate (ix, &ptr); ix++)
1750 continue;
1751
1752 This variant is for vectors of objects. */
1753
1754template<typename T>
1755inline bool
1756vec<T, va_heap, vl_ptr>::iterate (unsigned ix, T **ptr) const
1757{
1758 if (m_vec)
1759 return m_vec->iterate (ix, ptr);
1760 else
1761 {
1762 *ptr = 0;
1763 return false;
1764 }
1765}
1766
1767
1768/* Convenience macro for forward iteration. */
1769#define FOR_EACH_VEC_ELT(V, I, P) \
1770 for (I = 0; (V).iterate ((I), &(P)); ++(I))
1771
1772#define FOR_EACH_VEC_SAFE_ELT(V, I, P) \
1773 for (I = 0; vec_safe_iterate ((V), (I), &(P)); ++(I))
1774
1775/* Likewise, but start from FROM rather than 0. */
1776#define FOR_EACH_VEC_ELT_FROM(V, I, P, FROM) \
1777 for (I = (FROM); (V).iterate ((I), &(P)); ++(I))
1778
1779/* Convenience macro for reverse iteration. */
1780#define FOR_EACH_VEC_ELT_REVERSE(V, I, P) \
1781 for (I = (V).length () - 1; \
1782 (V).iterate ((I), &(P)); \
1783 (I)--)
1784
1785#define FOR_EACH_VEC_SAFE_ELT_REVERSE(V, I, P) \
1786 for (I = vec_safe_length (V) - 1; \
1787 vec_safe_iterate ((V), (I), &(P)); \
1788 (I)--)
1789
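
/* Illustrative sketch (editorial addition, not part of vec.h): typical
   use of the iteration macros above.  The function name is
   hypothetical.  */

static int
example_sum (const vec<int> &v)
{
  unsigned i;
  int elt, sum = 0;
  FOR_EACH_VEC_ELT (v, i, elt)	/* iterate copies each element into ELT.  */
    sum += elt;
  return sum;
}
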
1790/* auto_string_vec's dtor, freeing all contained strings, automatically
1791 chaining up to ~auto_vec <char *>, which frees the internal buffer. */
1792
1793inline
1794auto_string_vec::~auto_string_vec ()
1795{
1796 int i;
1797 char *str;
1798 FOR_EACH_VEC_ELT (*this, i, str)
1799 free (str);
1800}
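
/* Illustrative sketch (editorial addition, not part of vec.h):
   auto_string_vec owning malloc'ed strings, here via libiberty's
   xstrdup.  The function name is hypothetical.  */

static void
example_string_vec (void)
{
  auto_string_vec names;
  names.safe_push (xstrdup ("foo"));
  names.safe_push (xstrdup ("bar"));
}	/* Dtor frees both strings, then the internal buffer.  */
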
1801
1802/* auto_delete_vec's dtor, deleting all contained items, automatically
1803 chaining up to ~auto_vec <T*>, which frees the internal buffer. */
1804
1805template <typename T>
1806inline
1807auto_delete_vec<T>::~auto_delete_vec ()
1808{
1809 int i;
1810 T *item;
1811 FOR_EACH_VEC_ELT (*this, i, item)
1812 delete item;
1813}
1814
1815
1816/* Return a copy of this vector. */
1817
1818template<typename T>
1819inline vec<T, va_heap, vl_ptr>
1820vec<T, va_heap, vl_ptr>::copy (ALONE_MEM_STAT_DECL) const
1821{
1822 vec<T, va_heap, vl_ptr> new_vec{ };
1823 if (length ())
1824 new_vec.m_vec = m_vec->copy (ALONE_PASS_MEM_STAT);
1825 return new_vec;
1826}
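
/* Illustrative sketch (editorial addition, not part of vec.h): a copy is
   fully independent of the original and must be released separately.
   The function name is hypothetical.  */

static void
example_copy (vec<int> &orig)
{
  vec<int> dup = orig.copy ();	/* Deep-copies the elements.  */
  dup.safe_push (99);		/* ORIG is unaffected.  */
  dup.release ();
}
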
1827
1828
1829/* Ensure that the vector has at least RESERVE slots available (if
1830 EXACT is false), or exactly RESERVE slots available (if EXACT is
1831 true).
1832
1833 This may create additional headroom if EXACT is false.
1834
1835 Note that this can cause the embedded vector to be reallocated.
1836 Returns true iff reallocation actually occurred. */
1837
1838template<typename T>
1839inline bool
1840vec<T, va_heap, vl_ptr>::reserve (unsigned nelems, bool exact MEM_STAT_DECL)
1841{
1842 if (space (nelems))
1843 return false;
1844
1845 /* For now play a game with va_heap::reserve to hide our auto storage, if
1846 any; this is necessary because it doesn't have enough information to know
1847 that the embedded vector is in auto storage, and so should not be freed. */
1848 vec<T, va_heap, vl_embed> *oldvec = m_vec;
1849 unsigned int oldsize = 0;
1850 bool handle_auto_vec = m_vec && using_auto_storage ();
1851 if (handle_auto_vec)
1852 {
1853 m_vec = NULL;
1854 oldsize = oldvec->length ();
1855 nelems += oldsize;
1856 }
1857
1858 va_heap::reserve (m_vec, nelems, exact PASS_MEM_STAT);
1859 if (handle_auto_vec)
1860 {
1861 vec_copy_construct (m_vec->address (), oldvec->address (), oldsize);
1862 m_vec->m_vecpfx.m_num = oldsize;
1863 }
1864
1865 return true;
1866}
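
/* Illustrative sketch (editorial addition, not part of vec.h): reserving
   headroom once allows the unchecked quick_push in the loop.  The
   function name is hypothetical.  */

static void
example_reserve (vec<int> &v, unsigned n)
{
  v.reserve (n);		/* At most one reallocation here...  */
  for (unsigned i = 0; i < n; i++)
    v.quick_push (i);		/* ...and no capacity checks per push.  */
}
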
1867
1868
1869/* Ensure that this vector has exactly NELEMS slots available. This
1870 will not create additional headroom. Note this can cause the
1871 embedded vector to be reallocated. Returns true iff reallocation
1872 actually occurred. */
1873
1874template<typename T>
1875inline bool
1876vec<T, va_heap, vl_ptr>::reserve_exact (unsigned nelems MEM_STAT_DECL)
1877{
1878 return reserve (nelems, true PASS_MEM_STAT);
1879}
1880
1881
1882/* Create the internal vector and reserve NELEMS for it. This is
1883 exactly like vec::reserve, but the internal vector is
1884 unconditionally allocated from scratch. The old one, if it
1885 existed, is lost. */
1886
1887template<typename T>
1888inline void
1889vec<T, va_heap, vl_ptr>::create (unsigned nelems MEM_STAT_DECL)
1890{
1891 m_vec = NULL;
1892 if (nelems > 0)
1893 reserve_exact (nelems PASS_MEM_STAT);
1894}
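
/* Illustrative sketch (editorial addition, not part of vec.h): create
   discards any previous internal vector, so it is for fresh
   initialization only.  The function name is hypothetical.  */

static void
example_create (vec<int> &v, unsigned n)
{
  v.create (n);		/* Fresh internal vector with room for N.  */
  for (unsigned i = 0; i < n; i++)
    v.quick_push (i);
  v.release ();		/* Caller still owns the storage.  */
}
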
1895
1896