Bug Summary

File: build/gcc/ipa-sra.cc
Warning: line 464, column 4
Called C++ object pointer is null

Annotated Source Code


clang -cc1 -cc1 -triple x86_64-suse-linux -analyze -disable-free -clear-ast-before-backend -disable-llvm-verifier -discard-value-names -main-file-name ipa-sra.cc -analyzer-checker=core -analyzer-checker=apiModeling -analyzer-checker=unix -analyzer-checker=deadcode -analyzer-checker=cplusplus -analyzer-checker=security.insecureAPI.UncheckedReturn -analyzer-checker=security.insecureAPI.getpw -analyzer-checker=security.insecureAPI.gets -analyzer-checker=security.insecureAPI.mktemp -analyzer-checker=security.insecureAPI.mkstemp -analyzer-checker=security.insecureAPI.vfork -analyzer-checker=nullability.NullPassedToNonnull -analyzer-checker=nullability.NullReturnedFromNonnull -analyzer-output plist -w -setup-static-analyzer -analyzer-config-compatibility-mode=true -mrelocation-model static -mframe-pointer=none -fmath-errno -ffp-contract=on -fno-rounding-math -mconstructor-aliases -funwind-tables=2 -target-cpu x86-64 -tune-cpu generic -debugger-tuning=gdb -fcoverage-compilation-dir=/buildworker/marxinbox-gcc-clang-static-analyzer/objdir/gcc -resource-dir /usr/lib64/clang/15.0.7 -D IN_GCC -D HAVE_CONFIG_H -I . -I . -I /buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc -I /buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/. -I /buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../include -I /buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libcpp/include -I /buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libcody -I /buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libdecnumber -I /buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libdecnumber/bid -I ../libdecnumber -I /buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libbacktrace -internal-isystem /usr/bin/../lib64/gcc/x86_64-suse-linux/13/../../../../include/c++/13 -internal-isystem /usr/bin/../lib64/gcc/x86_64-suse-linux/13/../../../../include/c++/13/x86_64-suse-linux -internal-isystem /usr/bin/../lib64/gcc/x86_64-suse-linux/13/../../../../include/c++/13/backward -internal-isystem /usr/lib64/clang/15.0.7/include -internal-isystem /usr/local/include -internal-isystem /usr/bin/../lib64/gcc/x86_64-suse-linux/13/../../../../x86_64-suse-linux/include -internal-externc-isystem /include -internal-externc-isystem /usr/include -O2 -Wno-narrowing -Wwrite-strings -Wno-long-long -Wno-variadic-macros -Wno-overlength-strings -fdeprecated-macro -fdebug-compilation-dir=/buildworker/marxinbox-gcc-clang-static-analyzer/objdir/gcc -ferror-limit 19 -fno-rtti -fgnuc-version=4.2.1 -vectorize-loops -vectorize-slp -analyzer-output=plist-html -analyzer-config silence-checkers=core.NullDereference -faddrsig -D__GCC_HAVE_DWARF2_CFI_ASM=1 -o /buildworker/marxinbox-gcc-clang-static-analyzer/objdir/clang-static-analyzer/2023-03-27-141847-20772-1/report-7qwJQP.plist -x c++ /buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-sra.cc

/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-sra.cc

1/* Interprocedural scalar replacement of aggregates
2 Copyright (C) 2019-2023 Free Software Foundation, Inc.
3 Contributed by Martin Jambor <mjambor@suse.cz>
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it under
8the terms of the GNU General Public License as published by the Free
9Software Foundation; either version 3, or (at your option) any later
10version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING3. If not see
19<http://www.gnu.org/licenses/>. */
20
21/* IPA-SRA is an interprocedural pass that removes unused function return
22 values (turning functions returning a value which is never used into void
23 functions) and removes unused function parameters. It can also replace an
24 aggregate parameter by a set of other parameters representing part of the
25 original, turning those passed by reference into new ones which pass the
26 value directly.
27
28 The pass is a true IPA one, which means that it works in three stages in
29 order to be able to take advantage of LTO. First, summaries about functions
30 and calls are generated. Function summaries (often called call graph
31 node summaries) contain mainly information about which parameters are
32 potential transformation candidates and which bits of candidates are
33 accessed. We differentiate between accesses done as a part of a call
34 statement (which might not be necessary if the callee is also transformed)
35 and others (which are mandatory). Call summaries (often called call graph
36 edge summaries) contain information about which function formal parameters
37 feed into which actual call arguments so that if two parameters are only
38 used in a sum which is then passed to another function which, however, does
39 not use this parameter, all three parameters of the two functions can
40 be eliminated. Edge summaries also have flags whether the return value is
41 used or if it is only returned in the caller too. In LTO mode these
42 summaries are then streamed to the object file in the compilation phase and
43 streamed back in during the WPA analysis stage.
44
45 The interprocedural analysis phase traverses the graph in topological order
46 in two sweeps, one in each direction. First, from callees to callers for
47 parameter removal and splitting. Each strongly-connected component is
48 processed iteratively until the situation in it stabilizes. The pass from
49 callers to callees is then carried out to remove unused return values in a
50 very similar fashion.
51
52 Because parameter manipulation has big implications for call redirection
53 which is done only after all call graph nodes materialize, the
54 transformation phase is not part of this pass but is carried out by the
55 clone materialization and edge redirection itself, see comments in
56 ipa-param-manipulation.h for more details. */
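
As a concrete illustration of the transformation sketched above, a hedged example follows. It is not code from this file; the names are hypothetical and the real clone would be named something like foo.isra.0:

struct S { int x, y, z; };

int
foo (struct S *s)	/* original: S passed by reference, only s->x read,
			   return value unused by all callers */
{
  return s->x + 1;
}

static void
foo_isra (int s_x)	/* what the clone amounts to: the accessed piece
			   passed by value, the unused return value removed */
{
  (void) (s_x + 1);
}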
57
58
59#include "config.h"
60#include "system.h"
61#include "coretypes.h"
62#include "backend.h"
63#include "tree.h"
64#include "gimple.h"
65#include "predict.h"
66#include "tree-pass.h"
67#include "ssa.h"
68#include "cgraph.h"
69#include "gimple-pretty-print.h"
70#include "alias.h"
71#include "tree-eh.h"
72#include "gimple-iterator.h"
73#include "gimple-walk.h"
74#include "tree-dfa.h"
75#include "tree-sra.h"
76#include "alloc-pool.h"
77#include "symbol-summary.h"
78#include "dbgcnt.h"
79#include "tree-inline.h"
80#include "ipa-utils.h"
81#include "builtins.h"
82#include "cfganal.h"
83#include "tree-streamer.h"
84#include "internal-fn.h"
85#include "symtab-clones.h"
86#include "attribs.h"
87#include "ipa-prop.h"
88
89static void ipa_sra_summarize_function (cgraph_node *);
90
91/* Bits used to track size of an aggregate in bytes interprocedurally. */
92#define ISRA_ARG_SIZE_LIMIT_BITS 16
93#define ISRA_ARG_SIZE_LIMIT (1 << ISRA_ARG_SIZE_LIMIT_BITS)
94/* How many parameters can feed into a call actual argument and still be
95 tracked. */
96#define IPA_SRA_MAX_PARAM_FLOW_LEN 7
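
For orientation: unit offsets and sizes are stored in 16-bit bitfields, so any aggregate of 1 << 16 = 65536 bytes or more is not tracked, and at most seven formal parameters may feed a single actual argument. A hedged compile-time sketch of that arithmetic, not part of the file:

static_assert (ISRA_ARG_SIZE_LIMIT == 65536,
	       "aggregates must be smaller than 64 KiB to be tracked");
static_assert (IPA_SRA_MAX_PARAM_FLOW_LEN == 7,
	       "at most 7 formals can feed one actual argument");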
97
98/* Structure describing accesses to a specific portion of an aggregate
99 parameter, as given by the offset and size. Any smaller accesses that occur
100 within a function that fall within another access form a tree. The pass
101 cannot analyze parameters with only partially overlapping accesses. */
102
103struct GTY(()) param_access
104{
105 /* Type that a potential replacement should have. This field only has
106 meaning in the summary building and transformation phases, when it is
107 reconstructed from the body. Must not be touched in IPA analysis
108 stage. */
109 tree type;
110
111 /* Alias reference type to be used in MEM_REFs when adjusting caller
112 arguments. */
113 tree alias_ptr_type;
114
115 /* Values returned by get_ref_base_and_extent but converted to bytes and
116 stored as unsigned ints. */
117 unsigned unit_offset;
118 unsigned unit_size : ISRA_ARG_SIZE_LIMIT_BITS;
119
120 /* Set once we are sure that the access will really end up in a potentially
121 transformed function - initially not set for portions of formal parameters
122 that are only used as actual function arguments passed to callees. */
123 unsigned certain : 1;
124 /* Set if the access has reverse scalar storage order. */
125 unsigned reverse : 1;
126};
127
128/* This structure has the same purpose as the one above and additionally it
129 contains some fields that are only necessary in the summary generation
130 phase. */
131
132struct gensum_param_access
133{
134 /* Values returned by get_ref_base_and_extent. */
135 HOST_WIDE_INT offset;
136 HOST_WIDE_INT size;
137
138 /* If this access has any children (in terms of the definition above), this
139 points to the first one. */
140 struct gensum_param_access *first_child;
141 /* In intraprocedural SRA, pointer to the next sibling in the access tree as
142 described above. */
143 struct gensum_param_access *next_sibling;
144
145 /* Type that a potential replacement should have. This field only has
146 meaning in the summary building and transformation phases, when it is
147 reconstructed from the body. Must not be touched in IPA analysis
148 stage. */
149 tree type;
150 /* Alias reference type to be used in MEM_REFs when adjusting caller
151 arguments. */
152 tree alias_ptr_type;
153
154 /* Cumulative count of all loads. */
155 profile_count load_count;
156 /* Set if there have been writes to or reads from this exact location,
157 other than as arguments to function calls that can be tracked. */
158 bool nonarg;
159
160 /* Set if the access has reverse scalar storage order. */
161 bool reverse;
162};
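
A hedged illustration of the access tree described above; the types are hypothetical and not from this file:

struct inner { int a; int b; };		  /* 64 bits */
struct outer { struct inner in; int c; }; /* 96 bits */

/* A function reading arg.in, arg.in.b and arg.c would summarize the
   parameter roughly as (offsets and sizes in bits, as returned by
   get_ref_base_and_extent):

     { offset 0, size 64 }			arg.in
       first_child -> { offset 32, size 32 }	arg.in.b
     next_sibling -> { offset 64, size 32 }	arg.c

   An access covering, say, bits 48..80 would overlap arg.in only
   partially and would make the whole parameter unanalyzable.  */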
163
164/* Summary describing a parameter in the IPA stages. */
165
166struct GTY(()) isra_param_desc
167{
168 /* List of access representatives to the parameters, sorted according to
169 their offset. */
170 vec <param_access *, va_gc> *accesses;
171
172 /* Unit size limit of total size of all replacements. */
173 unsigned param_size_limit : ISRA_ARG_SIZE_LIMIT_BITS;
174 /* Sum of unit sizes of all certain replacements. */
175 unsigned size_reached : ISRA_ARG_SIZE_LIMIT_BITS;
176 /* Minimum offset that is known to be safe to dereference because callers
177 pass pointers to DECLs of at least this size or because of dereferences in
178 callers. */
179 unsigned safe_size : ISRA_ARG_SIZE_LIMIT_BITS;
180
181 /* A parameter that is used only in call arguments and can be removed if all
182 concerned actual arguments are removed. */
183 unsigned locally_unused : 1;
184 /* An aggregate that is a candidate for breaking up or complete removal. */
185 unsigned split_candidate : 1;
186 /* Is this a parameter passing stuff by reference? */
187 unsigned by_ref : 1;
188 /* Parameter hint set during IPA analysis when there is a caller which does
189 not construct the argument just to pass it to calls. Only meaningful for
190 by_ref parameters. */
191 unsigned not_specially_constructed : 1;
192 /* Only meaningful for by_ref parameters. If set, this parameter can only be
193 a split candidate if all callers pass pointers that are known to point to
194 a chunk of memory large enough to contain all accesses. */
195 unsigned conditionally_dereferenceable : 1;
196 /* Set when safe_size has been updated from at least one caller. */
197 unsigned safe_size_set : 1;
198};
199
200/* Structure used when generating summaries that describes a parameter. */
201
202struct gensum_param_desc
203{
204 /* Roots of param_accesses. */
205 gensum_param_access *accesses;
206 /* Number of accesses in the access tree rooted in field accesses. */
207 unsigned access_count;
208
209 /* If non-zero, this is the number of uses as actual call arguments. */
210 int call_uses;
211 /* Number of times this parameter has been directly passed to a call. */
212 unsigned ptr_pt_count;
213
214 /* Size limit of total size of all replacements. */
215 unsigned param_size_limit;
216 /* Sum of sizes of nonarg accesses. */
217 unsigned nonarg_acc_size;
218
219 /* A parameter that is used only in call arguments and can be removed if all
220 concerned actual arguments are removed. */
221 bool locally_unused;
222 /* An aggregate that is a candidate for breaking up or a pointer passing data
223 by reference that is a candidate for being converted to a set of
224 parameters passing those data by value. */
225 bool split_candidate;
226 /* Is this a parameter passing stuff by reference (either a pointer or a
227 source language reference type)? */
228 bool by_ref;
229 /* If this parameter passes stuff by reference, can it be safely dereferenced
230 without performing further checks (for example because it is a
231 REFERENCE_TYPE)? */
232 bool safe_ref;
233 /* Only meaningful for by_ref parameters. If set, this parameter can only be
234 a split candidate if all callers pass pointers that are known to point to
235 a chunk of memory large enough to contain all accesses. */
236 bool conditionally_dereferenceable;
237
238 /* The number of this parameter as ordered in the function decl. */
239 int param_number;
240 /* For parameters passing data by reference, this is the parameter index
241 used to compute indices into bb_dereferences. */
242 int deref_index;
243};
244
245/* Properly deallocate accesses of DESC. TODO: Since this data structure is
246 allocated in GC memory, this is not necessary and we can consider removing
247 the function. */
248
249static void
250free_param_decl_accesses (isra_param_desc *desc)
251{
252 unsigned len = vec_safe_length (desc->accesses);
253 for (unsigned i = 0; i < len; ++i)
254 ggc_free ((*desc->accesses)[i]);
255 vec_free (desc->accesses);
256}
257
258/* Class used to convey information about functions from the
259 intra-procedural analysis stage to inter-procedural one. */
260
261class GTY((for_user)) isra_func_summary
262{
263public:
264 /* Initialize the object. */
265
266 isra_func_summary ()
267 : m_parameters (NULL), m_candidate (false), m_returns_value (false),
268 m_return_ignored (false), m_queued (false)
269 {}
270
271 /* Destroy m_parameters. */
272
273 ~isra_func_summary ();
274
275 /* Mark the function as not a candidate for any IPA-SRA transformation.
276 Return true if it was a candidate until now. */
277
278 bool zap ();
279
280 /* Vector of parameter descriptors corresponding to the function being
281 analyzed. */
282 vec<isra_param_desc, va_gc> *m_parameters;
283
284 /* Whether the node is even a candidate for any IPA-SRA transformation at
285 all. */
286 unsigned m_candidate : 1;
287
288 /* Whether the original function returns any value. */
289 unsigned m_returns_value : 1;
290
291 /* Set to true if all call statements do not actually use the returned
292 value. */
293
294 unsigned m_return_ignored : 1;
295
296 /* Whether the node is already queued in IPA SRA stack during processing of
297 call graph SCCs. */
298
299 unsigned m_queued : 1;
300};
301
302/* Deallocate the memory pointed to by isra_func_summary. TODO: Since this
303 data structure is allocated in GC memory, this is not necessary and we can
304 consider removing the destructor. */
305
306isra_func_summary::~isra_func_summary ()
307{
308 unsigned len = vec_safe_length (m_parameters);
309 for (unsigned i = 0; i < len; ++i)
310 free_param_decl_accesses (&(*m_parameters)[i]);
311 vec_free (m_parameters);
312}
313
314/* Mark the function as not a candidate for any IPA-SRA transformation. Return
315 true if it was a candidate until now. */
316
317bool
318isra_func_summary::zap ()
319{
320 bool ret = m_candidate;
321 m_candidate = false;
322
323 /* TODO: see the destructor above. */
324 unsigned len = vec_safe_length (m_parameters);
325 for (unsigned i = 0; i < len; ++i)
326 free_param_decl_accesses (&(*m_parameters)[i]);
327 vec_free (m_parameters);
328
329 return ret;
330}
331
332/* Structure to describe which formal parameters feed into a particular actual
333 argument. */
334
335struct isra_param_flow
336{
337 /* Number of elements in array inputs that contain valid data. */
338 char length;
339 /* Indices of formal parameters that feed into the described actual argument.
340 If aggregate_pass_through or pointer_pass_through below are true, it must
341 contain exactly one element which is passed through from a formal
342 parameter of the given number. Otherwise, the array contains indices of
343 callee's formal parameters which are used to calculate value of this
344 actual argument. */
345 unsigned char inputs[IPA_SRA_MAX_PARAM_FLOW_LEN];
346
347 /* Offset within the formal parameter. */
348 unsigned unit_offset;
349 /* When aggregate_pass_through is set, this is the size of the portion of an
350 aggregate formal parameter that is being passed. Otherwise, this is the
351 size of the pointed-to memory that is known to be valid to dereference. */
352 unsigned unit_size : ISRA_ARG_SIZE_LIMIT_BITS;
353
354 /* True when the value of this actual argument is a portion of a formal
355 parameter. */
356 unsigned aggregate_pass_through : 1;
357 /* True when the value of this actual argument is a verbatim pass-through
358 of an obtained pointer. */
359 unsigned pointer_pass_through : 1;
360 /* True when it is safe to copy access candidates here from the callee, which
361 would mean introducing dereferences into callers of the caller. */
362 unsigned safe_to_import_accesses : 1;
363 /* True when the passed value is an address of a structure that has been
364 constructed in the caller just to be passed by reference to functions
365 (i.e. is never read). */
366 unsigned constructed_for_calls : 1;
367};
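
A hedged illustration of how these records are filled; the functions are hypothetical:

/* Given
     void callee (int);
     void caller (int a, int b) { callee (a + b); }
   the isra_param_flow for callee's single actual argument would have
   length == 2 and inputs == {0, 1}.  Neither pass-through flag is set
   because the argument is computed from the formals rather than passed
   verbatim.  If a and b have no other uses and callee ignores its
   parameter, all three parameters can be removed, as described at the
   top of the file.  */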
368
369/* Structure used to convey information about calls from the intra-procedural
370 analysis stage to inter-procedural one. */
371
372class isra_call_summary
373{
374public:
375 isra_call_summary ()
376 : m_arg_flow (), m_return_ignored (false), m_return_returned (false),
377 m_bit_aligned_arg (false), m_before_any_store (false)
378 {}
379
380 void init_inputs (unsigned arg_count);
381 void dump (FILE *f);
382
383 /* Information about what formal parameters of the caller are used to compute
384 individual actual arguments of this call. */
385 auto_vec <isra_param_flow> m_arg_flow;
386
387 /* Set to true if the call statement does not have a LHS. */
388 unsigned m_return_ignored : 1;
389
390 /* Set to true if the LHS of call statement is only used to construct the
391 return value of the caller. */
392 unsigned m_return_returned : 1;
393
394 /* Set when any of the call arguments are not byte-aligned. */
395 unsigned m_bit_aligned_arg : 1;
396
397 /* Set to true if the call happened before any (other) store to memory in the
398 caller. */
399 unsigned m_before_any_store : 1;
400};
401
402/* Class to manage function summaries. */
403
404class GTY((user)) ipa_sra_function_summaries
405 : public function_summary <isra_func_summary *>
406{
407public:
408 ipa_sra_function_summaries (symbol_table *table, bool ggc):
409 function_summary<isra_func_summary *> (table, ggc) { }
410
411 void duplicate (cgraph_node *, cgraph_node *,
412 isra_func_summary *old_sum,
413 isra_func_summary *new_sum) final override;
414 void insert (cgraph_node *, isra_func_summary *) final override;
415};
416
417/* Hook that is called by summary when a node is duplicated. */
418
419void
420ipa_sra_function_summaries::duplicate (cgraph_node *, cgraph_node *,
421 isra_func_summary *old_sum,
422 isra_func_summary *new_sum)
423{
424 /* TODO: Somehow stop copying when ISRA is doing the cloning, it is
425 useless. */
426 new_sum->m_candidate = old_sum->m_candidate;
427 new_sum->m_returns_value = old_sum->m_returns_value;
428 new_sum->m_return_ignored = old_sum->m_return_ignored;
429 gcc_assert (!old_sum->m_queued);
1: Assuming field 'm_queued' is 0
2: '?' condition is false
430 new_sum->m_queued = false;
431
432 unsigned param_count = vec_safe_length (old_sum->m_parameters);
3: Calling 'vec_safe_length<isra_param_desc, va_gc>'
7: Returning from 'vec_safe_length<isra_param_desc, va_gc>'
433 if (!param_count)
8: Assuming 'param_count' is not equal to 0
9: Taking false branch
434 return;
435 vec_safe_reserve_exact (new_sum->m_parameters, param_count);
436 new_sum->m_parameters->quick_grow_cleared (param_count);
437 for (unsigned i = 0; i < param_count; i++)
9.1: 'i' is < 'param_count'
10: Loop condition is true. Entering loop body
438 {
439 isra_param_desc *s = &(*old_sum->m_parameters)[i];
440 isra_param_desc *d = &(*new_sum->m_parameters)[i];
441
442 d->param_size_limit = s->param_size_limit;
443 d->size_reached = s->size_reached;
444 d->safe_size = s->safe_size;
445 d->locally_unused = s->locally_unused;
446 d->split_candidate = s->split_candidate;
447 d->by_ref = s->by_ref;
448 d->not_specially_constructed = s->not_specially_constructed;
449 d->conditionally_dereferenceable = s->conditionally_dereferenceable;
450 d->safe_size_set = s->safe_size_set;
451
452 unsigned acc_count = vec_safe_length (s->accesses);
453 vec_safe_reserve_exact (d->accesses, acc_count);
11: Calling 'vec_safe_reserve_exact<param_access *, va_gc>'
24: Returning from 'vec_safe_reserve_exact<param_access *, va_gc>'
454 for (unsigned j = 0; j < acc_count; j++)
24.1: 'j' is < 'acc_count'
25: Loop condition is true. Entering loop body
455 {
456 param_access *from = (*s->accesses)[j];
457 param_access *to = ggc_cleared_alloc<param_access> ();
458 to->type = from->type;
459 to->alias_ptr_type = from->alias_ptr_type;
460 to->unit_offset = from->unit_offset;
461 to->unit_size = from->unit_size;
462 to->certain = from->certain;
463 to->reverse = from->reverse;
464 d->accesses->quick_push (to);
26: Called C++ object pointer is null
465 }
466 }
467}
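
A note on the report that ends at line 464: the path assumes 'acc_count' is nonzero (step 24.1) yet also that 'd->accesses' is still null at the quick_push (step 26). vec_safe_reserve_exact takes the vector pointer by reference and (re)allocates it whenever a nonzero number of slots is requested, so on this path the pointer cannot stay null; the warning therefore looks like a false positive in which the two facts are not correlated. A minimal sketch of the guarantee, using hypothetical names (my_safe_reserve_exact, with std::vector as a stand-in); this is not the real vec.h code:

#include <vector>

template <typename T>
void
my_safe_reserve_exact (std::vector<T> *&v, unsigned nelems)
{
  if (nelems == 0)
    return;			/* only on this path may V stay null */
  if (!v)
    v = new std::vector<T> ();	/* (re)allocated through the reference */
  v->reserve (nelems);
}

/* After my_safe_reserve_exact (v, n) with n > 0, v is non-null and
   pushing into *v is safe, which mirrors lines 453 and 464 above.  The
   same reasoning covers new_sum->m_parameters at line 436, guarded by
   the param_count check at line 433.  */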
468
469/* Pointer to the pass function summary holder. */
470
471static GTY(()) ipa_sra_function_summaries *func_sums;
472
473/* Hook that is called by summary when new node appears. */
474
475void
476ipa_sra_function_summaries::insert (cgraph_node *node, isra_func_summary *)
477{
478 if (opt_for_fn (node->decl, flag_ipa_sra))
479 {
480 push_cfun (DECL_STRUCT_FUNCTION (node->decl));
481 ipa_sra_summarize_function (node);
482 pop_cfun ();
483 }
484 else
485 func_sums->remove (node);
486}
487
488/* Class to manage call summaries. */
489
490class ipa_sra_call_summaries: public call_summary <isra_call_summary *>
491{
492public:
493 ipa_sra_call_summaries (symbol_table *table):
494 call_summary<isra_call_summary *> (table) { }
495
496 /* Duplicate info when an edge is cloned. */
497 void duplicate (cgraph_edge *, cgraph_edge *,
498 isra_call_summary *old_sum,
499 isra_call_summary *new_sum) final override;
500};
501
502static ipa_sra_call_summaries *call_sums;
503
504
505/* Initialize m_arg_flow of a particular instance of isra_call_summary.
506 ARG_COUNT is the number of actual arguments passed. */
507
508void
509isra_call_summary::init_inputs (unsigned arg_count)
510{
511 if (arg_count == 0)
512 {
513 gcc_checking_assert (m_arg_flow.length () == 0);
514 return;
515 }
516 if (m_arg_flow.length () == 0)
517 {
518 m_arg_flow.reserve_exact (arg_count);
519 m_arg_flow.quick_grow_cleared (arg_count);
520 }
521 else
522 gcc_checking_assert (arg_count == m_arg_flow.length ());
523}
524
525/* Dump all information in call summary to F. */
526
527void
528isra_call_summary::dump (FILE *f)
529{
530 if (m_return_ignored)
531 fprintf (f, " return value ignored\n");
532 if (m_return_returned)
533 fprintf (f, " return value used only to compute caller return value\n");
534 if (m_before_any_store)
535 fprintf (f, " happens before any store to memory\n");
536 for (unsigned i = 0; i < m_arg_flow.length (); i++)
537 {
538 fprintf (f, " Parameter %u:\n", i);
539 isra_param_flow *ipf = &m_arg_flow[i];
540
541 if (ipf->length)
542 {
543 bool first = true;
544 fprintf (f, " Scalar param sources: ");
545 for (int j = 0; j < ipf->length; j++)
546 {
547 if (!first)
548 fprintf (f, ", ");
549 else
550 first = false;
551 fprintf (f, "%i", (int) ipf->inputs[j]);
552 }
553 fprintf (f, "\n");
554 }
555 if (ipf->aggregate_pass_through)
556 fprintf (f, " Aggregate pass through from the param given above, "
557 "unit offset: %u , unit size: %u\n",
558 ipf->unit_offset, ipf->unit_size);
559 else if (ipf->unit_size > 0)
560 fprintf (f, " Known dereferenceable size: %u\n", ipf->unit_size);
561 if (ipf->pointer_pass_through)
562 fprintf (f, " Pointer pass through from the param given above, "
563 "safe_to_import_accesses: %u\n", ipf->safe_to_import_accesses);
564 if (ipf->constructed_for_calls)
565 fprintf (f, " Variable constructed just to be passed to "
566 "calls.\n");
567 }
568}
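
Going by the fprintf format strings above, the caller (int a, int b) { callee (a + b); } example given earlier would dump roughly as follows; the surrounding lines depend on which flags are set:

 Parameter 0:
  Scalar param sources: 0, 1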
569
570/* Duplicate edge summary when an edge is cloned. */
571
572void
573ipa_sra_call_summaries::duplicate (cgraph_edge *, cgraph_edge *,
574 isra_call_summary *old_sum,
575 isra_call_summary *new_sum)
576{
577 unsigned arg_count = old_sum->m_arg_flow.length ();
578 new_sum->init_inputs (arg_count);
579 for (unsigned i = 0; i < arg_count; i++)
580 new_sum->m_arg_flow[i] = old_sum->m_arg_flow[i];
581
582 new_sum->m_return_ignored = old_sum->m_return_ignored;
583 new_sum->m_return_returned = old_sum->m_return_returned;
584 new_sum->m_bit_aligned_arg = old_sum->m_bit_aligned_arg;
585 new_sum->m_before_any_store = old_sum->m_before_any_store;
586}
587
588
589/* With all GTY stuff done, we can move to anonymous namespace. */
590namespace {
591/* Quick mapping from a decl to its param descriptor. */
592
593hash_map<tree, gensum_param_desc *> *decl2desc;
594
595/* All local DECLs that are ever loaded from and those that have their
596 address assigned to a variable. */
597
598hash_set <tree> *loaded_decls;
599
600/* Countdown of allowed Alias Analysis steps during summary building. */
601
602int aa_walking_limit;
603
604/* This is a table in which for each basic block and parameter there is a
605 distance (offset + size) in that parameter which is dereferenced and
606 accessed in that BB. */
607HOST_WIDE_INT *bb_dereferences = NULL;
608/* How many by-reference parameters there are in the current function. */
609int unsafe_by_ref_count;
610
611/* Bitmap of BBs that can cause the function to "stop" progressing by
612 returning, throwing externally, looping infinitely or calling a function
613 which might abort etc. */
614bitmap final_bbs;
615
616/* Obstack to allocate various small structures required only when generating
617 summary for a function. */
618struct obstack gensum_obstack;
619
620/* Return false if the function is apparently unsuitable for IPA-SRA based on
621 its attributes, return true otherwise. NODE is the cgraph node of the current
622 function. */
623
624static bool
625ipa_sra_preliminary_function_checks (cgraph_node *node)
626{
627 if (!node->can_change_signature)
628 {
629 if (dump_file)
630 fprintf (dump_file, "Function cannot change signature.\n");
631 return false;
632 }
633
634 if (!tree_versionable_function_p (node->decl))
635 {
636 if (dump_file)
637 fprintf (dump_file, "Function is not versionable.\n");
638 return false;
639 }
640
641 if (!opt_for_fn (node->decl, optimize)
642 || !opt_for_fn (node->decl, flag_ipa_sra))
643 {
644 if (dump_file)
645 fprintf (dump_file, "Not optimizing or IPA-SRA turned off for this "
646 "function.\n");
647 return false;
648 }
649
650 if (DECL_VIRTUAL_P (node->decl))
651 {
652 if (dump_file)
653 fprintf (dump_file, "Function is a virtual method.\n");
654 return false;
655 }
656
657 struct function *fun = DECL_STRUCT_FUNCTION (node->decl);
658 if (fun->stdarg)
659 {
660 if (dump_file)
661 fprintf (dump_file, "Function uses stdarg. \n");
662 return false;
663 }
664
665 if (DECL_DISREGARD_INLINE_LIMITS (node->decl))
666 {
667 if (dump_file)
668 fprintf (dump_file, "Always inline function will be inlined "
669 "anyway. \n");
670 return false;
671 }
672
673 return true;
674}
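
A hedged example of a function the stdarg check above rejects; the name is hypothetical:

#include <stdarg.h>

static int
sum (int n, ...)	/* fun->stdarg is set, so IPA-SRA skips it */
{
  va_list ap;
  va_start (ap, n);
  int s = 0;
  while (n--)
    s += va_arg (ap, int);
  va_end (ap);
  return s;
}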
675
676/* Print access tree starting at ACCESS to F. */
677
678static void
679dump_gensum_access (FILE *f, gensum_param_access *access, unsigned indent)
680{
681 fprintf (f, " ");
682 for (unsigned i = 0; i < indent; i++)
683 fprintf (f, " ");
684 fprintf (f, " * Access to offset: " HOST_WIDE_INT_PRINT_DEC,
685 access->offset);
686 fprintf (f, ", size: " HOST_WIDE_INT_PRINT_DEC, access->size);
687 fprintf (f, ", type: ");
688 print_generic_expr (f, access->type);
689 fprintf (f, ", alias_ptr_type: ");
690 print_generic_expr (f, access->alias_ptr_type);
691 fprintf (f, ", load_count: ");
692 access->load_count.dump (f);
693 fprintf (f, ", nonarg: %u, reverse: %u\n", access->nonarg, access->reverse);
694 for (gensum_param_access *ch = access->first_child;
695 ch;
696 ch = ch->next_sibling)
697 dump_gensum_access (f, ch, indent + 2);
698}
699
700
701/* Print access ACCESS to F. */
702
703static void
704dump_isra_access (FILE *f, param_access *access)
705{
706 fprintf (f, " * Access to unit offset: %u", access->unit_offset);
707 fprintf (f, ", unit size: %u", access->unit_size);
708 fprintf (f, ", type: ");
709 print_generic_expr (f, access->type);
710 fprintf (f, ", alias_ptr_type: ");
711 print_generic_expr (f, access->alias_ptr_type);
712 if (access->certain)
713 fprintf (f, ", certain");
714 else
715 fprintf (f, ", not certain");
716 if (access->reverse)
717 fprintf (f, ", reverse");
718 fprintf (f, "\n");
719}
720
721/* Dump access tree starting at ACCESS to stderr. */
722
723DEBUG_FUNCTION void
724debug_isra_access (param_access *access)
725{
726 dump_isra_access (stderr, access);
727}
728
729/* Dump DESC to F. */
730
731static void
732dump_gensum_param_descriptor (FILE *f, gensum_param_desc *desc)
733{
734 if (desc->locally_unused)
735 fprintf (f, " unused with %i call_uses\n", desc->call_uses);
736 if (!desc->split_candidate)
737 {
738 fprintf (f, " not a candidate\n");
739 return;
740 }
741 if (desc->by_ref)
742 fprintf (f, " %s%s by_ref with %u pass throughs\n",
743 desc->safe_ref ? "safe" : "unsafe",
744 desc->conditionally_dereferenceable
745 ? " conditionally_dereferenceable" : " ok",
746 desc->ptr_pt_count);
747
748 for (gensum_param_access *acc = desc->accesses; acc; acc = acc->next_sibling)
749 dump_gensum_access (f, acc, 2);
750}
751
752/* Dump all parameter descriptors in PARAM_DESCRIPTIONS, assuming they
753 describe FNDECL, to F. */
754
755static void
756dump_gensum_param_descriptors (FILE *f, tree fndecl,
757 vec<gensum_param_desc> *param_descriptions)
758{
759 tree parm = DECL_ARGUMENTS (fndecl);
760 for (unsigned i = 0;
761 i < param_descriptions->length ();
762 ++i, parm = DECL_CHAIN (parm))
763 {
764 fprintf (f, " Descriptor for parameter %i ", i);
765 print_generic_expr (f, parm, TDF_UID);
766 fprintf (f, "\n");
767 dump_gensum_param_descriptor (f, &(*param_descriptions)[i]);
768 }
769}
770
771
772/* Dump DESC to F. If HINTS is true, also dump IPA-analysis computed
773 hints. */
774
775static void
776dump_isra_param_descriptor (FILE *f, isra_param_desc *desc, bool hints)
777{
778 if (desc->locally_unused)
779 {
780 fprintf (f, " (locally) unused\n");
781 }
782 if (!desc->split_candidate)
783 {
784 fprintf (f, " not a candidate for splitting");
785 if (hints && desc->by_ref && desc->safe_size_set)
786 fprintf (f, ", safe_size: %u", (unsigned) desc->safe_size);
787 fprintf (f, "\n");
788 return;
789 }
790 fprintf (f, " param_size_limit: %u, size_reached: %u%s",
791 desc->param_size_limit, desc->size_reached,
792 desc->by_ref ? ", by_ref" : "");
793 if (desc->by_ref && desc->conditionally_dereferenceable)
794 fprintf (f, ", conditionally_dereferenceable");
795 if (hints)
796 {
797 if (desc->by_ref && !desc->not_specially_constructed)
798 fprintf (f, ", args_specially_constructed");
799 if (desc->by_ref && desc->safe_size_set)
800 fprintf (f, ", safe_size: %u", (unsigned) desc->safe_size);
801 }
802 fprintf (f, "\n");
803
804 for (unsigned i = 0; i < vec_safe_length (desc->accesses); ++i)
805 {
806 param_access *access = (*desc->accesses)[i];
807 dump_isra_access (f, access);
808 }
809}
810
811/* Dump all parameter descriptors in IFS, assuming it describes FNDECL, to F.
812 If HINTS is true, also dump IPA-analysis computed hints. */
813
814static void
815dump_isra_param_descriptors (FILE *f, tree fndecl, isra_func_summary *ifs,
816 bool hints)
817{
818 tree parm = DECL_ARGUMENTS (fndecl);
819 if (!ifs->m_parameters)
820 {
821 fprintf (f, " parameter descriptors not available\n");
822 return;
823 }
824
825 for (unsigned i = 0;
826 i < ifs->m_parameters->length ();
827 ++i, parm = DECL_CHAIN (parm))
828 {
829 fprintf (f, " Descriptor for parameter %i ", i);
830 print_generic_expr (f, parm, TDF_UID);
831 fprintf (f, "\n");
832 dump_isra_param_descriptor (f, &(*ifs->m_parameters)[i], hints);
833 }
834}
835
836/* Add SRC to the inputs of PARAM_FLOW, unless that would exceed storage. If
837 it would, return false, otherwise return true. SRC must fit into an
838 unsigned char. Used for purposes of transitive unused parameter
839 removal. */
840
841static bool
842add_src_to_param_flow (isra_param_flow *param_flow, int src)
843{
844 gcc_checking_assert (src >= 0 && src <= UCHAR_MAX);
845 if (param_flow->length == IPA_SRA_MAX_PARAM_FLOW_LEN)
846 return false;
847
848 param_flow->inputs[(int) param_flow->length] = src;
849 param_flow->length++;
850 return true;
851}
852
853/* Add a SRC to the inputs of PARAM_FLOW unless it is already there and assert
854 it is the only input. Used for purposes of transitive parameter
855 splitting. */
856
857static void
858set_single_param_flow_source (isra_param_flow *param_flow, int src)
859{
860 gcc_checking_assert (src >= 0 && src <= UCHAR_MAX);
861 if (param_flow->length == 0)
862 {
863 param_flow->inputs[0] = src;
864 param_flow->length = 1;
865 }
866 else if (param_flow->length == 1)
867 gcc_assert (param_flow->inputs[0] == src);
868 else
869 gcc_unreachable ();
870}
871
872/* Assert that there is only a single value in PARAM_FLOW's inputs and return
873 it. */
874
875static unsigned
876get_single_param_flow_source (const isra_param_flow *param_flow)
877{
878 gcc_assert (param_flow->length == 1);
879 return param_flow->inputs[0];
880}
881
882/* Inspect all uses of NAME and simple arithmetic calculations involving NAME
883 in FUN represented with NODE and return a negative number if any of them is
884 used for something other than an actual call argument, a simple
885 arithmetic operation or a debug statement. If there are no such uses, return
886 the number of actual arguments that this parameter eventually feeds to (or
887 zero if there is none). For any such parameter, mark PARM_NUM as one of its
888 sources. ANALYZED is a bitmap that tracks which SSA names we have already
889 started investigating. */
890
891static int
892isra_track_scalar_value_uses (function *fun, cgraph_node *node, tree name,
893 int parm_num, bitmap analyzed)
894{
895 int res = 0;
896 imm_use_iterator imm_iter;
897 gimple *stmt;
898
899 FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
900 {
901 if (is_gimple_debug (stmt))
902 continue;
903
904 /* TODO: We could handle at least const builtin functions like arithmetic
905 operations below. */
906 if (is_gimple_call (stmt))
907 {
908 int all_uses = 0;
909 use_operand_p use_p;
910 FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
911 all_uses++;
912
913 gcall *call = as_a <gcall *> (stmt);
914 unsigned arg_count;
915 if (gimple_call_internal_p (call)
916 || (arg_count = gimple_call_num_args (call)) == 0)
917 {
918 res = -1;
919 break;
920 }
921
922 cgraph_edge *cs = node->get_edge (stmt);
923 gcc_checking_assert (cs);
924 isra_call_summary *csum = call_sums->get_create (cs);
925 csum->init_inputs (arg_count);
926
927 int simple_uses = 0;
928 for (unsigned i = 0; i < arg_count; i++)
929 if (gimple_call_arg (call, i) == name)
930 {
931 if (!add_src_to_param_flow (&csum->m_arg_flow[i], parm_num))
932 {
933 simple_uses = -1;
934 break;
935 }
936 simple_uses++;
937 }
938
939 if (simple_uses < 0
940 || all_uses != simple_uses)
941 {
942 res = -1;
943 break;
944 }
945 res += all_uses;
946 }
947 else if (!stmt_unremovable_because_of_non_call_eh_p (fun, stmt)
948 && ((is_gimple_assign (stmt) && !gimple_has_volatile_ops (stmt))
949 || gimple_code (stmt) == GIMPLE_PHI))
950 {
951 tree lhs;
952 if (gimple_code (stmt) == GIMPLE_PHI)
953 lhs = gimple_phi_result (stmt);
954 else
955 lhs = gimple_assign_lhs (stmt);
956
957 if (TREE_CODE (lhs) != SSA_NAME)
958 {
959 res = -1;
960 break;
961 }
962 gcc_assert (!gimple_vdef (stmt));
963 if (bitmap_set_bit (analyzed, SSA_NAME_VERSION (lhs)))
964 {
965 int tmp = isra_track_scalar_value_uses (fun, node, lhs, parm_num,
966 analyzed);
967 if (tmp < 0)
968 {
969 res = tmp;
970 break;
971 }
972 res += tmp;
973 }
974 }
975 else
976 {
977 res = -1;
978 break;
979 }
980 }
981 return res;
982}
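
A hedged illustration of the tracking above; the functions are hypothetical:

/* For
     void bar (int);
     void foo (int p) { int t = p + 1; bar (t); bar (p); }
   tracking p's default-definition SSA name returns 2: 't = p + 1' is a
   simple assignment whose single result feeds a call, and p itself is
   directly a call argument.  Any other kind of use, e.g. 'if (p > 0)',
   makes the function return -1 and the parameter is kept.  */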
983
984/* Inspect all uses of PARM, which must be a gimple register, in FUN (which is
985 also described by NODE) and simple arithmetic calculations involving PARM
986 and return true if any of them is used for something other than an
987 actual call argument, simple arithmetic operation or debug statement. If
988 there are no such uses, return false and store the number of actual arguments
989 that this parameter eventually feeds to (or zero if there is none) to
990 *CALL_USES_P. For any such parameter, mark PARM_NUM as one of its
991 sources.
992
993 This function is similar to ptr_parm_has_nonarg_uses but its results are
994 meant for unused parameter removal, as opposed to splitting of parameters
995 passed by reference or converting them to passed by value. */
996
997static bool
998isra_track_scalar_param_local_uses (function *fun, cgraph_node *node, tree parm,
999 int parm_num, int *call_uses_p)
1000{
1001 gcc_checking_assert (is_gimple_reg (parm));
1002
1003 tree name = ssa_default_def (fun, parm);
1004 if (!name || has_zero_uses (name))
1005 {
1006 *call_uses_p = 0;
1007 return false;
1008 }
1009
1010 /* Edge summaries can only handle callers with fewer than 256 parameters. */
1011 if (parm_num > UCHAR_MAX)
1012 return true;
1013
1014 bitmap analyzed = BITMAP_ALLOC (NULL);
1015 int call_uses = isra_track_scalar_value_uses (fun, node, name, parm_num,
1016 analyzed);
1017 BITMAP_FREE (analyzed);
1018 if (call_uses < 0)
1019 return true;
1020 *call_uses_p = call_uses;
1021 return false;
1022}
1023
1024/* Scan immediate uses of a default definition SSA name of a parameter PARM and
1025 examine whether there are any uses that are not actual call arguments or
1026 are otherwise infeasible. If so, return true, otherwise return false.
1027 Create pass-through IPA flow records for any direct uses as call arguments
1028 and if returning false, store their number into *PT_COUNT_P. NODE and FUN
1029 must represent the function that is currently analyzed, PARM_NUM must be the
1030 index of the analyzed parameter.
1031
1032 This function is similar to isra_track_scalar_param_local_uses but its
1033 results are meant for splitting of parameters passed by reference or turning
1034 them into bits passed by value, as opposed to generic unused parameter
1035 removal. */
1036
1037static bool
1038ptr_parm_has_nonarg_uses (cgraph_node *node, function *fun, tree parm,
1039 int parm_num, unsigned *pt_count_p)
1040{
1041 imm_use_iterator ui;
1042 gimple *stmt;
1043 tree name = ssa_default_def (fun, parm);
1044 bool ret = false;
1045 unsigned pt_count = 0;
1046
1047 if (!name || has_zero_uses (name))
1048 return false;
1049
1050 /* Edge summaries can only handle callers with fewer than 256 parameters. */
1051 if (parm_num > UCHAR_MAX)
1052 return true;
1053
1054 FOR_EACH_IMM_USE_STMT (stmt, ui, name)
1055 {
1056 unsigned uses_ok = 0;
1057 use_operand_p use_p;
1058
1059 if (is_gimple_debug (stmt))
1060 continue;
1061
1062 if (gimple_assign_single_p (stmt))
1063 {
1064 tree rhs = gimple_assign_rhs1 (stmt);
1065 if (!TREE_THIS_VOLATILE (rhs))
1066 {
1067 while (handled_component_p (rhs))
1068 rhs = TREE_OPERAND (rhs, 0);
1069 if (TREE_CODE (rhs) == MEM_REF
1070 && TREE_OPERAND (rhs, 0) == name
1071 && integer_zerop (TREE_OPERAND (rhs, 1))
1072 && types_compatible_p (TREE_TYPE (rhs),
1073 TREE_TYPE (TREE_TYPE (name))))
1074 uses_ok++;
1075 }
1076 }
1077 else if (is_gimple_call (stmt))
1078 {
1079 gcall *call = as_a <gcall *> (stmt);
1080 unsigned arg_count;
1081 if (gimple_call_internal_p (call)
1082 || (arg_count = gimple_call_num_args (call)) == 0)
1083 {
1084 ret = true;
1085 break;
1086 }
1087
1088 cgraph_edge *cs = node->get_edge (stmt);
1089 gcc_checking_assert (cs);
1090 isra_call_summary *csum = call_sums->get_create (cs);
1091 csum->init_inputs (arg_count);
1092
1093 for (unsigned i = 0; i < arg_count; ++i)
1094 {
1095 tree arg = gimple_call_arg (stmt, i);
1096
1097 if (arg == name)
1098 {
1099 /* TODO: Allow &MEM_REF[name + offset] here,
1100 ipa_param_body_adjustments::modify_call_stmt has to be
1101 adjusted too. */
1102 csum->m_arg_flow[i].pointer_pass_through = true;
1103 set_single_param_flow_source (&csum->m_arg_flow[i], parm_num);
1104 pt_count++;
1105 uses_ok++;
1106 continue;
1107 }
1108
1109 if (!TREE_THIS_VOLATILE (arg))
1110 {
1111 while (handled_component_p (arg))
1112 arg = TREE_OPERAND (arg, 0);
1113 if (TREE_CODE (arg) == MEM_REF
1114 && TREE_OPERAND (arg, 0) == name
1115 && integer_zerop (TREE_OPERAND (arg, 1))
1116 && types_compatible_p (TREE_TYPE (arg),
1117 TREE_TYPE (TREE_TYPE (name))))
1118 uses_ok++;
1119 }
1120 }
1121 }
1122
1123 /* If the number of valid uses does not match the number of
1124 uses in this stmt there is an unhandled use. */
1125 unsigned all_uses = 0;
1126 FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
1127 all_uses++;
1128
1129 gcc_checking_assert (uses_ok <= all_uses);
1130 if (uses_ok != all_uses)
1131 {
1132 ret = true;
1133 break;
1134 }
1135 }
1136
1137 *pt_count_p = pt_count;
1138 return ret;
1139}
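
A hedged illustration, in source rather than gimple terms, of what the checks above accept for a by-reference parameter p of type struct S *; the calls are hypothetical:

/*   baz (p);		 OK: verbatim pass-through, *PT_COUNT_P grows
     x = p->i;		 OK: a load through *p at offset zero
     struct S *q = p;	 nonarg use: the pointer escapes, the function
			 returns true and p is disqualified

   The MEM_REF tests only accept accesses to the whole pointed-to type
   at offset zero (possibly underneath component references), which is
   why pointer arithmetic and comparisons also count as nonarg uses.  */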
1140
1141/* Initialize vector of parameter descriptors of NODE. Return true if there
1142 are any candidates for splitting or unused aggregate parameter removal (the
1143 function may return false if there are candidates for removal of register
1144 parameters). */
1145
1146static bool
1147create_parameter_descriptors (cgraph_node *node,
1148 vec<gensum_param_desc> *param_descriptions)
1149{
1150 function *fun = DECL_STRUCT_FUNCTION (node->decl);
1151 bool ret = false;
1152
1153 int num = 0;
1154 for (tree parm = DECL_ARGUMENTS (node->decl);
1155 parm;
1156 parm = DECL_CHAIN (parm), num++)
1157 {
1158 const char *msg;
1159 gensum_param_desc *desc = &(*param_descriptions)[num];
1160 /* param_descriptions vector is grown cleared in the caller. */
1161 desc->param_number = num;
1162 decl2desc->put (parm, desc);
1163
1164 if (dump_file && (dump_flags & TDF_DETAILS))
1165 print_generic_expr (dump_file, parm, TDF_UID);
1166
1167 int scalar_call_uses;
1168 tree type = TREE_TYPE (parm);
1169 if (TREE_THIS_VOLATILE (parm))
1170 {
1171 if (dump_file && (dump_flags & TDF_DETAILS))
1172 fprintf (dump_file, " not a candidate, is volatile\n");
1173 continue;
1174 }
1175 if (!is_gimple_reg_type (type) && is_va_list_type (type))
1176 {
1177 if (dump_file && (dump_flags & TDF_DETAILS))
1178 fprintf (dump_file, " not a candidate, is a va_list type\n");
1179 continue;
1180 }
1181 if (TREE_ADDRESSABLE (parm))
1182 {
1183 if (dump_file && (dump_flags & TDF_DETAILS))
1184 fprintf (dump_file, " not a candidate, is addressable\n");
1185 continue;
1186 }
1187 if (TREE_ADDRESSABLE (type))
1188 {
1189 if (dump_file && (dump_flags & TDF_DETAILS))
1190 fprintf (dump_file, " not a candidate, type cannot be split\n");
1191 continue;
1192 }
1193
1194 if (is_gimple_reg (parm)
1195 && !isra_track_scalar_param_local_uses (fun, node, parm, num,
1196 &scalar_call_uses))
1197 {
1198 if (dump_file && (dump_flags & TDF_DETAILS))
1199 fprintf (dump_file, " is a scalar with only %i call uses\n",
1200 scalar_call_uses);
1201
1202 desc->locally_unused = true;
1203 desc->call_uses = scalar_call_uses;
1204 }
1205
1206 if (POINTER_TYPE_P (type))
1207 {
1208 desc->by_ref = true;
1209 if (TREE_CODE (type) == REFERENCE_TYPE
1210 || (num == 0
1211 && TREE_CODE (TREE_TYPE (node->decl)) == METHOD_TYPE))
1212 desc->safe_ref = true;
1213 else
1214 desc->safe_ref = false;
1215 type = TREE_TYPE (type);
1216
1217 if (TREE_CODE (type) == FUNCTION_TYPE
1218 || TREE_CODE (type) == METHOD_TYPE)
1219 {
1220 if (dump_file && (dump_flags & TDF_DETAILS))
1221 fprintf (dump_file, " not a candidate, reference to "
1222 "a function\n");
1223 continue;
1224 }
1225 if (TYPE_VOLATILE (type))
1226 {
1227 if (dump_file && (dump_flags & TDF_DETAILS))
1228 fprintf (dump_file, " not a candidate, reference to "
1229 "a volatile type\n");
1230 continue;
1231 }
1232 if (TREE_CODE (type) == ARRAY_TYPE
1233 && TYPE_NONALIASED_COMPONENT (type))
1234 {
1235 if (dump_file && (dump_flags & TDF_DETAILS))
1236 fprintf (dump_file, " not a candidate, reference to "
1237 "a nonaliased component array\n");
1238 continue;
1239 }
1240 if (!is_gimple_reg (parm))
1241 {
1242 if (dump_file && (dump_flags & TDF_DETAILS))
1243 fprintf (dump_file, " not a candidate, a reference which is "
1244 "not a gimple register (probably addressable)\n");
1245 continue;
1246 }
1247 if (is_va_list_type (type))
1248 {
1249 if (dump_file && (dump_flags & TDF_DETAILS))
1250 fprintf (dump_file, " not a candidate, reference to "
1251 "a va list\n");
1252 continue;
1253 }
1254 if (ptr_parm_has_nonarg_uses (node, fun, parm, num,
1255 &desc->ptr_pt_count))
1256 {
1257 if (dump_file && (dump_flags & TDF_DETAILS))
1258 fprintf (dump_file, " not a candidate, reference has "
1259 "nonarg uses\n");
1260 continue;
1261 }
1262 }
1263 else if (!AGGREGATE_TYPE_P (type))
1264 {
1265 /* This is in an else branch because scalars passed by reference are
1266 still candidates to be passed by value. */
1267 if (dump_file && (dump_flags & TDF_DETAILS))
1268 fprintf (dump_file, " not a candidate, not an aggregate\n");
1269 continue;
1270 }
1271
1272 if (!COMPLETE_TYPE_P (type))
1273 {
1274 if (dump_file && (dump_flags & TDF_DETAILS))
1275 fprintf (dump_file, " not a candidate, not a complete type\n");
1276 continue;
1277 }
1278 if (!tree_fits_uhwi_p (TYPE_SIZE (type)))
1279 {
1280 if (dump_file && (dump_flags & TDF_DETAILS))
1281 fprintf (dump_file, " not a candidate, size not representable\n");
1282 continue;
1283 }
1284 unsigned HOST_WIDE_INT type_size
1285 = tree_to_uhwi (TYPE_SIZE (type)) / BITS_PER_UNIT;
1286 if (type_size == 0
1287 || type_size >= ISRA_ARG_SIZE_LIMIT)
1288 {
1289 if (dump_file && (dump_flags & TDF_DETAILS))
1290 fprintf (dump_file, " not a candidate, has zero or huge size\n");
1291 continue;
1292 }
1293 if (type_internals_preclude_sra_p (type, &msg))
1294 {
1295 if (dump_file && (dump_flags & TDF_DETAILS))
1296 fprintf (dump_file, " not a candidate, %s\n", msg);
1297 continue;
1298 }
1299
1300 if (dump_file && (dump_flags & TDF_DETAILS))
1301 fprintf (dump_file, " is a candidate\n");
1302
1303 ret = true;
1304 desc->split_candidate = true;
1305 if (desc->by_ref && !desc->safe_ref)
1306 desc->deref_index = unsafe_by_ref_count++;
1307 }
1308 return ret;
1309}
1310
1311/* Return pointer to descriptor of parameter DECL or NULL if it cannot be
1312 found, which happens if DECL is for a static chain. */
1313
1314static gensum_param_desc *
1315get_gensum_param_desc (tree decl)
1316{
1317 if (!decl2desc)
1318 return NULL;
1319 gcc_checking_assert (TREE_CODE (decl) == PARM_DECL);
1320 gensum_param_desc **slot = decl2desc->get (decl);
1321 if (!slot)
1322 /* This can happen for static chains which we cannot handle so far. */
1323 return NULLnullptr;
1324 gcc_checking_assert (*slot)((void)(!(*slot) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-sra.cc"
, 1324, __FUNCTION__), 0 : 0))
;
1325 return *slot;
1326}
1327
1328
1329/* Remove parameter described by DESC from candidates for IPA-SRA splitting and
1330 write REASON to the dump file if there is one. */
1331
1332static void
1333disqualify_split_candidate (gensum_param_desc *desc, const char *reason)
1334{
1335 if (!desc->split_candidate)
1336 return;
1337
1338 if (dump_file && (dump_flags & TDF_DETAILS))
1339 fprintf (dump_file, "! Disqualifying parameter number %i - %s\n",
1340 desc->param_number, reason);
1341
1342 desc->split_candidate = false;
1343}
1344
1345/* Remove DECL from candidates for IPA-SRA and write REASON to the dump file if
1346 there is one. */
1347
1348static void
1349disqualify_split_candidate (tree decl, const char *reason)
1350{
1351 gensum_param_desc *desc = get_gensum_param_desc (decl);
1352 if (desc)
1353 disqualify_split_candidate (desc, reason);
1354}
1355
1356/* Allocate a new access to DESC and fill it in with OFFSET and SIZE. But
1357 first, check that there are not too many of them already. If so, do not
1358 allocate anything and return NULL. */
1359
1360static gensum_param_access *
1361allocate_access (gensum_param_desc *desc,
1362 HOST_WIDE_INT offset, HOST_WIDE_INT size)
1363{
1364 if (desc->access_count
1365 == (unsigned) param_ipa_sra_max_replacements)
1366 {
1367 disqualify_split_candidate (desc, "Too many replacement candidates");
1368 return NULL;
1369 }
1370
1371 gensum_param_access *access
1372 = (gensum_param_access *) obstack_alloc (&gensum_obstack,
1373 sizeof (gensum_param_access));
1374 memset (access, 0, sizeof (*access));
1375 access->offset = offset;
1376 access->size = size;
1377 access->load_count = profile_count::zero ();
1378 return access;
1379}
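/* [Editorial note: the following is an illustrative sketch, not part of
   ipa-sra.cc. It shows, with hypothetical simplified types, the capping
   behaviour of allocate_access above: once a parameter has accumulated the
   configured maximum of replacement candidates, further allocations fail. */
#include <cstdlib>

struct toy_desc { unsigned access_count; };

static void *
toy_allocate_access (toy_desc *desc, unsigned max_replacements)
{
  /* Mirror allocate_access: refuse to create another access once the
     per-parameter limit is reached.  */
  if (desc->access_count == max_replacements)
    return nullptr;
  desc->access_count++;
  return std::malloc (1); /* Stand-in for the obstack allocation.  */
}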
1380
1381/* In what context scan_expr_access has been called, whether it deals with a
1382 load, a function argument, or a store. Please note that in rare
1383 circumstances when it is not clear if the access is a load or store,
1384 ISRA_CTX_STORE is used too. */
1385
1386enum isra_scan_context {ISRA_CTX_LOAD, ISRA_CTX_ARG, ISRA_CTX_STORE};
1387
1388/* Return an access describing memory access to the variable described by DESC
1389 at OFFSET with SIZE in context CTX, starting at pointer to the linked list
1390 at a certain tree level FIRST. Attempt to create it and put into the
1391 appropriate place in the access tree if it does not exist, but fail and return
1392 NULL if there are already too many accesses, if it would create a partially
1393 overlapping access or if an access would end up within a pre-existing
1394 non-call access. */
1395
1396static gensum_param_access *
1397get_access_1 (gensum_param_desc *desc, gensum_param_access **first,
1398 HOST_WIDE_INT offset, HOST_WIDE_INT size, isra_scan_context ctx)
1399{
1400 gensum_param_access *access = *first, **ptr = first;
1401
1402 if (!access)
1403 {
1404 /* No pre-existing access at this level, just create it. */
1405 gensum_param_access *a = allocate_access (desc, offset, size);
1406 if (!a)
1407 return NULL;
1408 *first = a;
1409 return *first;
1410 }
1411
1412 if (access->offset >= offset + size)
1413 {
1414 /* We want to squeeze it in front of the very first access, just do
1415 it. */
1416 gensum_param_access *r = allocate_access (desc, offset, size);
1417 if (!r)
1418 return NULL;
1419 r->next_sibling = access;
1420 *first = r;
1421 return r;
1422 }
1423
1424 /* Skip all accesses that have to come before us until the next sibling is
1425 already too far. */
1426 while (offset >= access->offset + access->size
1427 && access->next_sibling
1428 && access->next_sibling->offset < offset + size)
1429 {
1430 ptr = &access->next_sibling;
1431 access = access->next_sibling;
1432 }
1433
1434 /* At this point we know we do not belong before access. */
1435 gcc_assert (access->offset < offset + size);
1436
1437 if (access->offset == offset && access->size == size)
1438 /* We found what we were looking for. */
1439 return access;
1440
1441 if (access->offset <= offset
1442 && access->offset + access->size >= offset + size)
1443 {
1444 /* We fit into access which is larger than us. We need to find/create
1445 something below access. But we only allow nesting in call
1446 arguments. */
1447 if (access->nonarg)
1448 return NULL;
1449
1450 return get_access_1 (desc, &access->first_child, offset, size, ctx);
1451 }
1452
1453 if (offset <= access->offset
1454 && offset + size >= access->offset + access->size)
1455 /* We are actually bigger than access, which fully fits into us, take its
1456 place and make all accesses fitting into it its children. */
1457 {
1458 /* But first, we only allow nesting in call arguments so check if that is
1459 what we are trying to represent. */
1460 if (ctx != ISRA_CTX_ARG)
1461 return NULL;
1462
1463 gensum_param_access *r = allocate_access (desc, offset, size);
1464 if (!r)
1465 return NULL;
1466 r->first_child = access;
1467
1468 while (access->next_sibling
1469 && access->next_sibling->offset < offset + size)
1470 access = access->next_sibling;
1471 if (access->offset + access->size > offset + size)
1472 {
1473 /* This must be a different access, which are sorted, so the
1474 following must be true and this signals a partial overlap. */
1475 gcc_assert (access->offset > offset);
1476 return NULL;
1477 }
1478
1479 r->next_sibling = access->next_sibling;
1480 access->next_sibling = NULL;
1481 *ptr = r;
1482 return r;
1483 }
1484
1485 if (offset >= access->offset + access->size)
1486 {
1487 /* We belong after access. */
1488 gensum_param_access *r = allocate_access (desc, offset, size);
1489 if (!r)
1490 return NULL;
1491 r->next_sibling = access->next_sibling;
1492 access->next_sibling = r;
1493 return r;
1494 }
1495
1496 if (offset < access->offset)
1497 {
1498 /* We know the following, otherwise we would have created a
1499 super-access. */
1500 gcc_checking_assert (offset + size < access->offset + access->size);
1501 return NULL;
1502 }
1503
1504 if (offset + size > access->offset + access->size)
1505 {
1506 /* Likewise. */
1507 gcc_checking_assert (offset > access->offset);
1508 return NULL;
1509 }
1510
1511 gcc_unreachable ();
1512}
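/* [Editorial note: illustrative sketch, not part of ipa-sra.cc.] The tree
   maintained by get_access_1 keeps siblings sorted by offset and mutually
   disjoint, and children strictly nested within their parent; partial
   overlaps make the function return NULL. A self-contained check of those
   invariants over hypothetical simplified types (parent size 0 means "no
   parent", as in verify_access_tree_1 below): */
struct toy_access
{
  long offset, size;
  toy_access *first_child, *next_sibling;
};

static bool
toy_access_tree_ok (const toy_access *a, long p_off, long p_size)
{
  for (; a; a = a->next_sibling)
    {
      /* Each node must lie inside its parent...  */
      if (p_size != 0
          && (a->offset < p_off || a->offset + a->size > p_off + p_size))
        return false;
      /* ...its children must nest inside it...  */
      if (!toy_access_tree_ok (a->first_child, a->offset, a->size))
        return false;
      /* ...and siblings must be sorted by offset and disjoint.  */
      if (a->next_sibling
          && a->next_sibling->offset < a->offset + a->size)
        return false;
    }
  return true;
}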
1513
1514/* Return an access describing memory access to the variable described by DESC
1515 at OFFSET with SIZE in context CTX, mark it as used in context CTX. Attempt
1516 to create if it does not exist, but fail and return NULL if there are
1517 already too many accesses, if it would create a partially overlapping access
1518 or if an access would end up in a non-call access. */
1519
1520static gensum_param_access *
1521get_access (gensum_param_desc *desc, HOST_WIDE_INT offset, HOST_WIDE_INT size,
1522 isra_scan_context ctx)
1523{
1524 gcc_checking_assert (desc->split_candidate);
1525
1526 gensum_param_access *access = get_access_1 (desc, &desc->accesses, offset,
1527 size, ctx);
1528 if (!access)
1529 {
1530 disqualify_split_candidate (desc,
1531 "Bad access overlap or too many accesses");
1532 return NULL;
1533 }
1534
1535 switch (ctx)
1536 {
1537 case ISRA_CTX_STORE:
1538 gcc_assert (!desc->by_ref);
1539 /* Fall-through */
1540 case ISRA_CTX_LOAD:
1541 access->nonarg = true;
1542 break;
1543 case ISRA_CTX_ARG:
1544 break;
1545 }
1546
1547 return access;
1548}
1549
1550/* Verify that parameter access tree starting with ACCESS is in good shape.
1551 PARENT_OFFSET and PARENT_SIZE are the corresponding fields of parent of
1552 ACCESS or zero if there is none. */
1553
1554static bool
1555verify_access_tree_1 (gensum_param_access *access, HOST_WIDE_INT parent_offset,
1556 HOST_WIDE_INT parent_size)
1557{
1558 while (access)
1559 {
1560 gcc_assert (access->offset >= 0 && access->size >= 0);
1561
1562 if (parent_size != 0)
1563 {
1564 if (access->offset < parent_offset)
1565 {
1566 error ("Access offset before parent offset");
1567 return true;
1568 }
1569 if (access->size >= parent_size)
1570 {
1571 error ("Access size greater or equal to its parent size");
1572 return true;
1573 }
1574 if (access->offset + access->size > parent_offset + parent_size)
1575 {
1576 error ("Access terminates outside of its parent");
1577 return true;
1578 }
1579 }
1580
1581 if (verify_access_tree_1 (access->first_child, access->offset,
1582 access->size))
1583 return true;
1584
1585 if (access->next_sibling
1586 && (access->next_sibling->offset < access->offset + access->size))
1587 {
1588 error ("Access overlaps with its sibling");
1589 return true;
1590 }
1591
1592 access = access->next_sibling;
1593 }
1594 return false;
1595}
1596
1597/* Verify that parameter access tree starting with ACCESS is in good shape,
1598 halt compilation and dump the tree to stderr if not. */
1599
1600DEBUG_FUNCTION void
1601isra_verify_access_tree (gensum_param_access *access)
1602{
1603 if (verify_access_tree_1 (access, 0, 0))
1604 {
1605 for (; access; access = access->next_sibling)
1606 dump_gensum_access (stderr, access, 2);
1607 internal_error ("IPA-SRA access verification failed");
1608 }
1609}
1610
1611
1612/* Callback of walk_stmt_load_store_addr_ops visit_addr used to determine
1613 GIMPLE_ASM operands with memory constraints which cannot be scalarized. */
1614
1615static bool
1616asm_visit_addr (gimple *, tree op, tree, void *)
1617{
1618 op = get_base_address (op);
1619 if (op
1620 && TREE_CODE (op) == PARM_DECL)
1621 disqualify_split_candidate (op, "Non-scalarizable GIMPLE_ASM operand.");
1622
1623 return false;
1624}
1625
1626/* Mark a dereference of parameter identified by DESC of distance DIST in a
1627 basic block BB, unless the BB has already been marked as potentially
1628 final. */
1629
1630static void
1631mark_param_dereference (gensum_param_desc *desc, HOST_WIDE_INT dist,
1632 basic_block bb)
1633{
1634 gcc_assert (desc->by_ref);
1635 gcc_checking_assert (desc->split_candidate);
1636
1637 if (desc->safe_ref
1638 || bitmap_bit_p (final_bbs, bb->index))
1639 return;
1640
1641 int idx = bb->index * unsafe_by_ref_count + desc->deref_index;
1642 if (bb_dereferences[idx] < dist)
1643 bb_dereferences[idx] = dist;
1644}
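/* [Editorial note: illustrative sketch, not part of ipa-sra.cc.] The
   bb_dereferences table used above is a flat array addressed as a row-major
   matrix: one row per basic block, one column per unsafe by-reference
   parameter. A hypothetical sketch of the same indexing scheme: */
static long
toy_get_dereference (const long *table, int unsafe_param_count,
                     int bb_index, int deref_index)
{
  /* Same formula as above: row * row-width + column.  */
  return table[bb_index * unsafe_param_count + deref_index];
}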
1645
1646/* Return true, if any potential replacements should use NEW_TYPE as opposed to
1647 previously recorded OLD_TYPE. */
1648
1649static bool
1650type_prevails_p (tree old_type, tree new_type)
1651{
1652 if (old_type == new_type)
1653 return false;
1654
1655 /* Non-aggregates are always better. */
1656 if (!is_gimple_reg_type (old_type)
1657 && is_gimple_reg_type (new_type))
1658 return true;
1659 if (is_gimple_reg_type (old_type)
1660 && !is_gimple_reg_type (new_type))
1661 return false;
1662
1663 /* Prefer any complex or vector type over any other scalar type. */
1664 if (TREE_CODE (old_type) != COMPLEX_TYPE
1665 && TREE_CODE (old_type) != VECTOR_TYPE
1666 && (TREE_CODE (new_type) == COMPLEX_TYPE
1667 || TREE_CODE (new_type) == VECTOR_TYPE))
1668 return true;
1669 if ((TREE_CODE (old_type) == COMPLEX_TYPE
1670 || TREE_CODE (old_type) == VECTOR_TYPE)
1671 && TREE_CODE (new_type) != COMPLEX_TYPE
1672 && TREE_CODE (new_type) != VECTOR_TYPE)
1673 return false;
1674
1675 /* Use the integral type with the bigger precision. */
1676 if (INTEGRAL_TYPE_P (old_type)
1677 && INTEGRAL_TYPE_P (new_type))
1678 return (TYPE_PRECISION (new_type) > TYPE_PRECISION (old_type));
1679
1680 /* Attempt to disregard any integral type with non-full precision. */
1681 if (INTEGRAL_TYPE_P (old_type)
1682 && (TREE_INT_CST_LOW (TYPE_SIZE (old_type))
1683 != TYPE_PRECISION (old_type)))
1684 return true;
1685 if (INTEGRAL_TYPE_P (new_type)
1686 && (TREE_INT_CST_LOW (TYPE_SIZE (new_type))
1687 != TYPE_PRECISION (new_type)))
1688 return false;
1689 /* Stabilize the selection. */
1690 return TYPE_UID (old_type) < TYPE_UID (new_type);
1691}
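/* [Editorial note: illustrative examples, not part of ipa-sra.cc.] The
   preference order encoded by type_prevails_p, shown on concrete pairs and
   assuming the usual type layouts:
     type_prevails_p (struct_type, double_type)  -> true  (register type wins)
     type_prevails_p (float_type, complex_float) -> true  (complex/vector win)
     type_prevails_p (short_type, int_type)      -> true  (higher precision)
     type_prevails_p (int_type, bool_type)       -> false (lower precision)
     type_prevails_p (bool_type, float_type)     -> true  (bool has non-full
                                                           precision, disregard)
   Any remaining ties are broken by TYPE_UID to keep the choice stable. */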
1692
1693/* When scanning an expression which is a call argument, this structure
1694 specifies the call and the position of the argument. */
1695
1696struct scan_call_info
1697{
1698 /* Call graph edge representing the call. */
1699 cgraph_edge *cs;
1700 /* Total number of arguments in the call. */
1701 unsigned argument_count;
1702 /* Number of the actual argument being scanned. */
1703 unsigned arg_idx;
1704};
1705
1706/* Record use of ACCESS which belongs to a parameter described by DESC in a
1707 call argument described by CALL_INFO. */
1708
1709static void
1710record_nonregister_call_use (gensum_param_desc *desc,
1711 scan_call_info *call_info,
1712 unsigned unit_offset, unsigned unit_size)
1713{
1714 isra_call_summary *csum = call_sums->get_create (call_info->cs);
1715 csum->init_inputs (call_info->argument_count);
1716
1717 isra_param_flow *param_flow = &csum->m_arg_flow[call_info->arg_idx];
1718 param_flow->aggregate_pass_through = true;
1719 set_single_param_flow_source (param_flow, desc->param_number);
1720 param_flow->unit_offset = unit_offset;
1721 param_flow->unit_size = unit_size;
1722 desc->call_uses++;
1723}
1724
1725/* Callback of walk_aliased_vdefs, just mark that there was a possible
1726 modification. */
1727
1728static bool
1729mark_maybe_modified (ao_ref *, tree, void *data)
1730{
1731 bool *maybe_modified = (bool *) data;
1732 *maybe_modified = true;
1733 return true;
1734}
1735
1736/* Analyze expression EXPR from GIMPLE for accesses to parameters. CTX
1737 specifies whether EXPR is used in a load, store or as an argument call. BB
1738 must be the basic block in which expr resides. If CTX specifies call
1739 argument context, CALL_INFO must describe that call and argument position,
1740 otherwise it is ignored. */
1741
1742static void
1743scan_expr_access (tree expr, gimple *stmt, isra_scan_context ctx,
1744 basic_block bb, scan_call_info *call_info = NULL)
1745{
1746 poly_int64 poffset, psize, pmax_size;
1747 HOST_WIDE_INT offset, size, max_size;
1748 tree base;
1749 bool deref = false;
1750 bool reverse;
1751
1752 if (TREE_CODE (expr) == ADDR_EXPR)
1753 {
1754 if (ctx == ISRA_CTX_ARG)
1755 return;
1756 tree t = get_base_address (TREE_OPERAND (expr, 0));
1757 if (TREE_CODE (t) == VAR_DECL && !TREE_STATIC (t))
1758 loaded_decls->add (t);
1759 return;
1760 }
1761 if (TREE_CODE (expr) == SSA_NAME
1762 || CONSTANT_CLASS_P (expr))
1763 return;
1764
1765 if (TREE_CODE (expr) == BIT_FIELD_REF
1766 || TREE_CODE (expr) == IMAGPART_EXPR
1767 || TREE_CODE (expr) == REALPART_EXPR)
1768 expr = TREE_OPERAND (expr, 0);
1769
1770 base = get_ref_base_and_extent (expr, &poffset, &psize, &pmax_size, &reverse);
1771
1772 if (TREE_CODE (base) == MEM_REF)
1773 {
1774 tree op = TREE_OPERAND (base, 0);
1775 if (TREE_CODE (op) != SSA_NAME
1776 || !SSA_NAME_IS_DEFAULT_DEF (op))
1777 return;
1778 base = SSA_NAME_VAR (op);
1779 if (!base)
1780 return;
1781 deref = true;
1782 }
1783 else if (TREE_CODE (base) == VAR_DECL
1784 && !TREE_STATIC (base)
1785 && (ctx == ISRA_CTX_ARG
1786 || ctx == ISRA_CTX_LOAD))
1787 loaded_decls->add (base);
1788
1789 if (TREE_CODE (base) != PARM_DECL)
1790 return;
1791
1792 gensum_param_desc *desc = get_gensum_param_desc (base);
1793 if (!desc || !desc->split_candidate)
1794 return;
1795
1796 if (!poffset.is_constant (&offset)
1797 || !psize.is_constant (&size)
1798 || !pmax_size.is_constant (&max_size))
1799 {
1800 disqualify_split_candidate (desc, "Encountered a polynomial-sized "
1801 "access.");
1802 return;
1803 }
1804 if (size < 0 || size != max_size)
1805 {
1806 disqualify_split_candidate (desc, "Encountered a variable sized access.");
1807 return;
1808 }
1809 if (TREE_CODE (expr) == COMPONENT_REF
1810 && DECL_BIT_FIELD (TREE_OPERAND (expr, 1)))
1811 {
1812 disqualify_split_candidate (desc, "Encountered a bit-field access.");
1813 return;
1814 }
1815 if (offset < 0)
1816 {
1817 disqualify_split_candidate (desc, "Encountered an access at a "
1818 "negative offset.");
1819 return;
1820 }
1821 gcc_assert ((offset % BITS_PER_UNIT) == 0);
1822 gcc_assert ((size % BITS_PER_UNIT) == 0);
1823 if ((offset / BITS_PER_UNIT) >= (UINT_MAX - ISRA_ARG_SIZE_LIMIT)
1824 || (size / BITS_PER_UNIT) >= ISRA_ARG_SIZE_LIMIT)
1825 {
1826 disqualify_split_candidate (desc, "Encountered an access with too big "
1827 "offset or size");
1828 return;
1829 }
1830
1831 tree type = TREE_TYPE (expr);
1832 unsigned int exp_align = get_object_alignment (expr);
1833
1834 if (exp_align < TYPE_ALIGN (type))
1835 {
1836 disqualify_split_candidate (desc, "Underaligned access.");
1837 return;
1838 }
1839
1840 if (deref)
1841 {
1842 if (!desc->by_ref)
1843 {
1844 disqualify_split_candidate (desc, "Dereferencing a non-reference.");
1845 return;
1846 }
1847 else if (ctx == ISRA_CTX_STORE)
1848 {
1849 disqualify_split_candidate (desc, "Storing to data passed by "
1850 "reference.");
1851 return;
1852 }
1853
1854 if (!aa_walking_limit)
1855 {
1856 disqualify_split_candidate (desc, "Out of alias analysis step "
1857 "limit.");
1858 return;
1859 }
1860
1861 gcc_checking_assert (gimple_vuse (stmt));
1862 bool maybe_modified = false;
1863 ao_ref ar;
1864
1865 ao_ref_init (&ar, expr);
1866 bitmap visited = BITMAP_ALLOC (NULL);
1867 int walked = walk_aliased_vdefs (&ar, gimple_vuse (stmt),
1868 mark_maybe_modified, &maybe_modified,
1869 &visited, NULL, aa_walking_limit);
1870 BITMAP_FREE (visited);
1871 if (walked > 0)
1872 {
1873 gcc_assert (aa_walking_limit > walked);
1874 aa_walking_limit = aa_walking_limit - walked;
1875 }
1876 if (walked < 0)
1877 aa_walking_limit = 0;
1878 if (maybe_modified || walked < 0)
1879 {
1880 disqualify_split_candidate (desc, "Data passed by reference possibly "
1881 "modified through an alias.");
1882 return;
1883 }
1884 else
1885 mark_param_dereference (desc, offset + size, bb);
1886 }
1887 else
1888 /* Pointer parameters with direct uses should have been ruled out by
1889 analyzing SSA default def when looking at the parameters. */
1890 gcc_assert (!desc->by_ref);
1891
1892 gensum_param_access *access = get_access (desc, offset, size, ctx);
1893 if (!access)
1894 return;
1895
1896 if (ctx == ISRA_CTX_ARG)
1897 {
1898 gcc_checking_assert (call_info);
1899
1900 if (!deref)
1901 record_nonregister_call_use (desc, call_info, offset / BITS_PER_UNIT,
1902 size / BITS_PER_UNIT);
1903 else
1904 /* This is not a pass-through of a pointer, this is a use like any
1905 other. */
1906 access->nonarg = true;
1907 }
1908 else if (ctx == ISRA_CTX_LOAD && bb->count.initialized_p ())
1909 access->load_count += bb->count;
1910
1911 if (!access->type)
1912 {
1913 access->type = type;
1914 access->alias_ptr_type = reference_alias_ptr_type (expr);
1915 access->reverse = reverse;
1916 }
1917 else
1918 {
1919 if (exp_align < TYPE_ALIGN (access->type))
1920 {
1921 disqualify_split_candidate (desc, "Reference has lower alignment "
1922 "than a previous one.");
1923 return;
1924 }
1925 if (access->alias_ptr_type != reference_alias_ptr_type (expr))
1926 {
1927 disqualify_split_candidate (desc, "Multiple alias pointer types.");
1928 return;
1929 }
1930 if (access->reverse != reverse)
1931 {
1932 disqualify_split_candidate (desc, "Both normal and reverse "
1933 "scalar storage order.");
1934 return;
1935 }
1936 if (!deref
1937 && (AGGREGATE_TYPE_P (type) || AGGREGATE_TYPE_P (access->type))
1938 && (TYPE_MAIN_VARIANT (access->type) != TYPE_MAIN_VARIANT (type)))
1939 {
1940 /* We need the same aggregate type on all accesses to be able to
1941 distinguish transformation spots from pass-through arguments in
1942 the transformation phase. */
1943 disqualify_split_candidate (desc, "We do not support aggregate "
1944 "type punning.");
1945 return;
1946 }
1947
1948 if (type_prevails_p (access->type, type))
1949 access->type = type;
1950 }
1951}
1952
1953/* Scan the body of the function described by NODE and FUN and create access
1954 trees for its parameters. */
1955
1956static void
1957scan_function (cgraph_node *node, struct function *fun)
1958{
1959 basic_block bb;
1960
1961 FOR_EACH_BB_FN (bb, fun)
1962 {
1963 gimple_stmt_iterator gsi;
1964 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1965 {
1966 gimple *stmt = gsi_stmt (gsi);
1967
1968 if (final_bbs && stmt_can_throw_external (fun, stmt))
1969 bitmap_set_bit (final_bbs, bb->index);
1970 switch (gimple_code (stmt))
1971 {
1972 case GIMPLE_RETURN:
1973 {
1974 tree t = gimple_return_retval (as_a <greturn *> (stmt));
1975 if (t != NULL_TREE)
1976 scan_expr_access (t, stmt, ISRA_CTX_LOAD, bb);
1977 if (final_bbs)
1978 bitmap_set_bit (final_bbs, bb->index);
1979 }
1980 break;
1981
1982 case GIMPLE_ASSIGN:
1983 if (gimple_assign_single_p (stmt)
1984 && !gimple_clobber_p (stmt))
1985 {
1986 tree rhs = gimple_assign_rhs1 (stmt);
1987 scan_expr_access (rhs, stmt, ISRA_CTX_LOAD, bb);
1988 tree lhs = gimple_assign_lhs (stmt);
1989 scan_expr_access (lhs, stmt, ISRA_CTX_STORE, bb);
1990 }
1991 break;
1992
1993 case GIMPLE_CALL:
1994 {
1995 unsigned argument_count = gimple_call_num_args (stmt);
1996 isra_scan_context ctx = ISRA_CTX_ARG;
1997 scan_call_info call_info, *call_info_p = &call_info;
1998 if (gimple_call_internal_p (stmt))
1999 {
2000 call_info_p = NULL;
2001 ctx = ISRA_CTX_LOAD;
2002 internal_fn ifn = gimple_call_internal_fn (stmt);
2003 if (internal_store_fn_p (ifn))
2004 ctx = ISRA_CTX_STORE;
2005 }
2006 else
2007 {
2008 call_info.cs = node->get_edge (stmt);
2009 call_info.argument_count = argument_count;
2010 }
2011
2012 for (unsigned i = 0; i < argument_count; i++)
2013 {
2014 call_info.arg_idx = i;
2015 scan_expr_access (gimple_call_arg (stmt, i), stmt,
2016 ctx, bb, call_info_p);
2017 }
2018
2019 tree lhs = gimple_call_lhs (stmt);
2020 if (lhs)
2021 scan_expr_access (lhs, stmt, ISRA_CTX_STORE, bb);
2022 int flags = gimple_call_flags (stmt);
2023 if (final_bbs
2024 && (((flags & (ECF_CONST | ECF_PURE)) == 0)
2025 || (flags & ECF_LOOPING_CONST_OR_PURE)))
2026 bitmap_set_bit (final_bbs, bb->index);
2027 }
2028 break;
2029
2030 case GIMPLE_ASM:
2031 {
2032 gasm *asm_stmt = as_a <gasm *> (stmt);
2033 walk_stmt_load_store_addr_ops (asm_stmt, NULL, NULL, NULL,
2034 asm_visit_addr);
2035 if (final_bbs)
2036 bitmap_set_bit (final_bbs, bb->index);
2037
2038 for (unsigned i = 0; i < gimple_asm_ninputs (asm_stmt); i++)
2039 {
2040 tree t = TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
2041 scan_expr_access (t, stmt, ISRA_CTX_LOAD, bb);
2042 }
2043 for (unsigned i = 0; i < gimple_asm_noutputs (asm_stmt); i++)
2044 {
2045 tree t = TREE_VALUE (gimple_asm_output_op (asm_stmt, i));
2046 scan_expr_access (t, stmt, ISRA_CTX_STORE, bb);
2047 }
2048 }
2049 break;
2050
2051 default:
2052 break;
2053 }
2054 }
2055 }
2056}
2057
2058/* Return true if SSA_NAME NAME of function described by FUN is only used in
2059 return statements, or if results of any operations it is involved in are
2060 only used in return statements. ANALYZED is a bitmap that tracks which SSA
2061 names we have already started investigating. */
2062
2063static bool
2064ssa_name_only_returned_p (function *fun, tree name, bitmap analyzed)
2065{
2066 bool res = true;
2067 imm_use_iterator imm_iter;
2068 gimple *stmt;
2069
2070 FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
2071 {
2072 if (is_gimple_debug (stmt))
2073 continue;
2074
2075 if (gimple_code (stmt) == GIMPLE_RETURN)
2076 {
2077 tree t = gimple_return_retval (as_a <greturn *> (stmt));
2078 if (t != name)
2079 {
2080 res = false;
2081 break;
2082 }
2083 }
2084 else if (!stmt_unremovable_because_of_non_call_eh_p (fun, stmt)
2085 && ((is_gimple_assign (stmt) && !gimple_has_volatile_ops (stmt))
2086 || gimple_code (stmt) == GIMPLE_PHI))
2087 {
2088 /* TODO: And perhaps for const function calls too? */
2089 tree lhs;
2090 if (gimple_code (stmt) == GIMPLE_PHI)
2091 lhs = gimple_phi_result (stmt);
2092 else
2093 lhs = gimple_assign_lhs (stmt);
2094
2095 if (TREE_CODE (lhs) != SSA_NAME)
2096 {
2097 res = false;
2098 break;
2099 }
2100 gcc_assert (!gimple_vdef (stmt));
2101 if (bitmap_set_bit (analyzed, SSA_NAME_VERSION (lhs))
2102 && !ssa_name_only_returned_p (fun, lhs, analyzed))
2103 {
2104 res = false;
2105 break;
2106 }
2107 }
2108 else
2109 {
2110 res = false;
2111 break;
2112 }
2113 }
2114 return res;
2115}
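/* [Editorial note: illustrative sketch, not part of ipa-sra.cc.] A
   self-contained analogue of ssa_name_only_returned_p on a toy def-use
   graph: a value qualifies if every transitive use is a return statement
   or a removable operation whose result again qualifies. (The real code
   additionally memoizes visited SSA names in the ANALYZED bitmap.) */
#include <vector>

struct toy_stmt
{
  bool is_return;
  bool is_simple_op; /* Assignment or PHI without side effects.  */
  std::vector<const toy_stmt *> result_uses;
};

static bool
toy_only_returned_p (const toy_stmt *use_site)
{
  if (use_site->is_return)
    return true;
  if (!use_site->is_simple_op)
    return false; /* Escapes into a call, store, etc.  */
  for (const toy_stmt *u : use_site->result_uses)
    if (!toy_only_returned_p (u))
      return false;
  return true;
}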
2116
2117/* Inspect the uses of the return value of the call associated with CS, and if
2118 it is not used or if it is only used to construct the return value of the
2119 caller, mark it as such in call or caller summary. Also check for
2120 misaligned arguments. */
2121
2122static void
2123isra_analyze_call (cgraph_edge *cs)
2124{
2125 gcall *call_stmt = cs->call_stmt;
2126 unsigned count = gimple_call_num_args (call_stmt);
2127 isra_call_summary *csum = call_sums->get_create (cs);
2128
2129 for (unsigned i = 0; i < count; i++)
2130 {
2131 tree arg = gimple_call_arg (call_stmt, i);
2132 if (TREE_CODE (arg) == ADDR_EXPR)
2133 {
2134 poly_int64 poffset, psize, pmax_size;
2135 bool reverse;
2136 tree base = get_ref_base_and_extent (TREE_OPERAND (arg, 0), &poffset,
2137 &psize, &pmax_size, &reverse);
2138 HOST_WIDE_INT offset;
2139 unsigned HOST_WIDE_INT ds;
2140 if (DECL_P (base)
2141 && (poffset.is_constant (&offset))
2142 && tree_fits_uhwi_p (DECL_SIZE (base))
2143 && ((ds = tree_to_uhwi (DECL_SIZE (base)) - offset)
2144 < ISRA_ARG_SIZE_LIMIT * BITS_PER_UNIT))
2145 {
2146 csum->init_inputs (count);
2147 gcc_assert (!csum->m_arg_flow[i].aggregate_pass_through);
2148 csum->m_arg_flow[i].unit_size = ds / BITS_PER_UNIT;
2149 }
2150
2151 if (TREE_CODE (base) == VAR_DECL
2152 && !TREE_STATIC (base)
2153 && !loaded_decls->contains (base))
2154 {
2155 csum->init_inputs (count);
2156 csum->m_arg_flow[i].constructed_for_calls = true;
2157 }
2158 }
2159
2160 if (is_gimple_reg (arg))
2161 continue;
2162
2163 tree offset;
2164 poly_int64 bitsize, bitpos;
2165 machine_mode mode;
2166 int unsignedp, reversep, volatilep = 0;
2167 get_inner_reference (arg, &bitsize, &bitpos, &offset, &mode,
2168 &unsignedp, &reversep, &volatilep);
2169 if (!multiple_p (bitpos, BITS_PER_UNIT))
2170 {
2171 csum->m_bit_aligned_arg = true;
2172 break;
2173 }
2174 }
2175
2176 tree lhs = gimple_call_lhs (call_stmt);
2177 if (lhs)
2178 {
2179 /* TODO: Also detect aggregates on a LHS of a call that are only returned
2180 from this function (without being read anywhere). */
2181 if (TREE_CODE (lhs) == SSA_NAME)
2182 {
2183 bitmap analyzed = BITMAP_ALLOC (NULL);
2184 if (ssa_name_only_returned_p (DECL_STRUCT_FUNCTION (cs->caller->decl),
2185 lhs, analyzed))
2186 csum->m_return_returned = true;
2187 BITMAP_FREE (analyzed);
2188 }
2189 }
2190 else
2191 csum->m_return_ignored = true;
2192}
2193
2194/* Look at all calls going out of NODE, described also by IFS and perform all
2195 analyses necessary for IPA-SRA that are not done at body scan time or done
2196 even when body is not scanned because the function is not a candidate. */
2197
2198static void
2199isra_analyze_all_outgoing_calls (cgraph_node *node)
2200{
2201 for (cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
2202 isra_analyze_call (cs);
2203 for (cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
2204 isra_analyze_call (cs);
2205}
2206
2207/* Dump a dereferences table with heading STR to file F. */
2208
2209static void
2210dump_dereferences_table (FILE *f, struct function *fun, const char *str)
2211{
2212 basic_block bb;
2213
2214 fprintf (dump_file, "%s", str);
2215 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (fun),
2216 EXIT_BLOCK_PTR_FOR_FN (fun), next_bb)
2217 {
2218 fprintf (f, "%4i %i ", bb->index, bitmap_bit_p (final_bbs, bb->index));
2219 if (bb != EXIT_BLOCK_PTR_FOR_FN (fun))
2220 {
2221 int i;
2222 for (i = 0; i < unsafe_by_ref_count; i++)
2223 {
2224 int idx = bb->index * unsafe_by_ref_count + i;
2225 fprintf (f, " %4" HOST_WIDE_INT_PRINT "d", bb_dereferences[idx]);
2226 }
2227 }
2228 fprintf (f, "\n");
2229 }
2230 fprintf (dump_file, "\n");
2231}
2232
2233/* Propagate distances in bb_dereferences in the opposite direction than the
2234 control flow edges, in each step storing the maximum of the current value
2235 and the minimum of all successors. These steps are repeated until the table
2236 stabilizes. Note that BBs which might terminate the function (according to
2237 the final_bbs bitmap) are never updated in this way. */
2238
2239static void
2240propagate_dereference_distances (struct function *fun)
2241{
2242 basic_block bb;
2243
2244 if (dump_file && (dump_flags & TDF_DETAILS))
2245 dump_dereferences_table (dump_file, fun,
2246 "Dereference table before propagation:\n");
2247
2248 auto_vec<basic_block> queue (last_basic_block_for_fn (fun));
2249 queue.quick_push (ENTRY_BLOCK_PTR_FOR_FN (fun));
2250 FOR_EACH_BB_FN (bb, fun)
2251 {
2252 queue.quick_push (bb);
2253 bb->aux = bb;
2254 }
2255
2256 while (!queue.is_empty ())
2257 {
2258 edge_iterator ei;
2259 edge e;
2260 bool change = false;
2261 int i;
2262
2263 bb = queue.pop ();
2264 bb->aux = NULL;
2265
2266 if (bitmap_bit_p (final_bbs, bb->index))
2267 continue;
2268
2269 for (i = 0; i < unsafe_by_ref_count; i++)
2270 {
2271 int idx = bb->index * unsafe_by_ref_count + i;
2272 bool first = true;
2273 HOST_WIDE_INT inh = 0;
2274
2275 FOR_EACH_EDGE (e, ei, bb->succs)
2276 {
2277 int succ_idx = e->dest->index * unsafe_by_ref_count + i;
2278
2279 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (fun))
2280 continue;
2281
2282 if (first)
2283 {
2284 first = false;
2285 inh = bb_dereferences [succ_idx];
2286 }
2287 else if (bb_dereferences [succ_idx] < inh)
2288 inh = bb_dereferences [succ_idx];
2289 }
2290
2291 if (!first && bb_dereferences[idx] < inh)
2292 {
2293 bb_dereferences[idx] = inh;
2294 change = true;
2295 }
2296 }
2297
2298 if (change)
2299 FOR_EACH_EDGE (e, ei, bb->preds)
2300 {
2301 if (e->src->aux)
2302 continue;
2303
2304 e->src->aux = e->src;
2305 queue.quick_push (e->src);
2306 }
2307 }
2308
2309 if (dump_file && (dump_flags & TDF_DETAILS))
2310 dump_dereferences_table (dump_file, fun,
2311 "Dereference table after propagation:\n");
2312}
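/* [Editorial note: illustrative sketch, not part of ipa-sra.cc.] The
   propagation above is a backward dataflow problem: every block may inherit
   the minimum of its successors' dereference distances, keeping the maximum
   of that and its own value, until a fixed point is reached. A hypothetical
   toy version on an adjacency-list CFG (final_bbs handling and the worklist
   ordering of the real code are omitted for brevity): */
#include <algorithm>
#include <climits>
#include <vector>

static void
toy_propagate (const std::vector<std::vector<int> > &succs,
               std::vector<long> &dist)
{
  bool change = true;
  while (change)
    {
      change = false;
      for (unsigned bb = 0; bb < succs.size (); bb++)
        {
          long inh = LONG_MAX; /* Minimum over all successors.  */
          for (int s : succs[bb])
            inh = std::min (inh, dist[s]);
          if (!succs[bb].empty () && dist[bb] < inh)
            {
              dist[bb] = inh; /* Keep the larger of own and inherited.  */
              change = true;
            }
        }
    }
}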
2313
2314/* Return true if the ACCESS loads happen frequently enough in FUN to justify
2315 moving them to the caller, which would then pass only the result. */
2316
2317static bool
2318dereference_probable_p (struct function *fun, gensum_param_access *access)
2319{
2320 int threshold = opt_for_fn (fun->decl, param_ipa_sra_deref_prob_threshold);
2321 return access->load_count
2322 >= ENTRY_BLOCK_PTR_FOR_FN (fun)->count.apply_scale (threshold, 100);
2323}
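/* [Editorial note: illustrative sketch, not part of ipa-sra.cc.] The
   profitability test above, reduced to plain integers: with a threshold of,
   say, 50 percent, a load must execute at least half as often as the
   function entry. The real code compares profile_count values via
   apply_scale instead. */
static bool
toy_dereference_probable_p (long load_count, long entry_count,
                            int threshold_percent)
{
  return load_count >= entry_count * threshold_percent / 100;
}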
2324
2325/* Perform basic checks on ACCESS to PARM (of FUN) described by DESC and all
2326 its children, return true if the parameter cannot be split, otherwise return
2327 false and update *NONARG_ACC_SIZE and *ONLY_CALLS. ENTRY_BB_INDEX must be
2328 the index of the entry BB in the function of PARM. */
2329
2330static bool
2331check_gensum_access (struct function *fun, tree parm, gensum_param_desc *desc,
2332 gensum_param_access *access,
2333 HOST_WIDE_INT *nonarg_acc_size, bool *only_calls,
2334 int entry_bb_index)
2335{
2336 if (access->nonarg)
2337 {
2338 *only_calls = false;
2339 *nonarg_acc_size += access->size;
2340
2341 if (access->first_child)
2342 {
2343 disqualify_split_candidate (desc, "Overlapping non-call uses.");
2344 return true;
2345 }
2346 }
2347 /* Do not decompose a non-BLKmode param in a way that would create
2348 BLKmode params. Especially for by-reference passing (thus,
2349 pointer-type param) this is hardly worthwhile. */
2350 if (DECL_MODE (parm) != BLKmode
2351 && TYPE_MODE (access->type) == BLKmode)
2352 {
2353 disqualify_split_candidate (desc, "Would convert a non-BLK to a BLK.");
2354 return true;
2355 }
2356
2357 if (desc->by_ref)
2358 {
2359 if (desc->safe_ref)
2360 {
2361 if (!dereference_probable_p (fun, access))
2362 {
2363 disqualify_split_candidate (desc, "Dereferences in callers "
2364 "would happen much more frequently.");
2365 return true;
2366 }
2367 }
2368 else
2369 {
2370 int idx = (entry_bb_index * unsafe_by_ref_count + desc->deref_index);
2371 if ((access->offset + access->size) > bb_dereferences[idx])
2372 {
2373 if (!dereference_probable_p (fun, access))
2374 {
2375 disqualify_split_candidate (desc, "Would create a possibly "
2376 "illegal dereference in a "
2377 "caller.");
2378 return true;
2379 }
2380 desc->conditionally_dereferenceable = true;
2381 }
2382 }
2383 }
2384
2385 for (gensum_param_access *ch = access->first_child;
2386 ch;
2387 ch = ch->next_sibling)
2388 if (check_gensum_access (fun, parm, desc, ch, nonarg_acc_size, only_calls,
2389 entry_bb_index))
2390 return true;
2391
2392 return false;
2393}
2394
2395/* Copy data from FROM and all of its children to a vector of accesses in IPA
2396 descriptor DESC. */
2397
2398static void
2399copy_accesses_to_ipa_desc (gensum_param_access *from, isra_param_desc *desc)
2400{
2401 param_access *to = ggc_cleared_alloc<param_access> ();
2402 gcc_checking_assert ((from->offset % BITS_PER_UNIT) == 0);
2403 gcc_checking_assert ((from->size % BITS_PER_UNIT) == 0);
2404 to->unit_offset = from->offset / BITS_PER_UNIT;
2405 to->unit_size = from->size / BITS_PER_UNIT;
2406 to->type = from->type;
2407 to->alias_ptr_type = from->alias_ptr_type;
2408 to->certain = from->nonarg;
2409 to->reverse = from->reverse;
2410 vec_safe_push (desc->accesses, to);
2411
2412 for (gensum_param_access *ch = from->first_child;
2413 ch;
2414 ch = ch->next_sibling)
2415 copy_accesses_to_ipa_desc (ch, desc);
2416}
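/* [Editorial note: illustrative sketch, not part of ipa-sra.cc.] The copy
   above flattens one access subtree into a vector in preorder: the node
   itself first, then its children (siblings are handled by the caller's
   loop). A self-contained sketch with hypothetical types: */
#include <vector>

struct toy_node { int offset; toy_node *first_child, *next_sibling; };

static void
toy_flatten (const toy_node *n, std::vector<int> &out)
{
  out.push_back (n->offset); /* Visit the node itself first...  */
  for (const toy_node *ch = n->first_child; ch; ch = ch->next_sibling)
    toy_flatten (ch, out);   /* ...then each child subtree in order.  */
}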
2417
2418/* Analyze function body scan results stored in PARAM_DESCRIPTIONS, detect
2419 possible transformations and store information about those in the function
2420 summary. NODE, FUN and IFS are various structures describing the currently
2421 analyzed function. */
2422
2423static void
2424process_scan_results (cgraph_node *node, struct function *fun,
2425 isra_func_summary *ifs,
2426 vec<gensum_param_desc> *param_descriptions)
2427{
2428 bool check_pass_throughs = false;
2429 bool dereferences_propagated = false;
2430 tree parm = DECL_ARGUMENTS (node->decl);
2431 unsigned param_count = param_descriptions->length();
2432
2433 for (unsigned desc_index = 0;
2434 desc_index < param_count;
2435 desc_index++, parm = DECL_CHAIN (parm))
2436 {
2437 gensum_param_desc *desc = &(*param_descriptions)[desc_index];
2438 if (!desc->split_candidate)
2439 continue;
2440
2441 if (flag_checking)
2442 isra_verify_access_tree (desc->accesses);
2443
2444 if (!dereferences_propagated
2445 && desc->by_ref
2446 && !desc->safe_ref
2447 && desc->accesses)
2448 {
2449 propagate_dereference_distances (fun);
2450 dereferences_propagated = true;
2451 }
2452
2453 HOST_WIDE_INT nonarg_acc_size = 0;
2454 bool only_calls = true;
2455 bool check_failed = false;
2456
2457 int entry_bb_index = ENTRY_BLOCK_PTR_FOR_FN (fun)->index;
2458 for (gensum_param_access *acc = desc->accesses;
2459 acc;
2460 acc = acc->next_sibling)
2461 if (check_gensum_access (fun, parm, desc, acc, &nonarg_acc_size,
2462 &only_calls, entry_bb_index))
2463 {
2464 check_failed = true;
2465 break;
2466 }
2467 if (check_failed)
2468 continue;
2469
2470 if (only_calls)
2471 desc->locally_unused = true;
2472
2473 HOST_WIDE_INT cur_param_size
2474 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (parm)));
2475 HOST_WIDE_INT param_size_limit, optimistic_limit;
2476 if (!desc->by_ref || optimize_function_for_size_p (fun))
2477 {
2478 param_size_limit = cur_param_size;
2479 optimistic_limit = cur_param_size;
2480 }
2481 else
2482 {
2483 param_size_limit
2484 = opt_for_fn (node->decl,
2485 param_ipa_sra_ptr_growth_factor) * cur_param_size;
2486 optimistic_limit
2487 = (opt_for_fn (node->decl, param_ipa_sra_ptrwrap_growth_factor)
2488 * param_size_limit);
2489 }
2490
2491 if (nonarg_acc_size > optimistic_limit
2492 || (!desc->by_ref && nonarg_acc_size == param_size_limit))
2493 {
2494 disqualify_split_candidate (desc, "Would result into a too big set "
2495 "of replacements even in best "
2496 "scenarios.");
2497 }
2498 else
2499 {
2500 /* create_parameter_descriptors makes sure unit sizes of all
2501 candidate parameters fit unsigned integers restricted to
2502 ISRA_ARG_SIZE_LIMIT. */
2503 desc->param_size_limit = param_size_limit / BITS_PER_UNIT;
2504 desc->nonarg_acc_size = nonarg_acc_size / BITS_PER_UNIT;
2505 if (desc->split_candidate && desc->ptr_pt_count)
2506 {
2507 gcc_assert (desc->by_ref);
2508 check_pass_throughs = true;
2509 }
2510 }
2511 }
2512
2513 /* When a pointer parameter is passed-through to a callee, in which it is
2514 only used to read only one or a few items, we can attempt to transform it
2515 to obtaining and passing through the items instead of the pointer. But we
2516 must take extra care that 1) we do not introduce any segfault by moving
2517 dereferences above control flow and that 2) the data is not modified
2518 through an alias in this function. The IPA analysis must not introduce
2519 any access candidates unless it can prove both.
2520
2521 The current solution is very crude as it consists of ensuring that the
2522 call postdominates entry BB and that the definition of VUSE of the call is
2523 default definition. TODO: For non-recursive callees in the same
2524 compilation unit we could do better by doing analysis in topological order
2525 and looking into access candidates of callees, using their alias_ptr_types
2526 to attempt real AA. We could also use the maximum known dereferenced
2527 offset in this function at IPA level.
2528
2529 TODO: Measure the overhead and the effect of just being pessimistic.
2530 Maybe this is only -O3 material? */
2531
2532 hash_map<gimple *, bool> analyzed_stmts;
2533 bitmap always_executed_bbs = NULL;
2534 if (check_pass_throughs)
2535 for (cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
2536 {
2537 gcall *call_stmt = cs->call_stmt;
2538 tree vuse = gimple_vuse (call_stmt);
2539
2540 /* If the callee is a const function, we don't get a VUSE. In such
2541 case there will be no memory accesses in the called function (or the
2542 const attribute is wrong) and then we just don't care. */
2543 bool uses_memory_as_obtained = vuse && SSA_NAME_IS_DEFAULT_DEF (vuse);
2544
2545 unsigned count = gimple_call_num_args (call_stmt);
2546 isra_call_summary *csum = call_sums->get_create (cs);
2547 csum->init_inputs (count);
2548 csum->m_before_any_store = uses_memory_as_obtained;
2549 for (unsigned argidx = 0; argidx < count; argidx++)
2550 {
2551 if (!csum->m_arg_flow[argidx].pointer_pass_through)
2552 continue;
2553 unsigned pidx
2554 = get_single_param_flow_source (&csum->m_arg_flow[argidx]);
2555 gensum_param_desc *desc = &(*param_descriptions)[pidx];
2556 if (!desc->split_candidate)
2557 {
2558 csum->m_arg_flow[argidx].pointer_pass_through = false;
2559 continue;
2560 }
2561 if (!uses_memory_as_obtained)
2562 continue;
2563
2564 if (desc->safe_ref)
2565 {
2566 csum->m_arg_flow[argidx].safe_to_import_accesses = true;
2567 continue;
2568 }
2569
2570 /* Walk the basic block and see if its execution can terminate earlier.
2571 Keep the info for later re-use to avoid quadratic behaviour here. */
2572 gimple_stmt_iterator gsi = gsi_for_stmt (call_stmt);
2573 bool safe = true;
2574 int n = 0;
2575 for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
2576 {
2577 bool *b = analyzed_stmts.get (gsi_stmt (gsi));
2578 if (b)
2579 {
2580 safe = *b;
2581 gsi_next (&gsi);
2582 break;
2583 }
2584 n++;
2585 if (stmt_may_terminate_function_p (fun, gsi_stmt (gsi), false))
2586 {
2587 safe = false;
2588 break;
2589 }
2590 }
2591 if (n)
2592 {
2593 if (gsi_end_p (gsi))
2594 gsi = gsi_start_bb (gimple_bb (call_stmt));
2595 for (; gsi_stmt (gsi) != call_stmt; gsi_next (&gsi))
2596 analyzed_stmts.get_or_insert (gsi_stmt (gsi)) = safe;
2597 }
2598
2599 if (safe && !always_executed_bbs)
2600 {
2601 mark_dfs_back_edges ();
2602 always_executed_bbs = find_always_executed_bbs (fun, false);
2603 }
2604 if (safe && bitmap_bit_p (always_executed_bbs, gimple_bb (call_stmt)->index))
2605 csum->m_arg_flow[argidx].safe_to_import_accesses = true;
2606 }
2607
2608 }
2609 BITMAP_FREE (always_executed_bbs);
2610
2611 /* TODO: Add early exit if we disqualified everything. This also requires
2612 that we either relax the restriction that
2613 ipa_param_adjustments.m_always_copy_start must be the number of PARM_DECLs
2614 or store the number of parameters to IPA-SRA function summary and use that
2615 when just removing params. */
2616
2617 vec_safe_reserve_exact (ifs->m_parameters, param_count);
2618 ifs->m_parameters->quick_grow_cleared (param_count);
2619 for (unsigned desc_index = 0; desc_index < param_count; desc_index++)
2620 {
2621 gensum_param_desc *s = &(*param_descriptions)[desc_index];
2622 isra_param_desc *d = &(*ifs->m_parameters)[desc_index];
2623
2624 d->param_size_limit = s->param_size_limit;
2625 d->size_reached = s->nonarg_acc_size;
2626 d->locally_unused = s->locally_unused;
2627 d->split_candidate = s->split_candidate;
2628 d->by_ref = s->by_ref;
2629 d->conditionally_dereferenceable = s->conditionally_dereferenceable;
2630
2631 for (gensum_param_access *acc = s->accesses;
2632 acc;
2633 acc = acc->next_sibling)
2634 copy_accesses_to_ipa_desc (acc, d);
2635 }
2636
2637 if (dump_file)
2638 dump_isra_param_descriptors (dump_file, node->decl, ifs, false);
2639}
2640
2641/* Return true if there are any overlaps among certain accesses of DESC. If
2642 non-NULL, set *CERTAIN_ACCESS_PRESENT_P upon encountering a certain access
2643 too. DESC is assumed to be a split candidate that is not locally
2644 unused. */
2645
2646static bool
2647overlapping_certain_accesses_p (isra_param_desc *desc,
2648 bool *certain_access_present_p)
2649{
2650 unsigned pclen = vec_safe_length (desc->accesses);
2651 for (unsigned i = 0; i < pclen; i++)
2652 {
2653 param_access *a1 = (*desc->accesses)[i];
2654
2655 if (!a1->certain)
2656 continue;
2657 if (certain_access_present_p)
2658 *certain_access_present_p = true;
2659 for (unsigned j = i + 1; j < pclen; j++)
2660 {
2661 param_access *a2 = (*desc->accesses)[j];
2662 if (a2->certain
2663 && a1->unit_offset < a2->unit_offset + a2->unit_size
2664 && a1->unit_offset + a1->unit_size > a2->unit_offset)
2665 return true;
2666 }
2667 }
2668 return false;
2669}
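
/* Editorial aside (not part of ipa-sra.cc): the overlap test above treats
   each access as the half-open byte interval
   [unit_offset, unit_offset + unit_size); two intervals overlap iff each
   one starts before the other one ends.  A minimal standalone sketch:  */

#include <cassert>

struct interval { unsigned offset, size; };

static bool
intervals_overlap (interval a, interval b)
{
  /* Same condition as in overlapping_certain_accesses_p.  */
  return a.offset < b.offset + b.size && a.offset + a.size > b.offset;
}

int
main ()
{
  assert (intervals_overlap ({0, 4}, {2, 4}));   /* [0,4) and [2,6) overlap.  */
  assert (!intervals_overlap ({0, 4}, {4, 4}));  /* Touching intervals do not.  */
  return 0;
}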
2670
2671/* Check for any overlaps of certain param accesses among splitting candidates
2672 and signal an ICE if there are any. If CERTAIN_MUST_EXIST is set, also
2673 check that used splitting candidates have at least one certain access. */
2674
2675static void
2676verify_splitting_accesses (cgraph_node *node, bool certain_must_exist)
2677{
2678 isra_func_summary *ifs = func_sums->get (node);
2679 if (!ifs || !ifs->m_candidate)
2680 return;
2681 unsigned param_count = vec_safe_length (ifs->m_parameters);
2682 for (unsigned pidx = 0; pidx < param_count; pidx++)
2683 {
2684 isra_param_desc *desc = &(*ifs->m_parameters)[pidx];
2685 if (!desc->split_candidate || desc->locally_unused)
2686 continue;
2687
2688 bool certain_access_present = !certain_must_exist;
2689 if (overlapping_certain_accesses_p (desc, &certain_access_present))
2690 internal_error ("function %qs, parameter %u, has IPA-SRA accesses "
2691 "which overlap", node->dump_name (), pidx);
2692 if (!certain_access_present)
2693 internal_error ("function %qs, parameter %u, is used but does not "
2694 "have any certain IPA-SRA access",
2695 node->dump_name (), pidx);
2696 }
2697}
2698
2699/* Intraprocedural part of IPA-SRA analysis. Scan bodies of all functions in
2700 this compilation unit and create summary structures describing IPA-SRA
2701 opportunities and constraints in them. */
2702
2703static void
2704ipa_sra_generate_summary (void)
2705{
2706 struct cgraph_node *node;
2707
2708 gcc_checking_assert (!func_sums);
2709 gcc_checking_assert (!call_sums);
2710 func_sums
2711 = (new (ggc_alloc_no_dtor <ipa_sra_function_summaries> ())
2712 ipa_sra_function_summaries (symtab, true));
2713 call_sums = new ipa_sra_call_summaries (symtab);
2714
2715 FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
2716 ipa_sra_summarize_function (node);
2717 return;
2718}
2719
2720 /* Write intraprocedural analysis information about edge E into a stream for
2721 LTO WPA. */
2722
2723static void
2724isra_write_edge_summary (output_block *ob, cgraph_edge *e)
2725{
2726 isra_call_summary *csum = call_sums->get (e);
2727 unsigned input_count = csum->m_arg_flow.length ();
2728 streamer_write_uhwi (ob, input_count);
2729 for (unsigned i = 0; i < input_count; i++)
2730 {
2731 isra_param_flow *ipf = &csum->m_arg_flow[i];
2732 streamer_write_hwi (ob, ipf->length);
2733 bitpack_d bp = bitpack_create (ob->main_stream);
2734 for (int j = 0; j < ipf->length; j++)
2735 bp_pack_value (&bp, ipf->inputs[j], 8);
2736 bp_pack_value (&bp, ipf->aggregate_pass_through, 1);
2737 bp_pack_value (&bp, ipf->pointer_pass_through, 1);
2738 bp_pack_value (&bp, ipf->safe_to_import_accesses, 1);
2739 bp_pack_value (&bp, ipf->constructed_for_calls, 1);
2740 streamer_write_bitpack (&bp);
2741 streamer_write_uhwi (ob, ipf->unit_offset);
2742 streamer_write_uhwi (ob, ipf->unit_size);
2743 }
2744 bitpack_d bp = bitpack_create (ob->main_stream);
2745 bp_pack_value (&bp, csum->m_return_ignored, 1);
2746 bp_pack_value (&bp, csum->m_return_returned, 1);
2747 bp_pack_value (&bp, csum->m_bit_aligned_arg, 1);
2748 bp_pack_value (&bp, csum->m_before_any_store, 1);
2749 streamer_write_bitpack (&bp);
2750}
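
/* Editorial aside (simplified, hypothetical types; not GCC's streamer API):
   bit-packed flags must be unpacked in exactly the order and width in which
   they were packed, which is why isra_read_edge_summary below mirrors this
   function field for field.  A self-contained sketch of that symmetry:  */

#include <cassert>
#include <cstdint>

struct toy_bitpack
{
  uint64_t word = 0;
  unsigned pos = 0;
  void pack (uint64_t v, unsigned bits)
  { word |= (v & ((1ull << bits) - 1)) << pos; pos += bits; }
  uint64_t unpack (unsigned bits)
  { uint64_t v = (word >> pos) & ((1ull << bits) - 1); pos += bits; return v; }
};

int
main ()
{
  toy_bitpack w;
  w.pack (1, 1);		/* e.g. m_return_ignored */
  w.pack (0, 1);		/* e.g. m_return_returned */
  toy_bitpack r;
  r.word = w.word;		/* "Stream" the word to the reader.  */
  assert (r.unpack (1) == 1);	/* Unpacked in the same order...  */
  assert (r.unpack (1) == 0);	/* ...with the same widths.  */
  return 0;
}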
2751
2752/* Write intraprocedural analysis information about NODE and all of its outgoing
2753 edges into a stream for LTO WPA. */
2754
2755static void
2756isra_write_node_summary (output_block *ob, cgraph_node *node)
2757{
2758 isra_func_summary *ifs = func_sums->get (node);
2759 lto_symtab_encoder_t encoder = ob->decl_state->symtab_node_encoder;
2760 int node_ref = lto_symtab_encoder_encode (encoder, node);
2761 streamer_write_uhwi (ob, node_ref);
2762
2763 unsigned param_desc_count = vec_safe_length (ifs->m_parameters);
2764 streamer_write_uhwi (ob, param_desc_count);
2765 for (unsigned i = 0; i < param_desc_count; i++)
2766 {
2767 isra_param_desc *desc = &(*ifs->m_parameters)[i];
2768 unsigned access_count = vec_safe_length (desc->accesses);
2769 streamer_write_uhwi (ob, access_count);
2770 for (unsigned j = 0; j < access_count; j++)
2771 {
2772 param_access *acc = (*desc->accesses)[j];
2773 stream_write_tree (ob, acc->type, true);
2774 stream_write_tree (ob, acc->alias_ptr_type, true);
2775 streamer_write_uhwi (ob, acc->unit_offset);
2776 streamer_write_uhwi (ob, acc->unit_size);
2777 bitpack_d bp = bitpack_create (ob->main_stream);
2778 bp_pack_value (&bp, acc->certain, 1);
2779 bp_pack_value (&bp, acc->reverse, 1);
2780 streamer_write_bitpack (&bp);
2781 }
2782 streamer_write_uhwi (ob, desc->param_size_limit);
2783 streamer_write_uhwi (ob, desc->size_reached);
2784 gcc_assert (desc->safe_size == 0);
2785 bitpack_d bp = bitpack_create (ob->main_stream);
2786 bp_pack_value (&bp, desc->locally_unused, 1);
2787 bp_pack_value (&bp, desc->split_candidate, 1);
2788 bp_pack_value (&bp, desc->by_ref, 1);
2789 gcc_assert (!desc->not_specially_constructed);
2790 bp_pack_value (&bp, desc->conditionally_dereferenceable, 1);
2791 gcc_assert (!desc->safe_size_set);
2792 streamer_write_bitpack (&bp);
2793 }
2794 bitpack_d bp = bitpack_create (ob->main_stream);
2795 bp_pack_value (&bp, ifs->m_candidate, 1);
2796 bp_pack_value (&bp, ifs->m_returns_value, 1);
2797 bp_pack_value (&bp, ifs->m_return_ignored, 1);
2798 gcc_assert (!ifs->m_queued);
2799 streamer_write_bitpack (&bp);
2800
2801 for (cgraph_edge *e = node->callees; e; e = e->next_callee)
2802 isra_write_edge_summary (ob, e);
2803 for (cgraph_edge *e = node->indirect_calls; e; e = e->next_callee)
2804 isra_write_edge_summary (ob, e);
2805}
2806
2807/* Write intraprocedural analysis information into a stream for LTO WPA. */
2808
2809static void
2810ipa_sra_write_summary (void)
2811{
2812 if (!func_sums || !call_sums)
2813 return;
2814
2815 struct output_block *ob = create_output_block (LTO_section_ipa_sra);
2816 lto_symtab_encoder_t encoder = ob->decl_state->symtab_node_encoder;
2817 ob->symbol = NULL;
2818
2819 unsigned int count = 0;
2820 lto_symtab_encoder_iterator lsei;
2821 for (lsei = lsei_start_function_in_partition (encoder);
2822 !lsei_end_p (lsei);
2823 lsei_next_function_in_partition (&lsei))
2824 {
2825 cgraph_node *node = lsei_cgraph_node (lsei);
2826 if (node->has_gimple_body_p ()
2827 && func_sums->get (node) != NULL)
2828 count++;
2829 }
2830 streamer_write_uhwi (ob, count);
2831
2832 /* Process all of the functions. */
2833 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
2834 lsei_next_function_in_partition (&lsei))
2835 {
2836 cgraph_node *node = lsei_cgraph_node (lsei);
2837 if (node->has_gimple_body_p ()
2838 && func_sums->get (node) != NULL)
2839 isra_write_node_summary (ob, node);
2840 }
2841 streamer_write_char_stream (ob->main_stream, 0);
2842 produce_asm (ob, NULL);
2843 destroy_output_block (ob);
2844}
2845
2846 /* Read intraprocedural analysis information about edge CS from a stream for
2847 LTO WPA. */
2848
2849static void
2850isra_read_edge_summary (struct lto_input_block *ib, cgraph_edge *cs)
2851{
2852 isra_call_summary *csum = call_sums->get_create (cs);
2853 unsigned input_count = streamer_read_uhwi (ib);
2854 csum->init_inputs (input_count);
2855 for (unsigned i = 0; i < input_count; i++)
2856 {
2857 isra_param_flow *ipf = &csum->m_arg_flow[i];
2858 ipf->length = streamer_read_hwi (ib);
2859 bitpack_d bp = streamer_read_bitpack (ib);
2860 for (int j = 0; j < ipf->length; j++)
2861 ipf->inputs[j] = bp_unpack_value (&bp, 8);
2862 ipf->aggregate_pass_through = bp_unpack_value (&bp, 1);
2863 ipf->pointer_pass_through = bp_unpack_value (&bp, 1);
2864 ipf->safe_to_import_accesses = bp_unpack_value (&bp, 1);
2865 ipf->constructed_for_calls = bp_unpack_value (&bp, 1);
2866 ipf->unit_offset = streamer_read_uhwi (ib);
2867 ipf->unit_size = streamer_read_uhwi (ib);
2868 }
2869 bitpack_d bp = streamer_read_bitpack (ib);
2870 csum->m_return_ignored = bp_unpack_value (&bp, 1);
2871 csum->m_return_returned = bp_unpack_value (&bp, 1);
2872 csum->m_bit_aligned_arg = bp_unpack_value (&bp, 1);
2873 csum->m_before_any_store = bp_unpack_value (&bp, 1);
2874}
2875
2876 /* Read intraprocedural analysis information about NODE and all of its
2877 outgoing edges from a stream for LTO WPA. */
2878
2879static void
2880isra_read_node_info (struct lto_input_block *ib, cgraph_node *node,
2881 struct data_in *data_in)
2882{
2883 isra_func_summary *ifs = func_sums->get_create (node);
2884 unsigned param_desc_count = streamer_read_uhwi (ib);
2885 if (param_desc_count > 0)
2886 {
2887 vec_safe_reserve_exact (ifs->m_parameters, param_desc_count);
2888 ifs->m_parameters->quick_grow_cleared (param_desc_count);
2889 }
2890 for (unsigned i = 0; i < param_desc_count; i++)
2891 {
2892 isra_param_desc *desc = &(*ifs->m_parameters)[i];
2893 unsigned access_count = streamer_read_uhwi (ib);
2894 for (unsigned j = 0; j < access_count; j++)
2895 {
2896 param_access *acc = ggc_cleared_alloc<param_access> ();
2897 acc->type = stream_read_tree (ib, data_in);
2898 acc->alias_ptr_type = stream_read_tree (ib, data_in);
2899 acc->unit_offset = streamer_read_uhwi (ib);
2900 acc->unit_size = streamer_read_uhwi (ib);
2901 bitpack_d bp = streamer_read_bitpack (ib);
2902 acc->certain = bp_unpack_value (&bp, 1);
2903 acc->reverse = bp_unpack_value (&bp, 1);
2904 vec_safe_push (desc->accesses, acc);
2905 }
2906 desc->param_size_limit = streamer_read_uhwi (ib);
2907 desc->size_reached = streamer_read_uhwi (ib);
2908 desc->safe_size = 0;
2909 bitpack_d bp = streamer_read_bitpack (ib);
2910 desc->locally_unused = bp_unpack_value (&bp, 1);
2911 desc->split_candidate = bp_unpack_value (&bp, 1);
2912 desc->by_ref = bp_unpack_value (&bp, 1);
2913 desc->not_specially_constructed = 0;
2914 desc->conditionally_dereferenceable = bp_unpack_value (&bp, 1);
2915 desc->safe_size_set = 0;
2916 }
2917 bitpack_d bp = streamer_read_bitpack (ib);
2918 ifs->m_candidate = bp_unpack_value (&bp, 1);
2919 ifs->m_returns_value = bp_unpack_value (&bp, 1);
2920 ifs->m_return_ignored = bp_unpack_value (&bp, 1);
2921 ifs->m_queued = 0;
2922
2923 for (cgraph_edge *e = node->callees; e; e = e->next_callee)
2924 isra_read_edge_summary (ib, e);
2925 for (cgraph_edge *e = node->indirect_calls; e; e = e->next_callee)
2926 isra_read_edge_summary (ib, e);
2927}
2928
2929/* Read IPA-SRA summaries from a section in file FILE_DATA of length LEN with
2930 data DATA. TODO: This function was copied almost verbatim from ipa-prop.cc;
2931 it should be possible to unify them somehow. */
2932
2933static void
2934isra_read_summary_section (struct lto_file_decl_data *file_data,
2935 const char *data, size_t len)
2936{
2937 const struct lto_function_header *header =
2938 (const struct lto_function_header *) data;
2939 const int cfg_offset = sizeof (struct lto_function_header);
2940 const int main_offset = cfg_offset + header->cfg_size;
2941 const int string_offset = main_offset + header->main_size;
2942 struct data_in *data_in;
2943 unsigned int i;
2944 unsigned int count;
2945
2946 lto_input_block ib_main ((const char *) data + main_offset,
2947 header->main_size, file_data->mode_table);
2948
2949 data_in =
2950 lto_data_in_create (file_data, (const char *) data + string_offset,
2951 header->string_size, vNULL);
2952 count = streamer_read_uhwi (&ib_main);
2953
2954 for (i = 0; i < count; i++)
2955 {
2956 unsigned int index;
2957 struct cgraph_node *node;
2958 lto_symtab_encoder_t encoder;
2959
2960 index = streamer_read_uhwi (&ib_main);
2961 encoder = file_data->symtab_node_encoder;
2962 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
2963 index));
2964 gcc_assert (node->definition);
2965 isra_read_node_info (&ib_main, node, data_in);
2966 }
2967 lto_free_section_data (file_data, LTO_section_ipa_sra, NULL, data,
2968 len);
2969 lto_data_in_delete (data_in);
2970}
2971
2972 /* Read intraprocedural analysis information from a stream for LTO WPA. */
2973
2974static void
2975ipa_sra_read_summary (void)
2976{
2977 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
2978 struct lto_file_decl_data *file_data;
2979 unsigned int j = 0;
2980
2981 gcc_checking_assert (!func_sums);
2982 gcc_checking_assert (!call_sums);
2983 func_sums
2984 = (new (ggc_alloc_no_dtor <ipa_sra_function_summaries> ())
2985 ipa_sra_function_summaries (symtab, true));
2986 call_sums = new ipa_sra_call_summaries (symtab);
2987
2988 while ((file_data = file_data_vec[j++]))
2989 {
2990 size_t len;
2991 const char *data
2992 = lto_get_summary_section_data (file_data, LTO_section_ipa_sra, &len);
2993 if (data)
2994 isra_read_summary_section (file_data, data, len);
2995 }
2996}
2997
2998/* Dump all IPA-SRA summary data for all cgraph nodes and edges to file F. If
2999 HINTS is true, also dump IPA-analysis computed hints. */
3000
3001static void
3002ipa_sra_dump_all_summaries (FILE *f, bool hints)
3003{
3004 cgraph_node *node;
3005 FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
3006 {
3007 fprintf (f, "\nSummary for node %s:\n", node->dump_name ());
3008
3009 isra_func_summary *ifs = func_sums->get (node);
3010 if (!ifs)
3011 fprintf (f, " Function does not have any associated IPA-SRA "
3012 "summary\n");
3013 else if (!ifs->m_candidate)
3014 fprintf (f, " Not a candidate function\n");
3015 else
3016 {
3017 if (ifs->m_returns_value)
3018 fprintf (f, " Returns value\n");
3019 if (vec_safe_is_empty (ifs->m_parameters))
3020 fprintf (f, " No parameter information. \n");
3021 else
3022 for (unsigned i = 0; i < ifs->m_parameters->length (); ++i)
3023 {
3024 fprintf (f, " Descriptor for parameter %i:\n", i);
3025 dump_isra_param_descriptor (f, &(*ifs->m_parameters)[i], hints);
3026 }
3027 fprintf (f, "\n");
3028 }
3029
3030 struct cgraph_edge *cs;
3031 for (cs = node->callees; cs; cs = cs->next_callee)
3032 {
3033 fprintf (f, " Summary for edge %s->%s:\n", cs->caller->dump_name (),
3034 cs->callee->dump_name ());
3035 isra_call_summary *csum = call_sums->get (cs);
3036 if (csum)
3037 csum->dump (f);
3038 else
3039 fprintf (f, " Call summary is MISSING!\n");
3040 }
3041
3042 }
3043 fprintf (f, "\n\n");
3044}
3045
3046 /* Perform function-scope viability tests that can only be made at the IPA
3047 level and return false if the function is deemed unsuitable for IPA-SRA. */
3048
3049static bool
3050ipa_sra_ipa_function_checks (cgraph_node *node)
3051{
3052 if (!node->can_be_local_p ())
3053 {
3054 if (dump_file)
3055 fprintf (dump_file, "Function %s disqualified because it cannot be "
3056 "made local.\n", node->dump_name ());
3057 return false;
3058 }
3059 if (!node->can_change_signature)
3060 {
3061 if (dump_file)
3062 fprintf (dump_file, "Function cannot change signature.\n");
3063 return false;
3064 }
3065
3066 return true;
3067}
3068
3069 /* Issues found out by check_all_callers_for_issues. */
3070
3071struct caller_issues
3072{
3073 /* The candidate being considered. */
3074 cgraph_node *candidate;
3075 /* There is a thunk among callers. */
3076 bool thunk;
3077 /* Call site with no available information. */
3078 bool unknown_callsite;
3079 /* Call from outside the candidate's comdat group. */
3080 bool call_from_outside_comdat;
3081 /* There is a bit-aligned load into one of non-gimple-typed arguments. */
3082 bool bit_aligned_aggregate_argument;
3083};
3084
3085/* Worker for call_for_symbol_and_aliases, set any flags of passed caller_issues
3086 that apply. */
3087
3088static bool
3089check_for_caller_issues (struct cgraph_node *node, void *data)
3090{
3091 struct caller_issues *issues = (struct caller_issues *) data;
3092
3093 for (cgraph_edge *cs = node->callers; cs; cs = cs->next_caller)
3094 {
3095 if (cs->caller->thunk)
3096 {
3097 issues->thunk = true;
3098 /* TODO: We should be able to process at least some types of
3099 thunks. */
3100 return true;
3101 }
3102 if (issues->candidate->calls_comdat_local
3103 && issues->candidate->same_comdat_group
3104 && !issues->candidate->in_same_comdat_group_p (cs->caller))
3105 {
3106 issues->call_from_outside_comdat = true;
3107 return true;
3108 }
3109
3110 isra_call_summary *csum = call_sums->get (cs);
3111 if (!csum)
3112 {
3113 issues->unknown_callsite = true;
3114 return true;
3115 }
3116
3117 if (csum->m_bit_aligned_arg)
3118 issues->bit_aligned_aggregate_argument = true;
3119 }
3120 return false;
3121}
3122
3123 /* Look at all incoming edges to NODE, including aliases and thunks, and look
3124 for problems. Return true if NODE's type should not be modified at all. */
3125
3126static bool
3127check_all_callers_for_issues (cgraph_node *node)
3128{
3129 struct caller_issues issues;
3130 memset (&issues, 0, sizeof (issues));
3131 issues.candidate = node;
3132
3133 node->call_for_symbol_and_aliases (check_for_caller_issues, &issues, true);
3134 if (issues.unknown_callsite)
3135 {
3136 if (dump_file && (dump_flags & TDF_DETAILS))
3137 fprintf (dump_file, "A call of %s has not been analyzed. Disabling "
3138 "all modifications.\n", node->dump_name ());
3139 return true;
3140 }
3141 /* TODO: We should be able to process at least some types of thunks. */
3142 if (issues.thunk)
3143 {
3144 if (dump_file && (dump_flags & TDF_DETAILS))
3145 fprintf (dump_file, "A call of %s is through a thunk, which is not"
3146 " handled yet. Disabling all modifications.\n",
3147 node->dump_name ());
3148 return true;
3149 }
3150 if (issues.call_from_outside_comdat)
3151 {
3152 if (dump_file)
3153 fprintf (dump_file, "Function would become private comdat called "
3154 "outside of its comdat group.\n");
3155 return true;
3156 }
3157
3158 if (issues.bit_aligned_aggregate_argument)
3159 {
3160 /* Let's only remove parameters/return values from such functions.
3161 TODO: We could only prevent splitting the problematic parameters if
3162 anybody thinks it is worth it. */
3163 if (dump_file && (dump_flags & TDF_DETAILS))
3164 fprintf (dump_file, "A call of %s has bit-aligned aggregate argument,"
3165 " disabling parameter splitting.\n", node->dump_name ());
3166
3167 isra_func_summary *ifs = func_sums->get (node);
3168 gcc_checking_assert (ifs);
3169 unsigned param_count = vec_safe_length (ifs->m_parameters);
3170 for (unsigned i = 0; i < param_count; i++)
3171 (*ifs->m_parameters)[i].split_candidate = false;
3172 }
3173 return false;
3174}
3175
3176/* Find the access with corresponding OFFSET and SIZE among accesses in
3177 PARAM_DESC and return it or NULL if such an access is not there. */
3178
3179static param_access *
3180find_param_access (isra_param_desc *param_desc, unsigned offset, unsigned size)
3181{
3182 unsigned pclen = vec_safe_length (param_desc->accesses);
3183
3184 /* The search is linear but the number of stored accesses is bounded by
3185 PARAM_IPA_SRA_MAX_REPLACEMENTS, so most probably 8. */
3186
3187 for (unsigned i = 0; i < pclen; i++)
3188 if ((*param_desc->accesses)[i]->unit_offset == offset
3189 && (*param_desc->accesses)[i]->unit_size == size)
3190 return (*param_desc->accesses)[i];
3191
3192 return NULL;
3193}
3194
3195 /* Return true iff the total size of definite replacement SIZE would violate
3196 the limit set for it in DESC. */
3197
3198static bool
3199size_would_violate_limit_p (isra_param_desc *desc, unsigned size)
3200{
3201 unsigned limit = desc->param_size_limit;
3202 if (size > limit
3203 || (!desc->by_ref && size == limit))
3204 return true;
3205 return false;
3206}
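
/* Editorial aside (standalone illustration of the rule above): a
   by-reference parameter may reach param_size_limit exactly, while a
   by-value aggregate must stay strictly below it, presumably because
   replacing a by-value aggregate with pieces of the same total size gains
   nothing.  */

#include <cassert>

static bool
would_violate (bool by_ref, unsigned limit, unsigned size)
{
  /* Same condition as in size_would_violate_limit_p.  */
  return size > limit || (!by_ref && size == limit);
}

int
main ()
{
  assert (!would_violate (true, 8, 8));	 /* By-ref: size == limit is OK.  */
  assert (would_violate (false, 8, 8));	 /* By-value: must be strictly less.  */
  assert (would_violate (true, 8, 9));	 /* Over the limit always fails.  */
  return 0;
}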
3207
3208/* Increase reached size of DESC by SIZE or disqualify it if it would violate
3209 the set limit. IDX is the parameter number which is dumped when
3210 disqualifying. */
3211
3212static void
3213bump_reached_size (isra_param_desc *desc, unsigned size, unsigned idx)
3214{
3215 unsigned after = desc->size_reached + size;
3216 if (size_would_violate_limit_p (desc, after))
3217 {
3218 if (dump_file && (dump_flags & TDF_DETAILS))
3219 fprintf (dump_file, " ...size limit reached, disqualifying "
3220 "candidate parameter %u\n", idx);
3221 desc->split_candidate = false;
3222 return;
3223 }
3224 desc->size_reached = after;
3225}
3226
3227/* Take all actions required to deal with an edge CS that represents a call to
3228 an unknown or un-analyzed function, for both parameter removal and
3229 splitting. */
3230
3231static void
3232process_edge_to_unknown_caller (cgraph_edge *cs)
3233{
3234 isra_func_summary *from_ifs = func_sums->get (cs->caller);
3235 gcc_checking_assert (from_ifs);
3236 isra_call_summary *csum = call_sums->get (cs);
3237
3238 if (dump_file && (dump_flags & TDF_DETAILS))
3239 fprintf (dump_file, "Processing an edge to an unknown caller from %s:\n",
3240 cs->caller->dump_name ());
3241
3242 unsigned args_count = csum->m_arg_flow.length ();
3243 for (unsigned i = 0; i < args_count; i++)
3244 {
3245 isra_param_flow *ipf = &csum->m_arg_flow[i];
3246
3247 if (ipf->pointer_pass_through)
3248 {
3249 isra_param_desc *param_desc
3250 = &(*from_ifs->m_parameters)[get_single_param_flow_source (ipf)];
3251 param_desc->locally_unused = false;
3252 param_desc->split_candidate = false;
3253 continue;
3254 }
3255 if (ipf->aggregate_pass_through)
3256 {
3257 unsigned idx = get_single_param_flow_source (ipf);
3258 isra_param_desc *param_desc = &(*from_ifs->m_parameters)[idx];
3259
3260 param_desc->locally_unused = false;
3261 if (!param_desc->split_candidate)
3262 continue;
3263 gcc_assert (!param_desc->by_ref);
3264 param_access *pacc = find_param_access (param_desc, ipf->unit_offset,
3265 ipf->unit_size);
3266 gcc_checking_assert (pacc);
3267 pacc->certain = true;
3268 if (overlapping_certain_accesses_p (param_desc, NULL))
3269 {
3270 if (dump_file && (dump_flags & TDF_DETAILS))
3271 fprintf (dump_file, " ...leading to overlap, "
3272 " disqualifying candidate parameter %u\n",
3273 idx);
3274 param_desc->split_candidate = false;
3275 }
3276 else
3277 bump_reached_size (param_desc, pacc->unit_size, idx);
3278 ipf->aggregate_pass_through = false;
3279 continue;
3280 }
3281
3282 for (int j = 0; j < ipf->length; j++)
3283 {
3284 int input_idx = ipf->inputs[j];
3285 (*from_ifs->m_parameters)[input_idx].locally_unused = false;
3286 }
3287 }
3288}
3289
3290/* Propagate parameter removal information through cross-SCC edge CS,
3291 i.e. decrease the use count in the caller parameter descriptor for each use
3292 in this call. */
3293
3294static void
3295param_removal_cross_scc_edge (cgraph_edge *cs)
3296{
3297 enum availability availability;
3298 cgraph_node *callee = cs->callee->function_symbol (&availability);
3299 isra_func_summary *to_ifs = func_sums->get (callee);
3300 if (!to_ifs || !to_ifs->m_candidate
3301 || (availability < AVAIL_AVAILABLE)
3302 || vec_safe_is_empty (to_ifs->m_parameters))
3303 {
3304 process_edge_to_unknown_caller (cs);
3305 return;
3306 }
3307 isra_func_summary *from_ifs = func_sums->get (cs->caller);
3308 gcc_checking_assert (from_ifs);
3309
3310 isra_call_summary *csum = call_sums->get (cs);
3311 unsigned args_count = csum->m_arg_flow.length ();
3312 unsigned param_count = vec_safe_length (to_ifs->m_parameters);
3313
3314 for (unsigned i = 0; i < args_count; i++)
3315 {
3316 bool unused_in_callee;
3317 if (i < param_count)
3318 unused_in_callee = (*to_ifs->m_parameters)[i].locally_unused;
3319 else
3320 unused_in_callee = false;
3321
3322 if (!unused_in_callee)
3323 {
3324 isra_param_flow *ipf = &csum->m_arg_flow[i];
3325 for (int j = 0; j < ipf->length; j++)
3326 {
3327 int input_idx = ipf->inputs[j];
3328 (*from_ifs->m_parameters)[input_idx].locally_unused = false;
3329 }
3330 }
3331 }
3332}
3333
3334/* Unless it is already there, push NODE which is also described by IFS to
3335 STACK. */
3336
3337static void
3338isra_push_node_to_stack (cgraph_node *node, isra_func_summary *ifs,
3339 vec<cgraph_node *> *stack)
3340{
3341 if (!ifs->m_queued)
3342 {
3343 ifs->m_queued = true;
3344 stack->safe_push (node);
3345 }
3346}
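
/* Editorial aside (generic sketch with a hypothetical simplified node type):
   the m_queued flag above implements the classic "push each node at most
   once" work-list idiom used by the intra-SCC propagation later in this
   file.  */

#include <cassert>
#include <vector>

struct toy_node { bool queued = false; };

static void
push_once (toy_node *n, std::vector<toy_node *> *stack)
{
  if (!n->queued)
    {
      n->queued = true;
      stack->push_back (n);
    }
}

int
main ()
{
  toy_node a;
  std::vector<toy_node *> stack;
  push_once (&a, &stack);
  push_once (&a, &stack);	/* The duplicate push is ignored.  */
  assert (stack.size () == 1);
  return 0;
}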
3347
3348/* If parameter with index INPUT_IDX is marked as locally unused, mark it as
3349 used and push CALLER on STACK. */
3350
3351static void
3352isra_mark_caller_param_used (isra_func_summary *from_ifs, int input_idx,
3353 cgraph_node *caller, vec<cgraph_node *> *stack)
3354{
3355 if ((*from_ifs->m_parameters)[input_idx].locally_unused)
3356 {
3357 (*from_ifs->m_parameters)[input_idx].locally_unused = false;
3358 isra_push_node_to_stack (caller, from_ifs, stack);
3359 }
3360}
3361
3362/* Combine safe_size of DESC with SIZE and return true if it has changed. */
3363
3364static bool
3365update_safe_size (isra_param_desc *desc, unsigned size)
3366{
3367 if (!desc->safe_size_set)
3368 {
3369 desc->safe_size_set = 1;
3370 desc->safe_size = size;
3371 return true;
3372 }
3373 if (desc->safe_size <= size)
3374 return false;
3375 desc->safe_size = size;
3376 return true;
3377}
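
/* Editorial aside (standalone sketch): update_safe_size computes a running
   minimum over call sites, so after propagation safe_size holds the number
   of bytes guaranteed to be accessible through the pointer at every
   analyzed caller.  */

#include <cassert>

struct toy_desc { bool set = false; unsigned safe = 0; };

static bool
meet_safe_size (toy_desc *d, unsigned size)
{
  /* Mirrors update_safe_size above.  */
  if (!d->set)
    {
      d->set = true;
      d->safe = size;
      return true;
    }
  if (d->safe <= size)
    return false;
  d->safe = size;
  return true;
}

int
main ()
{
  toy_desc d;
  assert (meet_safe_size (&d, 16));   /* First caller guarantees 16 bytes.  */
  assert (meet_safe_size (&d, 8));    /* A smaller guarantee wins...  */
  assert (!meet_safe_size (&d, 32));  /* ...a larger one changes nothing.  */
  assert (d.safe == 8);
  return 0;
}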
3378
3379/* Set all param hints in DESC to the pessimistic values. Return true if any
3380 hints that need to potentially trigger further propagation have changed. */
3381
3382static bool
3383flip_all_hints_pessimistic (isra_param_desc *desc)
3384{
3385 desc->not_specially_constructed = true;
3386 return update_safe_size (desc, 0);
3387}
3388
3389 /* Because we have an unanalyzed or otherwise problematic caller, go over all
3390 parameter hint flags of IFS describing a call graph node of a callee and
3391 turn them pessimistic. Return true if any hints that need to potentially
3392 trigger further propagation have changed. */
3393
3394static bool
3395flip_all_param_hints_pessimistic (isra_func_summary *ifs)
3396{
3397 if (!ifs || !ifs->m_candidate)
3398 return false;
3399
3400 bool ret = false;
3401 unsigned param_count = vec_safe_length (ifs->m_parameters);
3402
3403 for (unsigned i = 0; i < param_count; i++)
3404 ret |= flip_all_hints_pessimistic (&(*ifs->m_parameters)[i]);
3405
3406 return ret;
3407}
3408
3409 /* Propagate hints across edge CS which ultimately leads to a node described
3410 by TO_IFS. Return true if any hints of the callee which should potentially
3411 trigger further propagation have changed. */
3412
3413static bool
3414propagate_param_hints_accross_call (cgraph_edge *cs, isra_func_summary *to_ifs)
3415{
3416 if (!to_ifs || !to_ifs->m_candidate)
3417 return false;
3418
3419 isra_call_summary *csum = call_sums->get (cs);
3420 bool ret = false;
3421 unsigned args_count = csum->m_arg_flow.length ();
3422 unsigned param_count = vec_safe_length (to_ifs->m_parameters);
3423
3424 for (unsigned i = 0; i < param_count; i++)
3425 {
3426 isra_param_desc *desc = &(*to_ifs->m_parameters)[i];
3427 if (i >= args_count)
3428 {
3429 ret |= flip_all_hints_pessimistic (desc);
3430 continue;
3431 }
3432
3433 if (desc->by_ref)
3434 {
3435 isra_param_flow *ipf = &csum->m_arg_flow[i];
3436
3437 if (!ipf->constructed_for_calls)
3438 desc->not_specially_constructed = true;
3439
3440 if (ipf->pointer_pass_through)
3441 {
3442 isra_func_summary *from_ifs = func_sums->get (cs->caller);
3443 int srcidx = get_single_param_flow_source (ipf);
3444 if (vec_safe_length (from_ifs->m_parameters) > (unsigned) srcidx)
3445 {
3446 isra_param_desc *src_d = &(*from_ifs->m_parameters)[srcidx];
3447 if (src_d->safe_size_set)
3448 ret |= update_safe_size (desc, src_d->safe_size);
3449 }
3450 else
3451 ret |= update_safe_size (desc, 0);
3452 }
3453 else if (!ipf->aggregate_pass_through)
3454 ret |= update_safe_size (desc, ipf->unit_size);
3455 else
3456 /* LTOing type-mismatched programs can end up here. */
3457 ret |= update_safe_size (desc, 0);
3458 }
3459 }
3460 return ret;
3461}
3462
3463 /* Propagate hints from NODE described by FROM_IFS to all its (direct) callees,
3464 push those that may need re-visiting onto STACK. */
3465
3466static void
3467propagate_hints_to_all_callees (cgraph_node *node, isra_func_summary *from_ifs,
3468 vec<cgraph_node *> *stack)
3469{
3470 for (cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
3471 {
3472 enum availability availability;
3473 cgraph_node *callee = cs->callee->function_symbol (&availability);
3474 isra_func_summary *to_ifs = func_sums->get (callee);
3475 if (!from_ifs)
3476 {
3477 if (flip_all_param_hints_pessimistic (to_ifs)
3478 && ipa_edge_within_scc (cs))
3479 isra_push_node_to_stack (callee, to_ifs, stack);
3480 }
3481 else if (propagate_param_hints_accross_call (cs, to_ifs)
3482 && ipa_edge_within_scc (cs))
3483 isra_push_node_to_stack (callee, to_ifs, stack);
3484 }
3485}
3486
3487/* Propagate information that any parameter is not used only locally within a
3488 SCC across CS to the caller, which must be in the same SCC as the
3489 callee. Push any callers that need to be re-processed to STACK. */
3490
3491static void
3492propagate_used_across_scc_edge (cgraph_edge *cs, vec<cgraph_node *> *stack)
3493{
3494 isra_func_summary *from_ifs = func_sums->get (cs->caller);
3495 if (!from_ifs || vec_safe_is_empty (from_ifs->m_parameters))
3496 return;
3497
3498 isra_call_summary *csum = call_sums->get (cs);
3499 gcc_checking_assert (csum);
3500 unsigned args_count = csum->m_arg_flow.length ();
3501 enum availability availability;
3502 cgraph_node *callee = cs->callee->function_symbol (&availability);
3503 isra_func_summary *to_ifs = func_sums->get (callee);
3504
3505 unsigned param_count
3506 = (to_ifs && (availability >= AVAIL_AVAILABLE))
3507 ? vec_safe_length (to_ifs->m_parameters) : 0;
3508 for (unsigned i = 0; i < args_count; i++)
3509 {
3510 if (i < param_count
3511 && (*to_ifs->m_parameters)[i].locally_unused)
3512 continue;
3513
3514 /* The argument is needed in the callee, so we must mark the parameter as
3515 used also in the caller and its callers within this SCC. */
3516 isra_param_flow *ipf = &csum->m_arg_flow[i];
3517 for (int j = 0; j < ipf->length; j++)
3518 {
3519 int input_idx = ipf->inputs[j];
3520 isra_mark_caller_param_used (from_ifs, input_idx, cs->caller, stack);
3521 }
3522 }
3523}
3524
3525/* Propagate information that any parameter is not used only locally within a
3526 SCC (i.e. is used also elsewhere) to all callers of NODE that are in the
3527 same SCC. Push any callers that need to be re-processed to STACK. */
3528
3529static bool
3530propagate_used_to_scc_callers (cgraph_node *node, void *data)
3531{
3532 vec<cgraph_node *> *stack = (vec<cgraph_node *> *) data;
3533 cgraph_edge *cs;
3534 for (cs = node->callers; cs; cs = cs->next_caller)
3535 if (ipa_edge_within_scc (cs))
3536 propagate_used_across_scc_edge (cs, stack);
3537 return false;
3538}
3539
3540/* Return true iff all certain accesses in ARG_DESC are also present as
3541 certain accesses in PARAM_DESC. */
3542
3543static bool
3544all_callee_accesses_present_p (isra_param_desc *param_desc,
3545 isra_param_desc *arg_desc)
3546{
3547 unsigned aclen = vec_safe_length (arg_desc->accesses);
3548 for (unsigned j = 0; j < aclen; j++)
3549 {
3550 param_access *argacc = (*arg_desc->accesses)[j];
3551 if (!argacc->certain)
3552 continue;
3553 param_access *pacc = find_param_access (param_desc, argacc->unit_offset,
3554 argacc->unit_size);
3555 if (!pacc
3556 || !pacc->certain
3557 || !types_compatible_p (argacc->type, pacc->type))
3558 return false;
3559 }
3560 return true;
3561}
3562
3563/* Type internal to function pull_accesses_from_callee. Unfortunately gcc 4.8
3564 does not allow instantiating an auto_vec with a type defined within a
3565 function, so it is a global type. */
3566enum acc_prop_kind {ACC_PROP_DONT, ACC_PROP_COPY, ACC_PROP_CERTAIN};
3567
3568
3569/* Attempt to propagate all definite accesses from ARG_DESC to PARAM_DESC,
3570 (which belongs to CALLER) if they would not violate some constraint there.
3571 If successful, return NULL, otherwise return the string reason for failure
3572 (which can be written to the dump file). DELTA_OFFSET is the known offset
3573 of the actual argument within the formal parameter (so of ARG_DESCS within
3574 PARAM_DESCS), ARG_SIZE is the size of the actual argument or zero if not
3575 known. In case of success, set *CHANGE_P to true if propagation actually
3576 changed anything. */
3577
3578static const char *
3579pull_accesses_from_callee (cgraph_node *caller, isra_param_desc *param_desc,
3580 isra_param_desc *arg_desc,
3581 unsigned delta_offset, unsigned arg_size,
3582 bool *change_p)
3583{
3584 unsigned pclen = vec_safe_length (param_desc->accesses);
3585 unsigned aclen = vec_safe_length (arg_desc->accesses);
3586 unsigned prop_count = 0;
3587 unsigned prop_size = 0;
3588 bool change = false;
3589
3590 auto_vec <enum acc_prop_kind, 8> prop_kinds (aclen);
3591 for (unsigned j = 0; j < aclen; j++)
3592 {
3593 param_access *argacc = (*arg_desc->accesses)[j];
3594 prop_kinds.safe_push (ACC_PROP_DONT);
3595
3596 if (arg_size > 0
3597 && argacc->unit_offset + argacc->unit_size > arg_size)
3598 return "callee access outsize size boundary";
3599
3600 if (!argacc->certain)
3601 continue;
3602
3603 unsigned offset = argacc->unit_offset + delta_offset;
3604 /* Given that accesses are initially stored according to increasing
3605 offset and decreasing size in case of equal offsets, the following
3606 searches could be written more efficiently if we kept the ordering
3607 when copying. But the number of accesses is capped at
3608 PARAM_IPA_SRA_MAX_REPLACEMENTS (so most likely 8) and the code gets
3609 messy quickly, so let's improve on that only if necessary. */
3610
3611 bool exact_match = false;
3612 for (unsigned i = 0; i < pclen; i++)
3613 {
3614 /* Check for overlaps. */
3615 param_access *pacc = (*param_desc->accesses)[i];
3616 if (pacc->unit_offset == offset
3617 && pacc->unit_size == argacc->unit_size)
3618 {
3619 if (argacc->alias_ptr_type != pacc->alias_ptr_type
3620 || !types_compatible_p (argacc->type, pacc->type)
3621 || argacc->reverse != pacc->reverse)
3622 return "propagated access types would not match existing ones";
3623
3624 exact_match = true;
3625 if (!pacc->certain)
3626 {
3627 prop_kinds[j] = ACC_PROP_CERTAIN;
3628 prop_size += argacc->unit_size;
3629 change = true;
3630 }
3631 continue;
3632 }
3633
3634 if (offset < pacc->unit_offset + pacc->unit_size
3635 && offset + argacc->unit_size > pacc->unit_offset)
3636 {
3637 /* No overlap is permissible with certain (load) accesses, but a
3638 propagated access may fit within uncertain argument ones. */
3639 if (pacc->certain
3640 || offset < pacc->unit_offset
3641 || (offset + argacc->unit_size
3642 > pacc->unit_offset + pacc->unit_size))
3643 return "a propagated access would conflict in caller";
3644 }
3645 }
3646
3647 if (!exact_match)
3648 {
3649 prop_kinds[j] = ACC_PROP_COPY;
3650 prop_count++;
3651 prop_size += argacc->unit_size;
3652 change = true;
3653 }
3654 }
3655
3656 if (!change)
3657 return NULL;
3658
3659 if ((prop_count + pclen
3660 > (unsigned) opt_for_fn (caller->decl, param_ipa_sra_max_replacements))
3661 || size_would_violate_limit_p (param_desc,
3662 param_desc->size_reached + prop_size))
3663 return "propagating accesses would violate the count or size limit";
3664
3665 *change_p = true;
3666 for (unsigned j = 0; j < aclen; j++)
3667 {
3668 if (prop_kinds[j] == ACC_PROP_COPY)
3669 {
3670 param_access *argacc = (*arg_desc->accesses)[j];
3671
3672 param_access *copy = ggc_cleared_alloc<param_access> ();
3673 copy->unit_offset = argacc->unit_offset + delta_offset;
3674 copy->unit_size = argacc->unit_size;
3675 copy->type = argacc->type;
3676 copy->alias_ptr_type = argacc->alias_ptr_type;
3677 copy->certain = true;
3678 copy->reverse = argacc->reverse;
3679 vec_safe_push (param_desc->accesses, copy);
3680 }
3681 else if (prop_kinds[j] == ACC_PROP_CERTAIN)
3682 {
3683 param_access *argacc = (*arg_desc->accesses)[j];
3684 param_access *csp
3685 = find_param_access (param_desc, argacc->unit_offset + delta_offset,
3686 argacc->unit_size);
3687 csp->certain = true;
3688 }
3689 }
3690
3691 param_desc->size_reached += prop_size;
3692
3693 return NULL;
3694}
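
/* Editorial aside (generic sketch with hypothetical types): the function
   above follows a validate-then-commit pattern -- the first loop only
   classifies accesses into prop_kinds and can bail out with a failure
   reason, and the caller's descriptor is mutated only after all checks have
   passed, so a failed pull leaves the summary untouched.  */

#include <vector>

/* Returns nullptr on success or a failure reason; DST is modified only when
   every element of SRC passes validation.  */
static const char *
merge_all_or_nothing (std::vector<int> *dst, const std::vector<int> &src,
		      unsigned limit)
{
  std::vector<bool> copy_it;
  for (int v : src)			/* Phase 1: classify, do not mutate.  */
    {
      if (v < 0)
	return "negative element encountered";
      copy_it.push_back (v > 0);
    }
  unsigned copies = 0;
  for (bool c : copy_it)
    copies += c;
  if (dst->size () + copies > limit)
    return "would violate the count limit";
  for (unsigned i = 0; i < src.size (); i++)	/* Phase 2: commit.  */
    if (copy_it[i])
      dst->push_back (src[i]);
  return nullptr;
}

int
main ()
{
  std::vector<int> dst;
  const char *err = merge_all_or_nothing (&dst, {1, 0, 2}, 8);
  return err != nullptr;	/* Success: dst now holds {1, 2}.  */
}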
3695
3696/* Propagate parameter splitting information through call graph edge CS.
3697 Return true if any changes that might need to be propagated within SCCs have
3698 been made. The function also clears the aggregate_pass_through and
3699 pointer_pass_through in call summaries which do not need to be processed
3700 again if this CS is revisited when iterating while changes are propagated
3701 within an SCC. */
3702
3703static bool
3704param_splitting_across_edge (cgraph_edge *cs)
3705{
3706 bool res = false;
3707 bool cross_scc = !ipa_edge_within_scc (cs);
3708 enum availability availability;
3709 cgraph_node *callee = cs->callee->function_symbol (&availability);
3710 isra_func_summary *from_ifs = func_sums->get (cs->caller);
3711 gcc_checking_assert (from_ifs && from_ifs->m_parameters);
3712
3713 isra_call_summary *csum = call_sums->get (cs);
3714 gcc_checking_assert (csum);
3715 unsigned args_count = csum->m_arg_flow.length ();
3716 isra_func_summary *to_ifs = func_sums->get (callee);
3717 unsigned param_count
3718 = ((to_ifs && to_ifs->m_candidate && (availability >= AVAIL_AVAILABLE))
3719 ? vec_safe_length (to_ifs->m_parameters)
3720 : 0);
3721
3722 if (dump_file && (dump_flags & TDF_DETAILS))
3723 fprintf (dump_file, "Splitting across %s->%s:\n",
3724 cs->caller->dump_name (), callee->dump_name ());
3725
3726 unsigned i;
3727 for (i = 0; (i < args_count) && (i < param_count); i++)
3728 {
3729 isra_param_desc *arg_desc = &(*to_ifs->m_parameters)[i];
3730 isra_param_flow *ipf = &csum->m_arg_flow[i];
3731
3732 if (arg_desc->locally_unused)
3733 {
3734 if (dump_file && (dump_flags & TDF_DETAILS))
3735 fprintf (dump_file, " ->%u: unused in callee\n", i);
3736 ipf->pointer_pass_through = false;
3737 continue;
3738 }
3739
3740 if (ipf->pointer_pass_through)
3741 {
3742 int idx = get_single_param_flow_source (ipf);
3743 isra_param_desc *param_desc = &(*from_ifs->m_parameters)[idx];
3744 if (!param_desc->split_candidate)
3745 continue;
3746 gcc_assert (param_desc->by_ref);
3747
3748 if (!arg_desc->split_candidate || !arg_desc->by_ref)
3749 {
3750 if (dump_file && (dump_flags & TDF_DETAILS))
3751 fprintf (dump_file, " %u->%u: not candidate or not by "
3752 "reference in callee\n", idx, i);
3753 param_desc->split_candidate = false;
3754 ipf->pointer_pass_through = false;
3755 res = true;
3756 }
3757 else if (!ipf->safe_to_import_accesses)
3758 {
3759 if (!csum->m_before_any_store
3760 || !all_callee_accesses_present_p (param_desc, arg_desc))
3761 {
3762 if (dump_file && (dump_flags & TDF_DETAILS))
3763 fprintf (dump_file, " %u->%u: cannot import accesses.\n",
3764 idx, i);
3765 param_desc->split_candidate = false;
3766 ipf->pointer_pass_through = false;
3767 res = true;
3768
3769 }
3770 else
3771 {
3772 if (dump_file && (dump_flags & TDF_DETAILS))
3773 fprintf (dump_file, " %u->%u: verified callee accesses "
3774 "present.\n", idx, i);
3775 if (cross_scc)
3776 ipf->pointer_pass_through = false;
3777 }
3778 }
3779 else
3780 {
3781 const char *pull_failure
3782 = pull_accesses_from_callee (cs->caller, param_desc, arg_desc,
3783 0, 0, &res);
3784 if (pull_failure)
3785 {
3786 if (dump_file && (dump_flags & TDF_DETAILS))
3787 fprintf (dump_file, " %u->%u: by_ref access pull "
3788 "failed: %s.\n", idx, i, pull_failure);
3789 param_desc->split_candidate = false;
3790 ipf->pointer_pass_through = false;
3791 res = true;
3792 }
3793 else
3794 {
3795 if (dump_file && (dump_flags & TDF_DETAILS))
3796 fprintf (dump_file, " %u->%u: by_ref access pull "
3797 "succeeded.\n", idx, i);
3798 if (cross_scc)
3799 ipf->pointer_pass_through = false;
3800 }
3801 }
3802 }
3803 else if (ipf->aggregate_pass_through)
3804 {
3805 int idx = get_single_param_flow_source (ipf);
3806 isra_param_desc *param_desc = &(*from_ifs->m_parameters)[idx];
3807 if (!param_desc->split_candidate)
3808 continue;
3809 gcc_assert (!param_desc->by_ref);
3810 param_access *pacc = find_param_access (param_desc, ipf->unit_offset,
3811 ipf->unit_size);
3812 gcc_checking_assert (pacc);
3813
3814 if (pacc->certain)
3815 {
3816 if (dump_file && (dump_flags & TDF_DETAILS))
3817 fprintf (dump_file, " %u->%u: already certain\n", idx, i);
3818 ipf->aggregate_pass_through = false;
3819 }
3820 else if (!arg_desc->split_candidate || arg_desc->by_ref)
3821 {
3822 if (dump_file && (dump_flags & TDF_DETAILS))
3823 fprintf (dump_file, " %u->%u: not candidate or by "
3824 "reference in callee\n", idx, i);
3825
3826 pacc->certain = true;
3827 if (overlapping_certain_accesses_p (param_desc, NULL))
3828 {
3829 if (dump_file && (dump_flags & TDF_DETAILS))
3830 fprintf (dump_file, " ...leading to overlap, "
3831 " disqualifying candidate parameter %u\n",
3832 idx);
3833 param_desc->split_candidate = false;
3834 }
3835 else
3836 bump_reached_size (param_desc, pacc->unit_size, idx);
3837
3838 ipf->aggregate_pass_through = false;
3839 res = true;
3840 }
3841 else
3842 {
3843 const char *pull_failure
3844 = pull_accesses_from_callee (cs->caller, param_desc, arg_desc,
3845 ipf->unit_offset,
3846 ipf->unit_size, &res);
3847 if (pull_failure)
3848 {
3849 if (dump_file && (dump_flags & TDF_DETAILS))
3850 fprintf (dump_file, " %u->%u: arg access pull "
3851 "failed: %s.\n", idx, i, pull_failure);
3852
3853 ipf->aggregate_pass_through = false;
3854 pacc->certain = true;
3855
3856 if (overlapping_certain_accesses_p (param_desc, NULL))
3857 {
3858 if (dump_file && (dump_flags & TDF_DETAILS))
3859 fprintf (dump_file, " ...leading to overlap, "
3860 " disqualifying candidate parameter %u\n",
3861 idx);
3862 param_desc->split_candidate = false;
3863 }
3864 else
3865 bump_reached_size (param_desc, pacc->unit_size, idx);
3866
3867 res = true;
3868 }
3869 else
3870 {
3871 if (dump_file && (dump_flags & TDF_DETAILS))
3872 fprintf (dump_file, " %u->%u: arg access pull "
3873 "succeeded.\n", idx, i);
3874 if (cross_scc)
3875 ipf->aggregate_pass_through = false;
3876 }
3877 }
3878 }
3879 }
3880
3881 /* Handle argument-parameter count mismatches. */
3882 for (; (i < args_count); i++)
3883 {
3884 isra_param_flow *ipf = &csum->m_arg_flow[i];
3885
3886 if (ipf->pointer_pass_through || ipf->aggregate_pass_through)
3887 {
3888 int idx = get_single_param_flow_source (ipf);
3889 ipf->pointer_pass_through = false;
3890 ipf->aggregate_pass_through = false;
3891 isra_param_desc *param_desc = &(*from_ifs->m_parameters)[idx];
3892 if (!param_desc->split_candidate)
3893 continue;
3894
3895 if (dump_file && (dump_flags & TDF_DETAILS))
3896 fprintf (dump_file, " %u->%u: no corresponding formal parameter\n",
3897 idx, i);
3898 param_desc->split_candidate = false;
3899 res = true;
3900 }
3901 }
3902 return res;
3903}
3904
3905 /* Worker for call_for_symbol_and_aliases. Look at all callers of NODE and,
3906 if all of them ignore the return value or come from the same SCC and use
3907 the return value only to compute their own return value, return false;
3908 otherwise return true. */
3909
3910static bool
3911retval_used_p (cgraph_node *node, void *)
3912{
3913 for (cgraph_edge *cs = node->callers; cs; cs = cs->next_caller)
3914 {
3915 isra_call_summary *csum = call_sums->get (cs);
3916 gcc_checking_assert (csum);
3917 if (csum->m_return_ignored)
3918 continue;
3919 if (!csum->m_return_returned)
3920 return true;
3921
3922 isra_func_summary *from_ifs = func_sums->get (cs->caller);
3923 if (!from_ifs || !from_ifs->m_candidate)
3924 return true;
3925
3926 if (!ipa_edge_within_scc (cs)
3927 && !from_ifs->m_return_ignored)
3928 return true;
3929 }
3930
3931 return false;
3932}
3933
3934/* Push into NEW_PARAMS all required parameter adjustment entries to copy or
3935 modify the parameter which originally had index BASE_INDEX, had index
3936 PREV_CLONE_INDEX in the adjustment vector of the parent clone (if any), and
3937 was described by PREV_ADJUSTMENT. If IPA-CP has created a transformation
3938 summary for the original node, it needs to be passed in IPCP_TS, otherwise
3939 it should be NULL. If the parent clone is the original function,
3940 PREV_ADJUSTMENT is NULL and PREV_CLONE_INDEX is equal to BASE_INDEX. */
3941
3942static void
3943push_param_adjustments_for_index (isra_func_summary *ifs, unsigned base_index,
3944 unsigned prev_clone_index,
3945 ipa_adjusted_param *prev_adjustment,
3946 ipcp_transformation *ipcp_ts,
3947 vec<ipa_adjusted_param, va_gc> **new_params)
3948{
3949 isra_param_desc *desc = &(*ifs->m_parameters)[base_index];
3950 if (desc->locally_unused)
3951 {
3952 if (dump_file)
3953 fprintf (dump_file, " Will remove parameter %u\n", base_index);
3954 return;
3955 }
3956
3957 if (!desc->split_candidate)
3958 {
3959 ipa_adjusted_param adj;
3960 if (prev_adjustment)
3961 {
3962 adj = *prev_adjustment;
3963 adj.prev_clone_adjustment = true;
3964 adj.prev_clone_index = prev_clone_index;
3965 }
3966 else
3967 {
3968 memset (&adj, 0, sizeof (adj));
3969 adj.op = IPA_PARAM_OP_COPY;
3970 adj.base_index = base_index;
3971 adj.prev_clone_index = prev_clone_index;
3972 }
3973 vec_safe_push ((*new_params), adj);
3974 return;
3975 }
3976
3977 if (dump_file)
3978 fprintf (dump_file, " Will split parameter %u\n", base_index);
3979
3980 gcc_assert (!prev_adjustment || prev_adjustment->op == IPA_PARAM_OP_COPY);
3981 unsigned aclen = vec_safe_length (desc->accesses);
3982 for (unsigned j = 0; j < aclen; j++)
3983 {
3984 param_access *pa = (*desc->accesses)[j];
3985 if (!pa->certain)
3986 continue;
3987
3988 if (ipcp_ts)
3989 {
3990 ipa_argagg_value_list avl (ipcp_ts);
3991 tree value = avl.get_value (base_index, pa->unit_offset);
3992 if (value && !AGGREGATE_TYPE_P (pa->type))
3993 {
3994 if (dump_file)
3995 fprintf (dump_file, " - omitting component at byte "
3996 "offset %u which is known to have a constant value\n ",
3997 pa->unit_offset);
3998 continue;
3999 }
4000 }
4001
4002 if (dump_file)
4003 fprintf (dump_file, " - component at byte offset %u, "
4004 "size %u\n", pa->unit_offset, pa->unit_size);
4005
4006 ipa_adjusted_param adj;
4007 memset (&adj, 0, sizeof (adj));
4008 adj.op = IPA_PARAM_OP_SPLIT;
4009 adj.base_index = base_index;
4010 adj.prev_clone_index = prev_clone_index;
4011 adj.param_prefix_index = IPA_PARAM_PREFIX_ISRA;
4012 adj.reverse = pa->reverse;
4013 adj.type = pa->type;
4014 adj.alias_ptr_type = pa->alias_ptr_type;
4015 adj.unit_offset = pa->unit_offset;
4016 vec_safe_push ((*new_params), adj);
4017 }
4018}
4019
4020 /* Worker for call_for_symbol_thunks_and_aliases. Set the calls_comdat_local
4021 flag of all callers of NODE. */
4022
4023static bool
4024mark_callers_calls_comdat_local (struct cgraph_node *node, void *)
4025{
4026 for (cgraph_edge *cs = node->callers; cs; cs = cs->next_caller)
4027 cs->caller->calls_comdat_local = true;
4028 return false;
4029}
4030
4031
4032/* Do final processing of results of IPA propagation regarding NODE, clone it
4033 if appropriate. */
4034
4035static void
4036process_isra_node_results (cgraph_node *node,
4037 hash_map<const char *, unsigned> *clone_num_suffixes)
4038{
4039 isra_func_summary *ifs = func_sums->get (node);
4040 if (!ifs || !ifs->m_candidate)
4041 return;
4042
4043 auto_vec<bool, 16> surviving_params;
4044 bool check_surviving = false;
4045 clone_info *cinfo = clone_info::get (node);
4046 if (cinfo && cinfo->param_adjustments)
4047 {
4048 check_surviving = true;
4049 cinfo->param_adjustments->get_surviving_params (&surviving_params);
4050 }
4051
4052 unsigned param_count = vec_safe_length (ifs->m_parameters);
4053 bool will_change_function = false;
4054 if (ifs->m_returns_value && ifs->m_return_ignored)
4055 will_change_function = true;
4056 else
4057 for (unsigned i = 0; i < param_count; i++)
4058 {
4059 isra_param_desc *desc = &(*ifs->m_parameters)[i];
4060 if ((desc->locally_unused || desc->split_candidate)
4061 /* Make sure we do not clone just to attempt to remove an already
4062 removed unused argument. */
4063 && (!check_surviving
4064 || (i < surviving_params.length ()
4065 && surviving_params[i])))
4066 {
4067 will_change_function = true;
4068 break;
4069 }
4070 }
4071 if (!will_change_function)
4072 return;
4073
4074 if (dump_file)
4075 {
4076 fprintf (dump_file, "\nEvaluating analysis results for %s\n",
4077 node->dump_name ());
4078 if (ifs->m_returns_value && ifs->m_return_ignored)
4079 fprintf (dump_file, " Will remove return value.\n");
4080 }
4081
4082 ipcp_transformation *ipcp_ts = ipcp_get_transformation_summary (node);
4083 vec<ipa_adjusted_param, va_gc> *new_params = NULL;
4084 if (ipa_param_adjustments *old_adjustments
4085 = cinfo ? cinfo->param_adjustments : NULL)
4086 {
4087 unsigned old_adj_len = vec_safe_length (old_adjustments->m_adj_params);
4088 for (unsigned i = 0; i < old_adj_len; i++)
4089 {
4090 ipa_adjusted_param *old_adj = &(*old_adjustments->m_adj_params)[i];
4091 push_param_adjustments_for_index (ifs, old_adj->base_index, i,
4092 old_adj, ipcp_ts, &new_params);
4093 }
4094 }
4095 else
4096 for (unsigned i = 0; i < param_count; i++)
4097 push_param_adjustments_for_index (ifs, i, i, NULL, ipcp_ts, &new_params);
4098
4099 ipa_param_adjustments *new_adjustments
4100 = (new (ggc_alloc <ipa_param_adjustments> ())
4101 ipa_param_adjustments (new_params, param_count,
4102 ifs->m_returns_value && ifs->m_return_ignored));
4103
4104 if (dump_file && (dump_flags & TDF_DETAILS))
4105 {
4106 fprintf (dump_file, "\n Created adjustments:\n");
4107 new_adjustments->dump (dump_file);
4108 }
4109
4110 unsigned &suffix_counter = clone_num_suffixes->get_or_insert (
4111 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (
4112 node->decl)));
4113 auto_vec<cgraph_edge *> callers = node->collect_callers ();
4114 cgraph_node *new_node
4115 = node->create_virtual_clone (callers, NULL, new_adjustments, "isra",
4116 suffix_counter);
4117 suffix_counter++;
4118 if (node->calls_comdat_local && node->same_comdat_group)
4119 {
4120 new_node->add_to_same_comdat_group (node);
4121 new_node->call_for_symbol_and_aliases (mark_callers_calls_comdat_local,
4122 NULL, true);
4123 }
4124 new_node->calls_comdat_local = node->calls_comdat_local;
4125
4126 if (dump_file)
4127 fprintf (dump_file, " Created new node %s\n", new_node->dump_name ());
4128 callers.release ();
4129}
4130
4131/* If INDICES is not empty, dump a combination of NODE's dump_name and MSG
4132 followed by the list of numbers in INDICES. */
4133
4134static void
4135dump_list_of_param_indices (const cgraph_node *node, const char* msg,
4136 const vec<unsigned> &indices)
4137{
4138 if (indices.is_empty ())
4139 return;
4140 fprintf (dump_file, "The following parameters of %s %s:", node->dump_name (),
4141 msg);
4142 for (unsigned i : indices)
4143 fprintf (dump_file, " %u", i);
4144 fprintf (dump_file, "\n");
4145}
4146
4147/* Check which parameters of NODE described by IFS have survived until IPA-SRA
4148 and disable transformations for those which have not or which should not be
4149 transformed because the associated debug counter reached its limit. Return
4150 true if none survived or if there were no candidates to begin with.
4151 Additionally, also adjust parameter descriptions based on debug counters and
4152 hints propagated earlier. */
4153
4154static bool
4155adjust_parameter_descriptions (cgraph_node *node, isra_func_summary *ifs)
4156{
4157 bool ret = true;
4158 unsigned len = vec_safe_length (ifs->m_parameters);
4159 if (!len)
4160 return true;
4161
4162 auto_vec<bool, 16> surviving_params;
4163 bool check_surviving = false;
4164 clone_info *cinfo = clone_info::get (node);
4165 if (cinfo && cinfo->param_adjustments)
4166 {
4167 check_surviving = true;
4168 cinfo->param_adjustments->get_surviving_params (&surviving_params);
4169 }
4170 ipcp_transformation *ipcp_ts = ipcp_get_transformation_summary (node);
4171 auto_vec <unsigned> dump_dead_indices;
4172 auto_vec <unsigned> dump_bad_cond_indices;
4173 for (unsigned i = 0; i < len; i++)
4174 {
4175 isra_param_desc *desc = &(*ifs->m_parameters)[i];
4176 if (!dbg_cnt (ipa_sra_params))
4177 {
4178 desc->locally_unused = false;
4179 desc->split_candidate = false;
4180 }
4181 else if (check_surviving
4182 && (i >= surviving_params.length ()
4183 || !surviving_params[i]))
4184 {
4185 /* Even if the parameter was removed by a previous IPA pass, we do
4186 not clear locally_unused because if it really is unused, this
4187 information might be useful in callers. */
4188 desc->split_candidate = false;
4189
4190 if (dump_file && (dump_flags & TDF_DETAILS))
4191 dump_dead_indices.safe_push (i);
4192 }
4193 else
4194 {
4195 if (desc->split_candidate && desc->conditionally_dereferenceable)
4196 {
4197	      gcc_assert (desc->safe_size_set);
4198 for (param_access *pa : *desc->accesses)
4199 if ((pa->unit_offset + pa->unit_size) > desc->safe_size)
4200 {
4201 if (dump_file && (dump_flags & TDF_DETAILS))
4202 dump_bad_cond_indices.safe_push (i);
4203 desc->split_candidate = false;
4204 break;
4205 }
4206 }
4207
4208 if (desc->split_candidate)
4209 {
4210 if (desc->by_ref && !desc->not_specially_constructed)
4211 {
4212 int extra_factor
4213		    = opt_for_fn (node->decl,
4214				  param_ipa_sra_ptrwrap_growth_factor);
4215 desc->param_size_limit = extra_factor * desc->param_size_limit;
4216 }
4217 if (size_would_violate_limit_p (desc, desc->size_reached))
4218 desc->split_candidate = false;
4219 }
4220
4221 /* Avoid ICEs on size-mismatched VIEW_CONVERT_EXPRs when callers and
4222 callees don't agree on types in aggregates and we try to do both
4223 IPA-CP and IPA-SRA. */
4224 if (ipcp_ts && desc->split_candidate)
4225 {
4226 ipa_argagg_value_list avl (ipcp_ts);
4227 for (const param_access *pa : desc->accesses)
4228 {
4229 if (!pa->certain)
4230 continue;
4231 tree value = avl.get_value (i, pa->unit_offset);
4232 if (value
4233		   && ((tree_to_uhwi (TYPE_SIZE (TREE_TYPE (value)))
4234			/ BITS_PER_UNIT)
4235		       != pa->unit_size))
4236 {
4237 desc->split_candidate = false;
4238 if (dump_file && (dump_flags & TDF_DETAILS))
4239 dump_dead_indices.safe_push (i);
4240 break;
4241 }
4242 }
4243 }
4244
4245 if (desc->locally_unused || desc->split_candidate)
4246 ret = false;
4247 }
4248 }
4249
4250 dump_list_of_param_indices (node, "are dead on arrival or have a type "
4251 "mismatch with IPA-CP", dump_dead_indices);
4252 dump_list_of_param_indices (node, "are not safe to dereference in all "
4253 "callers", dump_bad_cond_indices);
4254
4255 return ret;
4256}
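
   As a worked instance of the safe-size check in this function (numbers
   illustrative): assuming a parameter with desc->safe_size == 8 and a
   recorded access with unit_offset == 4 and unit_size == 8, the access
   extends to byte 12 > 8, past the region known to be safely
   dereferenceable in every caller, so split_candidate is cleared and the
   index is queued for the "not safe to dereference in all callers" dump.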
4257
4258
4259/* Run the interprocedural part of IPA-SRA. */
4260
4261static unsigned int
4262ipa_sra_analysis (void)
4263{
4264 if (dump_file)
4265 {
4266 fprintf (dump_file, "\n========== IPA-SRA IPA stage ==========\n");
4267 ipa_sra_dump_all_summaries (dump_file, false);
4268 }
4269
4270  gcc_checking_assert (func_sums);
4271  gcc_checking_assert (call_sums);
4272  cgraph_node **order = XCNEWVEC (cgraph_node *, symtab->cgraph_count);
4273 auto_vec <cgraph_node *, 16> stack;
4274  int node_scc_count = ipa_reduced_postorder (order, true, NULL);
4275
4276 /* One sweep from callers to callees for return value removal. */
4277 for (int i = node_scc_count - 1; i >= 0 ; i--)
4278 {
4279 cgraph_node *scc_rep = order[i];
4280 vec<cgraph_node *> cycle_nodes = ipa_get_nodes_in_cycle (scc_rep);
4281
4282 /* Preliminary IPA function level checks. */
4283 for (cgraph_node *v : cycle_nodes)
4284 {
4285 isra_func_summary *ifs = func_sums->get (v);
4286 if (!ifs || !ifs->m_candidate)
4287 continue;
4288 if (!ipa_sra_ipa_function_checks (v)
4289 || check_all_callers_for_issues (v))
4290 ifs->zap ();
4291 }
4292
4293 for (cgraph_node *v : cycle_nodes)
4294 {
4295 isra_func_summary *ifs = func_sums->get (v);
4296 if (!ifs || !ifs->m_candidate)
4297 continue;
4298 bool return_needed
4299 = (ifs->m_returns_value
4300 && (!dbg_cnt (ipa_sra_retvalues)
4301 || v->call_for_symbol_and_aliases (retval_used_p,
4302						       NULL, true)));
4303 ifs->m_return_ignored = !return_needed;
4304 if (return_needed)
4305 isra_push_node_to_stack (v, ifs, &stack);
4306 }
4307
4308 while (!stack.is_empty ())
4309 {
4310 cgraph_node *node = stack.pop ();
4311 isra_func_summary *ifs = func_sums->get (node);
4312	  gcc_checking_assert (ifs && ifs->m_queued);
4313 ifs->m_queued = false;
4314
4315 for (cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
4316 if (ipa_edge_within_scc (cs)
4317 && call_sums->get (cs)->m_return_returned)
4318 {
4319 enum availability av;
4320 cgraph_node *callee = cs->callee->function_symbol (&av);
4321 isra_func_summary *to_ifs = func_sums->get (callee);
4322 if (to_ifs && to_ifs->m_return_ignored)
4323 {
4324 to_ifs->m_return_ignored = false;
4325 isra_push_node_to_stack (callee, to_ifs, &stack);
4326 }
4327 }
4328 }
4329
4330 /* Parameter hint propagation. */
4331 for (cgraph_node *v : cycle_nodes)
4332 {
4333 isra_func_summary *ifs = func_sums->get (v);
4334 propagate_hints_to_all_callees (v, ifs, &stack);
4335 }
4336
4337 while (!stack.is_empty ())
4338 {
4339 cgraph_node *node = stack.pop ();
4340 isra_func_summary *ifs = func_sums->get (node);
4341	  gcc_checking_assert (ifs && ifs->m_queued);
4342 ifs->m_queued = false;
4343 propagate_hints_to_all_callees (node, ifs, &stack);
4344 }
4345
4346 cycle_nodes.release ();
4347 }
4348
4349 /* One sweep from callees to callers for parameter removal and splitting. */
4350 for (int i = 0; i < node_scc_count; i++)
4351 {
4352 cgraph_node *scc_rep = order[i];
4353 vec<cgraph_node *> cycle_nodes = ipa_get_nodes_in_cycle (scc_rep);
4354
4355 /* First step of parameter removal. */
4356 for (cgraph_node *v : cycle_nodes)
4357 {
4358 isra_func_summary *ifs = func_sums->get (v);
4359 if (!ifs || !ifs->m_candidate)
4360 continue;
4361 if (adjust_parameter_descriptions (v, ifs))
4362 continue;
4363 for (cgraph_edge *cs = v->indirect_calls; cs; cs = cs->next_callee)
4364 process_edge_to_unknown_caller (cs);
4365 for (cgraph_edge *cs = v->callees; cs; cs = cs->next_callee)
4366 if (!ipa_edge_within_scc (cs))
4367 param_removal_cross_scc_edge (cs);
4368 }
4369
4370 /* Look at edges within the current SCC and propagate used-ness across
4371	 them, pushing onto the stack all nodes which might need to be
4372 revisited. */
4373 for (cgraph_node *v : cycle_nodes)
4374 v->call_for_symbol_thunks_and_aliases (propagate_used_to_scc_callers,
4375 &stack, true);
4376
4377 /* Keep revisiting and pushing until nothing changes. */
4378 while (!stack.is_empty ())
4379 {
4380 cgraph_node *v = stack.pop ();
4381 isra_func_summary *ifs = func_sums->get (v);
4382	  gcc_checking_assert (ifs && ifs->m_queued);
4383 ifs->m_queued = false;
4384
4385 v->call_for_symbol_thunks_and_aliases (propagate_used_to_scc_callers,
4386 &stack, true);
4387 }
4388
4389 /* Parameter splitting. */
4390 bool repeat_scc_access_propagation;
4391 do
4392 {
4393 repeat_scc_access_propagation = false;
4394 for (cgraph_node *v : cycle_nodes)
4395 {
4396 isra_func_summary *ifs = func_sums->get (v);
4397 if (!ifs
4398 || !ifs->m_candidate
4399 || vec_safe_is_empty (ifs->m_parameters))
4400 continue;
4401 for (cgraph_edge *cs = v->callees; cs; cs = cs->next_callee)
4402 if (param_splitting_across_edge (cs))
4403 repeat_scc_access_propagation = true;
4404 }
4405 }
4406 while (repeat_scc_access_propagation);
4407
4408      if (flag_checking)
4409 for (cgraph_node *v : cycle_nodes)
4410 verify_splitting_accesses (v, true);
4411
4412 cycle_nodes.release ();
4413 }
4414
4415 ipa_free_postorder_info ();
4416 free (order);
4417
4418 if (dump_file)
4419 {
4420 if (dump_flags & TDF_DETAILS)
4421 {
4422 fprintf (dump_file, "\n========== IPA-SRA propagation final state "
4423 " ==========\n");
4424 ipa_sra_dump_all_summaries (dump_file, true);
4425 }
4426 fprintf (dump_file, "\n========== IPA-SRA decisions ==========\n");
4427 }
4428
4429 hash_map<const char *, unsigned> *clone_num_suffixes
4430 = new hash_map<const char *, unsigned>;
4431
4432 cgraph_node *node;
4433  FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
4434 process_isra_node_results (node, clone_num_suffixes);
4435
4436 delete clone_num_suffixes;
4437 ggc_delete (func_sums);
4438  func_sums = NULL;
4439 delete call_sums;
4440  call_sums = NULL;
4441
4442 if (dump_file)
4443 fprintf (dump_file, "\n========== IPA SRA IPA analysis done "
4444 "==========\n\n");
4445 return 0;
4446}
4447
4448
4449const pass_data pass_data_ipa_sra =
4450{
4451 IPA_PASS, /* type */
4452 "sra", /* name */
4453 OPTGROUP_NONE, /* optinfo_flags */
4454 TV_IPA_SRA, /* tv_id */
4455 0, /* properties_required */
4456 0, /* properties_provided */
4457 0, /* properties_destroyed */
4458 0, /* todo_flags_start */
4459  ( TODO_dump_symtab | TODO_remove_functions ), /* todo_flags_finish */
4460};
4461
4462class pass_ipa_sra : public ipa_opt_pass_d
4463{
4464public:
4465 pass_ipa_sra (gcc::context *ctxt)
4466 : ipa_opt_pass_d (pass_data_ipa_sra, ctxt,
4467 ipa_sra_generate_summary, /* generate_summary */
4468 ipa_sra_write_summary, /* write_summary */
4469 ipa_sra_read_summary, /* read_summary */
4470		      NULL, /* write_optimization_summary */
4471		      NULL, /* read_optimization_summary */
4472		      NULL, /* stmt_fixup */
4473		      0, /* function_transform_todo_flags_start */
4474		      NULL, /* function_transform */
4475		      NULL) /* variable_transform */
4476 {}
4477
4478 /* opt_pass methods: */
4479 bool gate (function *) final override
4480 {
4481 /* TODO: We should remove the optimize check after we ensure we never run
4482 IPA passes when not optimizing. */
4483    return (flag_ipa_sra && optimize);
4484 }
4485
4486 unsigned int execute (function *) final override
4487 {
4488 return ipa_sra_analysis ();
4489 }
4490
4491}; // class pass_ipa_sra
4492
4493} // anon namespace
4494
4495/* Intraprocedural part of IPA-SRA analysis. Scan function body of NODE and
4496 create a summary structure describing IPA-SRA opportunities and constraints
4497 in it. */
4498
4499static void
4500ipa_sra_summarize_function (cgraph_node *node)
4501{
4502 if (dump_file)
4503 fprintf (dump_file, "Creating summary for %s/%i:\n", node->name (),
4504 node->order);
4505  gcc_obstack_init (&gensum_obstack);
4506 loaded_decls = new hash_set<tree>;
4507
4508  isra_func_summary *ifs = NULL;
4509 unsigned count = 0;
4510 if (ipa_sra_preliminary_function_checks (node))
4511 {
4512 ifs = func_sums->get_create (node);
4513 ifs->m_candidate = true;
4514      tree ret = TREE_TYPE (TREE_TYPE (node->decl));
4515      ifs->m_returns_value = (TREE_CODE (ret) != VOID_TYPE);
4516      for (tree parm = DECL_ARGUMENTS (node->decl);
4517	   parm;
4518	   parm = DECL_CHAIN (parm))
4519 count++;
4520 }
4521 auto_vec<gensum_param_desc, 16> param_descriptions (count);
4522
4523  struct function *fun = DECL_STRUCT_FUNCTION (node->decl);
4524 bool cfun_pushed = false;
4525 if (count > 0)
4526 {
4527 decl2desc = new hash_map<tree, gensum_param_desc *>;
4528 param_descriptions.reserve_exact (count);
4529 param_descriptions.quick_grow_cleared (count);
4530
4531 if (create_parameter_descriptors (node, &param_descriptions))
4532 {
4533 push_cfun (fun);
4534 cfun_pushed = true;
4535	  final_bbs = BITMAP_ALLOC (NULL);
4536	  bb_dereferences = XCNEWVEC (HOST_WIDE_INT,
4537				      unsafe_by_ref_count
4538				      * last_basic_block_for_fn (fun));
4539	  aa_walking_limit = opt_for_fn (node->decl, param_ipa_max_aa_steps);
4540 }
4541 }
4542 /* Scan function is run even when there are no removal or splitting
4543 candidates so that we can calculate hints on call edges which can be
4544 useful in callees. */
4545 scan_function (node, fun);
4546
4547 if (count > 0)
4548 {
4549 if (dump_file)
4550 {
4551 dump_gensum_param_descriptors (dump_file, node->decl,
4552 &param_descriptions);
4553 fprintf (dump_file, "----------------------------------------\n");
4554 }
4555
4556 process_scan_results (node, fun, ifs, &param_descriptions);
4557
4558 if (cfun_pushed)
4559 pop_cfun ();
4560 if (bb_dereferences)
4561 {
4562 free (bb_dereferences);
4563	  bb_dereferences = NULL;
4564	  BITMAP_FREE (final_bbs);
4565	  final_bbs = NULL;
4566 }
4567 }
4568 isra_analyze_all_outgoing_calls (node);
4569
4570 delete loaded_decls;
4571  loaded_decls = NULL;
4572 if (decl2desc)
4573 {
4574 delete decl2desc;
4575      decl2desc = NULL;
4576 }
4577  obstack_free (&gensum_obstack, NULL);
4578 if (dump_file)
4579 fprintf (dump_file, "\n\n");
4580  if (flag_checking)
4581 verify_splitting_accesses (node, false);
4582 return;
4583}
4584
4585ipa_opt_pass_d *
4586make_pass_ipa_sra (gcc::context *ctxt)
4587{
4588 return new pass_ipa_sra (ctxt);
4589}
4590
4591
4592#include "gt-ipa-sra.h"

/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/vec.h

1/* Vector API for GNU compiler.
2 Copyright (C) 2004-2023 Free Software Foundation, Inc.
3 Contributed by Nathan Sidwell <nathan@codesourcery.com>
4 Re-implemented in C++ by Diego Novillo <dnovillo@google.com>
5
6This file is part of GCC.
7
8GCC is free software; you can redistribute it and/or modify it under
9the terms of the GNU General Public License as published by the Free
10Software Foundation; either version 3, or (at your option) any later
11version.
12
13GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14WARRANTY; without even the implied warranty of MERCHANTABILITY or
15FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16for more details.
17
18You should have received a copy of the GNU General Public License
19along with GCC; see the file COPYING3. If not see
20<http://www.gnu.org/licenses/>. */
21
22#ifndef GCC_VEC_H
23#define GCC_VEC_H
24
25/* Some gen* files have no ggc support as the header file gtype-desc.h is
26 missing. Provide these definitions in case ggc.h has not been included.
27 This is not a problem because any code that runs before gengtype is built
28 will never need to use GC vectors.*/
29
30extern void ggc_free (void *);
31extern size_t ggc_round_alloc_size (size_t requested_size);
32extern void *ggc_realloc (void *, size_t MEM_STAT_DECL);
33
34/* Templated vector type and associated interfaces.
35
36 The interface functions are typesafe and use inline functions,
37 sometimes backed by out-of-line generic functions. The vectors are
38 designed to interoperate with the GTY machinery.
39
40 There are both 'index' and 'iterate' accessors. The index accessor
41 is implemented by operator[]. The iterator returns a boolean
42 iteration condition and updates the iteration variable passed by
43 reference. Because the iterator will be inlined, the address-of
44 can be optimized away.
45
46 Each operation that increases the number of active elements is
47 available in 'quick' and 'safe' variants. The former presumes that
48 there is sufficient allocated space for the operation to succeed
49 (it dies if there is not). The latter will reallocate the
50 vector, if needed. Reallocation causes an exponential increase in
51 vector size. If you know you will be adding N elements, it would
52 be more efficient to use the reserve operation before adding the
53 elements with the 'quick' operation. This will ensure there are at
54   least as many elements as you ask for; it will exponentially
55   increase if there are too few spare slots.  If you want to reserve a
56 specific number of slots, but do not want the exponential increase
57 (for instance, you know this is the last allocation), use the
58 reserve_exact operation. You can also create a vector of a
59 specific size from the get go.
60
61 You should prefer the push and pop operations, as they append and
62 remove from the end of the vector. If you need to remove several
63 items in one go, use the truncate operation. The insert and remove
64 operations allow you to change elements in the middle of the
65 vector. There are two remove operations, one which preserves the
66 element ordering 'ordered_remove', and one which does not
67 'unordered_remove'. The latter function copies the end element
68 into the removed slot, rather than invoke a memmove operation. The
69   'lower_bound' function will determine where to insert an item in the
70   array so that sorted order is maintained.
71
72 Vectors are template types with three arguments: the type of the
73 elements in the vector, the allocation strategy, and the physical
74 layout to use
75
76   Three allocation strategies are supported:
77
78 - Heap: allocation is done using malloc/free. This is the
79 default allocation strategy.
80
81 - GC: allocation is done using ggc_alloc/ggc_free.
82
83 - GC atomic: same as GC with the exception that the elements
84 themselves are assumed to be of an atomic type that does
85 not need to be garbage collected. This means that marking
86 routines do not need to traverse the array marking the
87 individual elements. This increases the performance of
88 GC activities.
89
90 Two physical layouts are supported:
91
92 - Embedded: The vector is structured using the trailing array
93 idiom. The last member of the structure is an array of size
94 1. When the vector is initially allocated, a single memory
95 block is created to hold the vector's control data and the
96 array of elements. These vectors cannot grow without
97 reallocation (see discussion on embeddable vectors below).
98
99 - Space efficient: The vector is structured as a pointer to an
100 embedded vector. This is the default layout. It means that
101 vectors occupy a single word of storage before initial
102 allocation. Vectors are allowed to grow (the internal
103 pointer is reallocated but the main vector instance does not
104 need to relocate).
105
106 The type, allocation and layout are specified when the vector is
107 declared.
108
109 If you need to directly manipulate a vector, then the 'address'
110 accessor will return the address of the start of the vector. Also
111 the 'space' predicate will tell you whether there is spare capacity
112 in the vector. You will not normally need to use these two functions.
113
114 Notes on the different layout strategies
115
116 * Embeddable vectors (vec<T, A, vl_embed>)
117
118 These vectors are suitable to be embedded in other data
119 structures so that they can be pre-allocated in a contiguous
120 memory block.
121
122 Embeddable vectors are implemented using the trailing array
123 idiom, thus they are not resizeable without changing the address
124 of the vector object itself. This means you cannot have
125 variables or fields of embeddable vector type -- always use a
126 pointer to a vector. The one exception is the final field of a
127 structure, which could be a vector type.
128
129 You will have to use the embedded_size & embedded_init calls to
130 create such objects, and they will not be resizeable (so the
131 'safe' allocation variants are not available).
132
133 Properties of embeddable vectors:
134
135 - The whole vector and control data are allocated in a single
136 contiguous block. It uses the trailing-vector idiom, so
137 allocation must reserve enough space for all the elements
138 in the vector plus its control data.
139 - The vector cannot be re-allocated.
140 - The vector cannot grow nor shrink.
141 - No indirections needed for access/manipulation.
142 - It requires 2 words of storage (prior to vector allocation).
143
144
145 * Space efficient vector (vec<T, A, vl_ptr>)
146
147 These vectors can grow dynamically and are allocated together
148 with their control data. They are suited to be included in data
149 structures. Prior to initial allocation, they only take a single
150 word of storage.
151
152 These vectors are implemented as a pointer to embeddable vectors.
153 The semantics allow for this pointer to be NULL to represent
154 empty vectors. This way, empty vectors occupy minimal space in
155 the structure containing them.
156
157 Properties:
158
159 - The whole vector and control data are allocated in a single
160 contiguous block.
161 - The whole vector may be re-allocated.
162 - Vector data may grow and shrink.
163 - Access and manipulation requires a pointer test and
164 indirection.
165 - It requires 1 word of storage (prior to vector allocation).
166
167 An example of their use would be,
168
169 struct my_struct {
170 // A space-efficient vector of tree pointers in GC memory.
171 vec<tree, va_gc, vl_ptr> v;
172 };
173
174 struct my_struct *s;
175
176 if (s->v.length ()) { we have some contents }
177 s->v.safe_push (decl); // append some decl onto the end
178 for (ix = 0; s->v.iterate (ix, &elt); ix++)
179 { do something with elt }
180*/
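
   As a minimal sketch of the reserve + 'quick' idiom described above
   (N and elements[] are hypothetical):

     auto_vec<tree> v;			// heap vector, released on scope exit
     v.reserve_exact (N);		// one allocation, no exponential slack
     for (unsigned i = 0; i < N; i++)
       v.quick_push (elements[i]);	// space guaranteed by the reserve above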
181
182/* Support function for statistics. */
183extern void dump_vec_loc_statistics (void);
184
185/* Hashtable mapping vec addresses to descriptors. */
186extern htab_t vec_mem_usage_hash;
187
188/* Control data for vectors. This contains the number of allocated
189 and used slots inside a vector. */
190
191struct vec_prefix
192{
193 /* FIXME - These fields should be private, but we need to cater to
194 compilers that have stricter notions of PODness for types. */
195
196 /* Memory allocation support routines in vec.cc. */
197 void register_overhead (void *, size_t, size_t CXX_MEM_STAT_INFO);
198 void release_overhead (void *, size_t, size_t, bool CXX_MEM_STAT_INFO);
199 static unsigned calculate_allocation (vec_prefix *, unsigned, bool);
200 static unsigned calculate_allocation_1 (unsigned, unsigned);
201
202 /* Note that vec_prefix should be a base class for vec, but we use
203 offsetof() on vector fields of tree structures (e.g.,
204 tree_binfo::base_binfos), and offsetof only supports base types.
205
206 To compensate, we make vec_prefix a field inside vec and make
207 vec a friend class of vec_prefix so it can access its fields. */
208 template <typename, typename, typename> friend struct vec;
209
210 /* The allocator types also need access to our internals. */
211 friend struct va_gc;
212 friend struct va_gc_atomic;
213 friend struct va_heap;
214
215 unsigned m_alloc : 31;
216 unsigned m_using_auto_storage : 1;
217 unsigned m_num;
218};
219
220/* Calculate the number of slots to reserve for a vector, making sure that
221   RESERVE slots are free.  If EXACT, grow exactly, otherwise grow
222 exponentially. PFX is the control data for the vector. */
223
224inline unsigned
225vec_prefix::calculate_allocation (vec_prefix *pfx, unsigned reserve,
226 bool exact)
227{
228 if (exact)
229 return (pfx ? pfx->m_num : 0) + reserve;
230 else if (!pfx)
231    return MAX (4, reserve);
232 return calculate_allocation_1 (pfx->m_alloc, pfx->m_num + reserve);
233}
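
   Reading the three cases above with concrete (illustrative) numbers:

     exact:     pfx->m_num == 10, reserve == 3  ->  10 + 3 == 13 slots
     no prefix: pfx == NULL, reserve == 2       ->  MAX (4, 2) == 4 slots
     otherwise: calculate_allocation_1 (pfx->m_alloc, pfx->m_num + reserve),
		which grows the allocation exponentially.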
234
235template<typename, typename, typename> struct vec;
236
237/* Valid vector layouts
238
239 vl_embed - Embeddable vector that uses the trailing array idiom.
240 vl_ptr - Space efficient vector that uses a pointer to an
241 embeddable vector. */
242struct vl_embed { };
243struct vl_ptr { };
244
245
246/* Types of supported allocations
247
248 va_heap - Allocation uses malloc/free.
249 va_gc - Allocation uses ggc_alloc.
250 va_gc_atomic - Same as GC, but individual elements of the array
251 do not need to be marked during collection. */
252
253/* Allocator type for heap vectors. */
254struct va_heap
255{
256 /* Heap vectors are frequently regular instances, so use the vl_ptr
257 layout for them. */
258 typedef vl_ptr default_layout;
259
260 template<typename T>
261 static void reserve (vec<T, va_heap, vl_embed> *&, unsigned, bool
262 CXX_MEM_STAT_INFO);
263
264 template<typename T>
265 static void release (vec<T, va_heap, vl_embed> *&);
266};
267
268
269/* Allocator for heap memory. Ensure there are at least RESERVE free
270 slots in V. If EXACT is true, grow exactly, else grow
271 exponentially. As a special case, if the vector had not been
272 allocated and RESERVE is 0, no vector will be created. */
273
274template<typename T>
275inline void
276va_heap::reserve (vec<T, va_heap, vl_embed> *&v, unsigned reserve, bool exact
277 MEM_STAT_DECL)
278{
279 size_t elt_size = sizeof (T);
280 unsigned alloc
281 = vec_prefix::calculate_allocation (v ? &v->m_vecpfx : 0, reserve, exact);
282  gcc_checking_assert (alloc);
283
284  if (GATHER_STATISTICS && v)
285 v->m_vecpfx.release_overhead (v, elt_size * v->allocated (),
286 v->allocated (), false);
287
288 size_t size = vec<T, va_heap, vl_embed>::embedded_size (alloc);
289 unsigned nelem = v ? v->length () : 0;
290 v = static_cast <vec<T, va_heap, vl_embed> *> (xrealloc (v, size));
291 v->embedded_init (alloc, nelem);
292
293  if (GATHER_STATISTICS)
294 v->m_vecpfx.register_overhead (v, alloc, elt_size PASS_MEM_STAT);
295}
296
297
298#if GCC_VERSION >= 4007
299#pragma GCC diagnostic push
300#pragma GCC diagnostic ignored "-Wfree-nonheap-object"
301#endif
302
303/* Free the heap space allocated for vector V. */
304
305template<typename T>
306void
307va_heap::release (vec<T, va_heap, vl_embed> *&v)
308{
309 size_t elt_size = sizeof (T);
310  if (v == NULL)
311 return;
312
313 if (GATHER_STATISTICS0)
314 v->m_vecpfx.release_overhead (v, elt_size * v->allocated (),
315 v->allocated (), true);
316 ::free (v);
317  v = NULL;
318}
319
320#if GCC_VERSION >= 4007
321#pragma GCC diagnostic pop
322#endif
323
324/* Allocator type for GC vectors. Notice that we need the structure
325 declaration even if GC is not enabled. */
326
327struct va_gc
328{
329 /* Use vl_embed as the default layout for GC vectors. Due to GTY
330 limitations, GC vectors must always be pointers, so it is more
331 efficient to use a pointer to the vl_embed layout, rather than
332 using a pointer to a pointer as would be the case with vl_ptr. */
333 typedef vl_embed default_layout;
334
335 template<typename T, typename A>
336 static void reserve (vec<T, A, vl_embed> *&, unsigned, bool
337 CXX_MEM_STAT_INFO);
338
339 template<typename T, typename A>
340 static void release (vec<T, A, vl_embed> *&v);
341};
342
343
344/* Free GC memory used by V and reset V to NULL. */
345
346template<typename T, typename A>
347inline void
348va_gc::release (vec<T, A, vl_embed> *&v)
349{
350 if (v)
351 ::ggc_free (v);
352  v = NULL;
353}
354
355
356/* Allocator for GC memory. Ensure there are at least RESERVE free
357 slots in V. If EXACT is true, grow exactly, else grow
358 exponentially. As a special case, if the vector had not been
359 allocated and RESERVE is 0, no vector will be created. */
360
361template<typename T, typename A>
362void
363va_gc::reserve (vec<T, A, vl_embed> *&v, unsigned reserve, bool exact
364 MEM_STAT_DECL)
365{
366 unsigned alloc
367    = vec_prefix::calculate_allocation (v ? &v->m_vecpfx : 0, reserve, exact);
17.1: 'v' is non-null
18: '?' condition is true
368  if (!alloc)
19: Assuming 'alloc' is 0
20: Taking true branch
369    {
370      ::ggc_free (v);
371      v = NULL;
21: Null pointer value stored to field 'accesses'
372      return;
373    }
374
375 /* Calculate the amount of space we want. */
376 size_t size = vec<T, A, vl_embed>::embedded_size (alloc);
377
378 /* Ask the allocator how much space it will really give us. */
379 size = ::ggc_round_alloc_size (size);
380
381 /* Adjust the number of slots accordingly. */
382 size_t vec_offset = sizeof (vec_prefix);
383 size_t elt_size = sizeof (T);
384 alloc = (size - vec_offset) / elt_size;
385
386 /* And finally, recalculate the amount of space we ask for. */
387 size = vec_offset + alloc * elt_size;
388
389 unsigned nelem = v ? v->length () : 0;
390 v = static_cast <vec<T, A, vl_embed> *> (::ggc_realloc (v, size
391 PASS_MEM_STAT));
392 v->embedded_init (alloc, nelem);
393}
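
   The early return above is exactly the step the analyzer's path takes
   (steps 19-21): when calculate_allocation yields 0, V is freed and the
   caller's pointer, in this report the 'accesses' field, is overwritten
   with NULL.  A hedged sketch of the guard a caller would need if that
   path were reachable ('n' and 'pa' are illustrative):

     vec_safe_reserve_exact (desc->accesses, n); // may null the pointer per steps 19-21
     if (desc->accesses)			 // re-check before dereferencing
       desc->accesses->quick_push (pa);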
394
395
396/* Allocator type for GC vectors. This is for vectors of types
397 atomics w.r.t. collection, so allocation and deallocation is
398 completely inherited from va_gc. */
399struct va_gc_atomic : va_gc
400{
401};
402
403
404/* Generic vector template. Default values for A and L indicate the
405 most commonly used strategies.
406
407 FIXME - Ideally, they would all be vl_ptr to encourage using regular
408 instances for vectors, but the existing GTY machinery is limited
409 in that it can only deal with GC objects that are pointers
410 themselves.
411
412 This means that vector operations that need to deal with
413 potentially NULL pointers, must be provided as free
414 functions (see the vec_safe_* functions above). */
415template<typename T,
416 typename A = va_heap,
417 typename L = typename A::default_layout>
418struct GTY((user)) vec
419{
420};
421
422/* Allow C++11 range-based 'for' to work directly on vec<T>*. */
423template<typename T, typename A, typename L>
424T* begin (vec<T,A,L> *v) { return v ? v->begin () : nullptr; }
425template<typename T, typename A, typename L>
426T* end (vec<T,A,L> *v) { return v ? v->end () : nullptr; }
427template<typename T, typename A, typename L>
428const T* begin (const vec<T,A,L> *v) { return v ? v->begin () : nullptr; }
429template<typename T, typename A, typename L>
430const T* end (const vec<T,A,L> *v) { return v ? v->end () : nullptr; }
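
   A minimal sketch of what these overloads enable (debug_tree stands in
   for any per-element action):

     vec<tree, va_gc> *v = NULL;
     for (tree t : v)	  // begin()/end() return nullptr for v == NULL,
       debug_tree (t);	  // so the body simply never runs on an empty vector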
431
432/* Generic vec<> debug helpers.
433
434 These need to be instantiated for each vec<TYPE> used throughout
435 the compiler like this:
436
437 DEFINE_DEBUG_VEC (TYPE)
438
439 The reason we have a debug_helper() is because GDB can't
440 disambiguate a plain call to debug(some_vec), and it must be called
441 like debug<TYPE>(some_vec). */
442
443template<typename T>
444void
445debug_helper (vec<T> &ref)
446{
447 unsigned i;
448 for (i = 0; i < ref.length (); ++i)
449 {
450      fprintf (stderr, "[%d] = ", i);
451      debug_slim (ref[i]);
452      fputc ('\n', stderr);
453 }
454}
455
456/* We need a separate va_gc variant here because default template
457   arguments for functions cannot be used in C++98.  Once this
458   restriction is removed, those variants should be folded with the
459 above debug_helper. */
460
461template<typename T>
462void
463debug_helper (vec<T, va_gc> &ref)
464{
465 unsigned i;
466 for (i = 0; i < ref.length (); ++i)
467 {
468      fprintf (stderr, "[%d] = ", i);
469      debug_slim (ref[i]);
470      fputc ('\n', stderr);
471 }
472}
473
474/* Macro to define debug(vec<T>) and debug(vec<T, va_gc>) helper
475 functions for a type T. */
476
477#define DEFINE_DEBUG_VEC(T) \
478  template void debug_helper (vec<T> &);		\
479  template void debug_helper (vec<T, va_gc> &);	\
480  /* Define the vec<T> debug functions.  */		\
481  DEBUG_FUNCTION void				\
482  debug (vec<T> &ref)				\
483  {							\
484    debug_helper <T> (ref);				\
485  }							\
486  DEBUG_FUNCTION void				\
487  debug (vec<T> *ptr)				\
488  {							\
489    if (ptr)						\
490      debug (*ptr);					\
491    else						\
492      fprintf (stderr, "<nil>\n");			\
493  }							\
494  /* Define the vec<T, va_gc> debug functions.  */	\
495  DEBUG_FUNCTION void				\
496  debug (vec<T, va_gc> &ref)				\
497  {							\
498    debug_helper <T> (ref);				\
499  }							\
500  DEBUG_FUNCTION void				\
501  debug (vec<T, va_gc> *ptr)				\
502  {							\
503    if (ptr)						\
504      debug (*ptr);					\
505    else						\
506      fprintf (stderr, "<nil>\n");			\
507  }
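
   A hedged sketch of instantiating these helpers for a hypothetical
   element type foo_t; a matching debug_slim must already exist, since
   debug_helper calls it:

     void
     debug_slim (foo_t f)		// hypothetical per-element printer
     {
       fprintf (stderr, "foo(%d)", f.id);
     }
     DEFINE_DEBUG_VEC (foo_t)
     // from GDB: call debug (my_vec) or debug (my_vec_ptr)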
508
509/* Default-construct N elements in DST. */
510
511template <typename T>
512inline void
513vec_default_construct (T *dst, unsigned n)
514{
515#ifdef BROKEN_VALUE_INITIALIZATION
516 /* Versions of GCC before 4.4 sometimes leave certain objects
517 uninitialized when value initialized, though if the type has
518 user defined default ctor, that ctor is invoked. As a workaround
519 perform clearing first and then the value initialization, which
520 fixes the case when value initialization doesn't initialize due to
521 the bugs and should initialize to all zeros, but still allows
522 vectors for types with user defined default ctor that initializes
523 some or all elements to non-zero. If T has no user defined
524 default ctor and some non-static data members have user defined
525 default ctors that initialize to non-zero the workaround will
526 still not work properly; in that case we just need to provide
527 user defined default ctor. */
528 memset (dst, '\0', sizeof (T) * n);
529#endif
530 for ( ; n; ++dst, --n)
531 ::new (static_cast<void*>(dst)) T ();
532}
533
534/* Copy-construct N elements in DST from *SRC. */
535
536template <typename T>
537inline void
538vec_copy_construct (T *dst, const T *src, unsigned n)
539{
540 for ( ; n; ++dst, ++src, --n)
541 ::new (static_cast<void*>(dst)) T (*src);
542}
543
544/* Type to provide zero-initialized values for vec<T, A, L>. This is
545 used to provide nil initializers for vec instances. Since vec must
546 be a trivially copyable type that can be copied by memcpy and zeroed
547 out by memset, it must have defaulted default and copy ctor and copy
548 assignment. To initialize a vec either use value initialization
549 (e.g., vec() or vec v{ };) or assign it the value vNULL. This isn't
550 needed for file-scope and function-local static vectors, which are
551 zero-initialized by default. */
552struct vnull { };
553constexpr vnull vNULL{ };
554
555
556/* Embeddable vector. These vectors are suitable to be embedded
557 in other data structures so that they can be pre-allocated in a
558 contiguous memory block.
559
560 Embeddable vectors are implemented using the trailing array idiom,
561 thus they are not resizeable without changing the address of the
562 vector object itself. This means you cannot have variables or
563 fields of embeddable vector type -- always use a pointer to a
564 vector. The one exception is the final field of a structure, which
565 could be a vector type.
566
567 You will have to use the embedded_size & embedded_init calls to
568 create such objects, and they will not be resizeable (so the 'safe'
569 allocation variants are not available).
570
571 Properties:
572
573 - The whole vector and control data are allocated in a single
574 contiguous block. It uses the trailing-vector idiom, so
575 allocation must reserve enough space for all the elements
576 in the vector plus its control data.
577 - The vector cannot be re-allocated.
578 - The vector cannot grow nor shrink.
579 - No indirections needed for access/manipulation.
580 - It requires 2 words of storage (prior to vector allocation). */
581
582template<typename T, typename A>
583struct GTY((user)) vec<T, A, vl_embed>
584{
585public:
586 unsigned allocated (void) const { return m_vecpfx.m_alloc; }
587 unsigned length (void) const { return m_vecpfx.m_num; }
588 bool is_empty (void) const { return m_vecpfx.m_num == 0; }
589 T *address (void) { return reinterpret_cast <T *> (this + 1); }
590 const T *address (void) const
591 { return reinterpret_cast <const T *> (this + 1); }
592 T *begin () { return address (); }
593 const T *begin () const { return address (); }
594 T *end () { return address () + length (); }
595 const T *end () const { return address () + length (); }
596 const T &operator[] (unsigned) const;
597 T &operator[] (unsigned);
598 T &last (void);
599 bool space (unsigned) const;
600 bool iterate (unsigned, T *) const;
601 bool iterate (unsigned, T **) const;
602 vec *copy (ALONE_CXX_MEM_STAT_INFO) const;
603 void splice (const vec &);
604 void splice (const vec *src);
605 T *quick_push (const T &);
606 T &pop (void);
607 void truncate (unsigned);
608 void quick_insert (unsigned, const T &);
609 void ordered_remove (unsigned);
610 void unordered_remove (unsigned);
611 void block_remove (unsigned, unsigned);
612  void qsort (int (*) (const void *, const void *));
613 void sort (int (*) (const void *, const void *, void *), void *);
614 void stablesort (int (*) (const void *, const void *, void *), void *);
615 T *bsearch (const void *key, int (*compar) (const void *, const void *));
616 T *bsearch (const void *key,
617 int (*compar)(const void *, const void *, void *), void *);
618 unsigned lower_bound (const T &, bool (*) (const T &, const T &)) const;
619 bool contains (const T &search) const;
620 static size_t embedded_size (unsigned);
621 void embedded_init (unsigned, unsigned = 0, unsigned = 0);
622 void quick_grow (unsigned len);
623 void quick_grow_cleared (unsigned len);
624
625 /* vec class can access our internal data and functions. */
626 template <typename, typename, typename> friend struct vec;
627
628 /* The allocator types also need access to our internals. */
629 friend struct va_gc;
630 friend struct va_gc_atomic;
631 friend struct va_heap;
632
633 /* FIXME - This field should be private, but we need to cater to
634 compilers that have stricter notions of PODness for types. */
635 /* Align m_vecpfx to simplify address (). */
636 alignas (T) alignas (vec_prefix) vec_prefix m_vecpfx;
637};
638
639
640/* Convenience wrapper functions to use when dealing with pointers to
641 embedded vectors. Some functionality for these vectors must be
642 provided via free functions for these reasons:
643
644 1- The pointer may be NULL (e.g., before initial allocation).
645
646 2- When the vector needs to grow, it must be reallocated, so
647 the pointer will change its value.
648
649 Because of limitations with the current GC machinery, all vectors
650 in GC memory *must* be pointers. */
651
652
653/* If V contains no room for NELEMS elements, return false. Otherwise,
654 return true. */
655template<typename T, typename A>
656inline bool
657vec_safe_space (const vec<T, A, vl_embed> *v, unsigned nelems)
658{
659 return v ? v->space (nelems) : nelems == 0;
660}
661
662
663/* If V is NULL, return 0. Otherwise, return V->length(). */
664template<typename T, typename A>
665inline unsigned
666vec_safe_length (const vec<T, A, vl_embed> *v)
667{
668 return v ? v->length () : 0;
4: Assuming 'v' is non-null
5: '?' condition is true
6: Returning value, which participates in a condition later
669}
670
671
672/* If V is NULL, return NULL. Otherwise, return V->address(). */
673template<typename T, typename A>
674inline T *
675vec_safe_address (vec<T, A, vl_embed> *v)
676{
677  return v ? v->address () : NULL;
678}
679
680
681/* If V is NULL, return true. Otherwise, return V->is_empty(). */
682template<typename T, typename A>
683inline bool
684vec_safe_is_empty (vec<T, A, vl_embed> *v)
685{
686 return v ? v->is_empty () : true;
687}
688
689/* If V does not have space for NELEMS elements, call
690 V->reserve(NELEMS, EXACT). */
691template<typename T, typename A>
692inline bool
693vec_safe_reserve (vec<T, A, vl_embed> *&v, unsigned nelems, bool exact = false
694 CXX_MEM_STAT_INFO)
695{
696 bool extend = nelems ? !vec_safe_space (v, nelems) : false;
13: Assuming 'nelems' is not equal to 0
14: '?' condition is true
15: Assuming the condition is true
697  if (extend)
15.1: 'extend' is true
16: Taking true branch
698    A::reserve (v, nelems, exact PASS_MEM_STAT);
17: Calling 'va_gc::reserve'
22: Returning from 'va_gc::reserve'
699 return extend;
700}
701
702template<typename T, typename A>
703inline bool
704vec_safe_reserve_exact (vec<T, A, vl_embed> *&v, unsigned nelems
705 CXX_MEM_STAT_INFO)
706{
707 return vec_safe_reserve (v, nelems, true PASS_MEM_STAT);
12: Calling 'vec_safe_reserve<param_access *, va_gc>'
23: Returning from 'vec_safe_reserve<param_access *, va_gc>'
708}
709
710
711/* Allocate GC memory for V with space for NELEMS slots. If NELEMS
712 is 0, V is initialized to NULL. */
713
714template<typename T, typename A>
715inline void
716vec_alloc (vec<T, A, vl_embed> *&v, unsigned nelems CXX_MEM_STAT_INFO)
717{
718  v = NULL;
719 vec_safe_reserve (v, nelems, false PASS_MEM_STAT);
720}
721
722
723/* Free the GC memory allocated by vector V and set it to NULL. */
724
725template<typename T, typename A>
726inline void
727vec_free (vec<T, A, vl_embed> *&v)
728{
729 A::release (v);
730}
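
   A minimal lifecycle sketch tying vec_alloc, the vec_safe_* helpers
   (vec_safe_push is defined further below) and vec_free together
   ('some_tree' is illustrative):

     vec<tree, va_gc> *v = NULL;	// an empty vector is just a null pointer
     vec_alloc (v, 8);			// allocate room for 8 elements
     vec_safe_push (v, some_tree);	// handles NULL and reallocates as needed
     unsigned n = vec_safe_length (v);	// == 1
     vec_free (v);			// ggc_free the storage, reset v to NULL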
731
732
733/* Grow V to length LEN. Allocate it, if necessary. */
734template<typename T, typename A>
735inline void
736vec_safe_grow (vec<T, A, vl_embed> *&v, unsigned len,
737 bool exact = false CXX_MEM_STAT_INFO)
738{
739 unsigned oldlen = vec_safe_length (v);
740  gcc_checking_assert (len >= oldlen);
741 vec_safe_reserve (v, len - oldlen, exact PASS_MEM_STAT);
742 v->quick_grow (len);
743}
744
745
746/* If V is NULL, allocate it. Call V->safe_grow_cleared(LEN). */
747template<typename T, typename A>
748inline void
749vec_safe_grow_cleared (vec<T, A, vl_embed> *&v, unsigned len,
750 bool exact = false CXX_MEM_STAT_INFO)
751{
752 unsigned oldlen = vec_safe_length (v);
753 vec_safe_grow (v, len, exact PASS_MEM_STAT);
754 vec_default_construct (v->address () + oldlen, len - oldlen);
755}
756
757
758/* Assume V is not NULL. */
759
760template<typename T>
761inline void
762vec_safe_grow_cleared (vec<T, va_heap, vl_ptr> *&v,
763 unsigned len, bool exact = false CXX_MEM_STAT_INFO)
764{
765 v->safe_grow_cleared (len, exact PASS_MEM_STAT);
766}
767
768/* If V does not have space for NELEMS elements, call
769 V->reserve(NELEMS, EXACT). */
770
771template<typename T>
772inline bool
773vec_safe_reserve (vec<T, va_heap, vl_ptr> *&v, unsigned nelems, bool exact = false
774 CXX_MEM_STAT_INFO)
775{
776 return v->reserve (nelems, exact);
777}
778
779
780/* If V is NULL return false, otherwise return V->iterate(IX, PTR). */
781template<typename T, typename A>
782inline bool
783vec_safe_iterate (const vec<T, A, vl_embed> *v, unsigned ix, T **ptr)
784{
785 if (v)
786 return v->iterate (ix, ptr);
787 else
788 {
789 *ptr = 0;
790 return false;
791 }
792}
793
794template<typename T, typename A>
795inline bool
796vec_safe_iterate (const vec<T, A, vl_embed> *v, unsigned ix, T *ptr)
797{
798 if (v)
799 return v->iterate (ix, ptr);
800 else
801 {
802 *ptr = 0;
803 return false;
804 }
805}
806
807
808/* If V has no room for one more element, reallocate it. Then call
809 V->quick_push(OBJ). */
810template<typename T, typename A>
811inline T *
812vec_safe_push (vec<T, A, vl_embed> *&v, const T &obj CXX_MEM_STAT_INFO)
813{
814 vec_safe_reserve (v, 1, false PASS_MEM_STAT);
815 return v->quick_push (obj);
816}
817
818
819/* if V has no room for one more element, reallocate it. Then call
820 V->quick_insert(IX, OBJ). */
821template<typename T, typename A>
822inline void
823vec_safe_insert (vec<T, A, vl_embed> *&v, unsigned ix, const T &obj
824 CXX_MEM_STAT_INFO)
825{
826 vec_safe_reserve (v, 1, false PASS_MEM_STAT);
827 v->quick_insert (ix, obj);
828}
829
830
831/* If V is NULL, do nothing. Otherwise, call V->truncate(SIZE). */
832template<typename T, typename A>
833inline void
834vec_safe_truncate (vec<T, A, vl_embed> *v, unsigned size)
835{
836 if (v)
837 v->truncate (size);
838}
839
840
841/* If SRC is not NULL, return a pointer to a copy of it. */
842template<typename T, typename A>
843inline vec<T, A, vl_embed> *
844vec_safe_copy (vec<T, A, vl_embed> *src CXX_MEM_STAT_INFO)
845{
846  return src ? src->copy (ALONE_PASS_MEM_STAT) : NULL;
847}
848
849/* Copy the elements from SRC to the end of DST as if by memcpy.
850 Reallocate DST, if necessary. */
851template<typename T, typename A>
852inline void
853vec_safe_splice (vec<T, A, vl_embed> *&dst, const vec<T, A, vl_embed> *src
854 CXX_MEM_STAT_INFO)
855{
856 unsigned src_len = vec_safe_length (src);
857 if (src_len)
858 {
859 vec_safe_reserve_exact (dst, vec_safe_length (dst) + src_len
860 PASS_MEM_STAT);
861 dst->splice (*src);
862 }
863}
864
865/* Return true if SEARCH is an element of V. Note that this is O(N) in the
866 size of the vector and so should be used with care. */
867
868template<typename T, typename A>
869inline bool
870vec_safe_contains (vec<T, A, vl_embed> *v, const T &search)
871{
872 return v ? v->contains (search) : false;
873}
874
875/* Index into vector. Return the IX'th element. IX must be in the
876 domain of the vector. */
877
878template<typename T, typename A>
879inline const T &
880vec<T, A, vl_embed>::operator[] (unsigned ix) const
881{
882  gcc_checking_assert (ix < m_vecpfx.m_num);
883 return address ()[ix];
884}
885
886template<typename T, typename A>
887inline T &
888vec<T, A, vl_embed>::operator[] (unsigned ix)
889{
890  gcc_checking_assert (ix < m_vecpfx.m_num);
891 return address ()[ix];
892}
893
894
895/* Get the final element of the vector, which must not be empty. */
896
897template<typename T, typename A>
898inline T &
899vec<T, A, vl_embed>::last (void)
900{
901  gcc_checking_assert (m_vecpfx.m_num > 0);
902 return (*this)[m_vecpfx.m_num - 1];
903}
904
905
906/* If this vector has space for NELEMS additional entries, return
907 true. You usually only need to use this if you are doing your
908 own vector reallocation, for instance on an embedded vector. This
909 returns true in exactly the same circumstances that vec::reserve
910 will. */
911
912template<typename T, typename A>
913inline bool
914vec<T, A, vl_embed>::space (unsigned nelems) const
915{
916 return m_vecpfx.m_alloc - m_vecpfx.m_num >= nelems;
917}
918
919
920/* Return iteration condition and update *PTR to (a copy of) the IX'th
921 element of this vector. Use this to iterate over the elements of a
922 vector as follows,
923
924 for (ix = 0; v->iterate (ix, &val); ix++)
925 continue; */
926
927template<typename T, typename A>
928inline bool
929vec<T, A, vl_embed>::iterate (unsigned ix, T *ptr) const
930{
931 if (ix < m_vecpfx.m_num)
932 {
933 *ptr = address ()[ix];
934 return true;
935 }
936 else
937 {
938 *ptr = 0;
939 return false;
940 }
941}
942
943
944/* Return iteration condition and update *PTR to point to the
945 IX'th element of this vector. Use this to iterate over the
946 elements of a vector as follows,
947
948 for (ix = 0; v->iterate (ix, &ptr); ix++)
949 continue;
950
951 This variant is for vectors of objects. */
952
953template<typename T, typename A>
954inline bool
955vec<T, A, vl_embed>::iterate (unsigned ix, T **ptr) const
956{
957 if (ix < m_vecpfx.m_num)
958 {
959      *ptr = CONST_CAST (T *, &address ()[ix]);
960 return true;
961 }
962 else
963 {
964 *ptr = 0;
965 return false;
966 }
967}
968
969
970/* Return a pointer to a copy of this vector. */
971
972template<typename T, typename A>
973inline vec<T, A, vl_embed> *
974vec<T, A, vl_embed>::copy (ALONE_MEM_STAT_DECL) const
975{
976  vec<T, A, vl_embed> *new_vec = NULL;
977 unsigned len = length ();
978 if (len)
979 {
980 vec_alloc (new_vec, len PASS_MEM_STAT);
981 new_vec->embedded_init (len, len);
982 vec_copy_construct (new_vec->address (), address (), len);
983 }
984 return new_vec;
985}
986
987
988/* Copy the elements from SRC to the end of this vector as if by memcpy.
989 The vector must have sufficient headroom available. */
990
991template<typename T, typename A>
992inline void
993vec<T, A, vl_embed>::splice (const vec<T, A, vl_embed> &src)
994{
995 unsigned len = src.length ();
996 if (len)
997 {
998      gcc_checking_assert (space (len));
999 vec_copy_construct (end (), src.address (), len);
1000 m_vecpfx.m_num += len;
1001 }
1002}
1003
1004template<typename T, typename A>
1005inline void
1006vec<T, A, vl_embed>::splice (const vec<T, A, vl_embed> *src)
1007{
1008 if (src)
1009 splice (*src);
1010}
1011
1012
1013/* Push OBJ (a new element) onto the end of the vector. There must be
1014 sufficient space in the vector. Return a pointer to the slot
1015 where OBJ was inserted. */
1016
1017template<typename T, typename A>
1018inline T *
1019vec<T, A, vl_embed>::quick_push (const T &obj)
1020{
1021  gcc_checking_assert (space (1));
1022 T *slot = &address ()[m_vecpfx.m_num++];
1023 *slot = obj;
1024 return slot;
1025}
1026
1027
1028/* Pop and return the last element off the end of the vector. */
1029
1030template<typename T, typename A>
1031inline T &
1032vec<T, A, vl_embed>::pop (void)
1033{
1034  gcc_checking_assert (length () > 0);
1035 return address ()[--m_vecpfx.m_num];
1036}
1037
1038
1039/* Set the length of the vector to SIZE. The new length must be less
1040 than or equal to the current length. This is an O(1) operation. */
1041
1042template<typename T, typename A>
1043inline void
1044vec<T, A, vl_embed>::truncate (unsigned size)
1045{
1046  gcc_checking_assert (length () >= size);
1047 m_vecpfx.m_num = size;
1048}
1049
1050
1051/* Insert an element, OBJ, at the IXth position of this vector. There
1052 must be sufficient space. */
1053
1054template<typename T, typename A>
1055inline void
1056vec<T, A, vl_embed>::quick_insert (unsigned ix, const T &obj)
1057{
1058  gcc_checking_assert (length () < allocated ());
1059  gcc_checking_assert (ix <= length ());
1060 T *slot = &address ()[ix];
1061 memmove (slot + 1, slot, (m_vecpfx.m_num++ - ix) * sizeof (T));
1062 *slot = obj;
1063}
1064
1065
1066/* Remove an element from the IXth position of this vector. Ordering of
1067 remaining elements is preserved. This is an O(N) operation due to
1068 memmove. */
1069
1070template<typename T, typename A>
1071inline void
1072vec<T, A, vl_embed>::ordered_remove (unsigned ix)
1073{
1074  gcc_checking_assert (ix < length ());
1075 T *slot = &address ()[ix];
1076 memmove (slot, slot + 1, (--m_vecpfx.m_num - ix) * sizeof (T));
1077}
1078
1079
1080/* Remove elements in [START, END) from VEC for which COND holds. Ordering of
1081 remaining elements is preserved. This is an O(N) operation. */
1082
1083#define VEC_ORDERED_REMOVE_IF_FROM_TO(vec, read_index, write_index,	\
1084				       elem_ptr, start, end, cond)	\
1085 { \
1086    gcc_assert ((end) <= (vec).length ());				\
1087 for (read_index = write_index = (start); read_index < (end); \
1088 ++read_index) \
1089 { \
1090 elem_ptr = &(vec)[read_index]; \
1091 bool remove_p = (cond); \
1092 if (remove_p) \
1093 continue; \
1094 \
1095 if (read_index != write_index) \
1096 (vec)[write_index] = (vec)[read_index]; \
1097 \
1098 write_index++; \
1099 } \
1100 \
1101 if (read_index - write_index > 0) \
1102 (vec).block_remove (write_index, read_index - write_index); \
1103 }
1104
1105
1106/* Remove elements from VEC for which COND holds. Ordering of remaining
1107 elements is preserved. This is an O(N) operation. */
1108
1109#define VEC_ORDERED_REMOVE_IF(vec, read_index, write_index, elem_ptr, \
1110 cond) \
1111 VEC_ORDERED_REMOVE_IF_FROM_TO ((vec), read_index, write_index, \
1112 elem_ptr, 0, (vec).length (), (cond))
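// Usage sketch (illustrative, not part of vec.h): dropping every even
// element of an auto_vec<int> in a single ordered pass.  The indices and
// the element pointer must be declared by the caller, since the macro
// expands in place:
//
//   unsigned int ri, wi;
//   int *elem;
//   VEC_ORDERED_REMOVE_IF (v, ri, wi, elem, (*elem & 1) == 0);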
1113
1114/* Remove an element from the IXth position of this vector. Ordering of
1115 remaining elements is destroyed. This is an O(1) operation. */
1116
1117template<typename T, typename A>
1118inline void
1119vec<T, A, vl_embed>::unordered_remove (unsigned ix)
1120{
1121 gcc_checking_assert (ix < length ());
1122 T *p = address ();
1123 p[ix] = p[--m_vecpfx.m_num];
1124}
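// Usage sketch (illustrative, not part of vec.h): the last element is
// copied over the removed slot, so this is O(1) but reorders the vector:
//
//   // v = {10, 20, 30, 40}
//   v.unordered_remove (1);   // v = {10, 40, 30}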
1125
1126
1127/* Remove LEN elements starting at the IXth. Ordering is retained.
1128 This is an O(N) operation due to memmove. */
1129
1130template<typename T, typename A>
1131inline void
1132vec<T, A, vl_embed>::block_remove (unsigned ix, unsigned len)
1133{
1134 gcc_checking_assert (ix + len <= length ());
1135 T *slot = &address ()[ix];
1136 m_vecpfx.m_num -= len;
1137 memmove (slot, slot + len, (m_vecpfx.m_num - ix) * sizeof (T));
1138}
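// Usage sketch (illustrative, not part of vec.h):
//
//   // v = {0, 1, 2, 3, 4}
//   v.block_remove (1, 2);    // v = {0, 3, 4}; one memmove of the tail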
1139
1140
1141/* Sort the contents of this vector with qsort. CMP is the comparison
1142 function to pass to qsort. */
1143
1144template<typename T, typename A>
1145inline void
1146vec<T, A, vl_embed>::qsort (int (*cmp) (const void *, const void *))
1147{
1148 if (length () > 1)
1149 gcc_qsort (address (), length (), sizeof (T), cmp);
1150}
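// Usage sketch (illustrative, not part of vec.h): a qsort-style
// comparator receives pointers to two elements; returning the sign of
// the comparison rather than a subtraction avoids signed overflow:
//
//   static int
//   cmp_int (const void *a, const void *b)
//   {
//     int x = *(const int *) a, y = *(const int *) b;
//     return (x > y) - (x < y);
//   }
//   ...
//   v.qsort (cmp_int);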
1151
1152/* Sort the contents of this vector with gcc_sort_r. CMP is the
1153 comparison function to pass to it; DATA is passed through to CMP. */
1154
1155template<typename T, typename A>
1156inline void
1157vec<T, A, vl_embed>::sort (int (*cmp) (const void *, const void *, void *),
1158 void *data)
1159{
1160 if (length () > 1)
1161 gcc_sort_r (address (), length (), sizeof (T), cmp, data);
1162}
1163
1164/* Sort the contents of this vector stably with gcc_stablesort_r. CMP
1165 is the comparison function to pass to it; DATA is passed through. */
1166
1167template<typename T, typename A>
1168inline void
1169vec<T, A, vl_embed>::stablesort (int (*cmp) (const void *, const void *,
1170 void *), void *data)
1171{
1172 if (length () > 1)
1173 gcc_stablesort_r (address (), length (), sizeof (T), cmp, data);
1174}
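// Usage sketch (illustrative, not part of vec.h): sort () and
// stablesort () thread the DATA pointer through to the comparator,
// avoiding global state:
//
//   static int
//   cmp_int_dir (const void *a, const void *b, void *ctx)
//   {
//     int x = *(const int *) a, y = *(const int *) b;
//     int r = (x > y) - (x < y);
//     return *(bool *) ctx ? -r : r;    // ctx selects descending order
//   }
//   ...
//   bool descending = true;
//   v.sort (cmp_int_dir, &descending);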
1175
1176/* Search the contents of the sorted vector with a binary search.
1177 CMP is the comparison function to pass to bsearch. */
1178
1179template<typename T, typename A>
1180inline T *
1181vec<T, A, vl_embed>::bsearch (const void *key,
1182 int (*compar) (const void *, const void *))
1183{
1184 const void *base = this->address ();
1185 size_t nmemb = this->length ();
1186 size_t size = sizeof (T);
1187 /* The following is a copy of glibc stdlib-bsearch.h. */
1188 size_t l, u, idx;
1189 const void *p;
1190 int comparison;
1191
1192 l = 0;
1193 u = nmemb;
1194 while (l < u)
1195 {
1196 idx = (l + u) / 2;
1197 p = (const void *) (((const char *) base) + (idx * size));
1198 comparison = (*compar) (key, p);
1199 if (comparison < 0)
1200 u = idx;
1201 else if (comparison > 0)
1202 l = idx + 1;
1203 else
1204 return (T *)const_cast<void *>(p);
1205 }
1206
1207 return NULL;
1208}
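// Usage sketch (illustrative, not part of vec.h; reuses cmp_int from the
// qsort sketch above): the vector must already be sorted consistently
// with COMPAR, and KEY is always the first argument of each comparison:
//
//   int key = 3;
//   int *hit = v.bsearch (&key, cmp_int);   // NULL when absent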
1209
1210/* Search the contents of the sorted vector with a binary search.
1211 CMP is the comparison function to pass to bsearch. */
1212
1213template<typename T, typename A>
1214inline T *
1215vec<T, A, vl_embed>::bsearch (const void *key,
1216 int (*compar) (const void *, const void *,
1217 void *), void *data)
1218{
1219 const void *base = this->address ();
1220 size_t nmemb = this->length ();
1221 size_t size = sizeof (T);
1222 /* The following is a copy of glibc stdlib-bsearch.h. */
1223 size_t l, u, idx;
1224 const void *p;
1225 int comparison;
1226
1227 l = 0;
1228 u = nmemb;
1229 while (l < u)
1230 {
1231 idx = (l + u) / 2;
1232 p = (const void *) (((const char *) base) + (idx * size));
1233 comparison = (*compar) (key, p, data);
1234 if (comparison < 0)
1235 u = idx;
1236 else if (comparison > 0)
1237 l = idx + 1;
1238 else
1239 return (T *)const_cast<void *>(p);
1240 }
1241
1242 return NULL;
1243}
1244
1245/* Return true if SEARCH is an element of V. Note that this is O(N) in the
1246 size of the vector and so should be used with care. */
1247
1248template<typename T, typename A>
1249inline bool
1250vec<T, A, vl_embed>::contains (const T &search) const
1251{
1252 unsigned int len = length ();
1253 const T *p = address ();
1254 for (unsigned int i = 0; i < len; i++)
1255 {
1256 const T *slot = &p[i];
1257 if (*slot == search)
1258 return true;
1259 }
1260
1261 return false;
1262}
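// Usage sketch (illustrative, not part of vec.h): a linear scan, so on
// sorted data prefer bsearch above:
//
//   if (!v.contains (42))
//     v.safe_push (42);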
1263
1264/* Find and return the first position in which OBJ could be inserted
1265 without changing the ordering of this vector. LESSTHAN is a
1266 function that returns true if the first argument is strictly less
1267 than the second. */
1268
1269template<typename T, typename A>
1270unsigned
1271vec<T, A, vl_embed>::lower_bound (const T &obj,
1272 bool (*lessthan)(const T &, const T &))
1273 const
1274{
1275 unsigned int len = length ();
1276 unsigned int half, middle;
1277 unsigned int first = 0;
1278 while (len > 0)
1279 {
1280 half = len / 2;
1281 middle = first;
1282 middle += half;
1283 const T &middle_elem = address ()[middle];
1284 if (lessthan (middle_elem, obj))
1285 {
1286 first = middle;
1287 ++first;
1288 len = len - half - 1;
1289 }
1290 else
1291 len = half;
1292 }
1293 return first;
1294}
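// Usage sketch (illustrative, not part of vec.h): unlike the qsort-style
// comparators above, LESSTHAN takes element references.  Inserting at
// the returned position keeps the vector sorted:
//
//   static bool
//   int_less (const int &a, const int &b)
//   {
//     return a < b;
//   }
//   ...
//   unsigned pos = v.lower_bound (3, int_less);
//   v.safe_insert (pos, 3);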
1295
1296
1297/* Return the number of bytes needed to embed an instance of an
1298 embeddable vec inside another data structure.
1299
1300 Use these methods to determine the required size and initialization
1301 of a vector V of type T embedded within another structure (as the
1302 final member):
1303
1304 size_t vec<T, A, vl_embed>::embedded_size (unsigned alloc);
1305 void v->embedded_init (unsigned alloc, unsigned num);
1306
1307 These allow the caller to perform the memory allocation. */
1308
1309template<typename T, typename A>
1310inline size_t
1311vec<T, A, vl_embed>::embedded_size (unsigned alloc)
1312{
1313 struct alignas (T) U { char data[sizeof (T)]; };
1314 typedef vec<U, A, vl_embed> vec_embedded;
1315 typedef typename std::conditional<std::is_standard_layout<T>::value,
1316 vec, vec_embedded>::type vec_stdlayout;
1317 static_assert (sizeof (vec_stdlayout) == sizeof (vec), "");
1318 static_assert (alignof (vec_stdlayout) == alignof (vec), "");
1319 return sizeof (vec_stdlayout) + alloc * sizeof (T);
1320}
1321
1322
1323/* Initialize the vector to contain room for ALLOC elements and NUM
1324 active elements. AUT is nonzero if the vector uses auto storage. */
1325
1326template<typename T, typename A>
1327inline void
1328vec<T, A, vl_embed>::embedded_init (unsigned alloc, unsigned num, unsigned aut)
1329{
1330 m_vecpfx.m_alloc = alloc;
1331 m_vecpfx.m_using_auto_storage = aut;
1332 m_vecpfx.m_num = num;
1333}
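// Usage sketch (illustrative, not part of vec.h; `int_list' is a made-up
// structure): embedding a vector as the final member of a struct and
// allocating both in one block:
//
//   struct int_list
//   {
//     int id;
//     vec<int, va_heap, vl_embed> values;   // must be the final member
//   };
//   size_t sz = offsetof (int_list, values)
//               + vec<int, va_heap, vl_embed>::embedded_size (16);
//   int_list *l = (int_list *) xmalloc (sz);
//   l->values.embedded_init (16, 0);        // room for 16, 0 active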
1334
1335
1336/* Grow the vector to a specific length. LEN must be as long or longer than
1337 the current length. The new elements are uninitialized. */
1338
1339template<typename T, typename A>
1340inline void
1341vec<T, A, vl_embed>::quick_grow (unsigned len)
1342{
1343 gcc_checking_assert (length () <= len && len <= m_vecpfx.m_alloc);
1344 m_vecpfx.m_num = len;
1345}
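// Usage sketch (illustrative, not part of vec.h): quick_grow never
// reallocates, so LEN must fit the existing allocation; the new slots
// are uninitialized (quick_grow_cleared zeroes them instead):
//
//   auto_vec<int, 16> v;    // 16 elements of inline stack storage
//   v.quick_grow (10);      // length () == 10, contents indeterminate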