Bug Summary

File:build/gcc/vec.h
Warning:line 814, column 10
Called C++ object pointer is null

Annotated Source Code

Press '?' to see keyboard shortcuts

clang -cc1 -cc1 -triple x86_64-unknown-linux-gnu -analyze -disable-free -disable-llvm-verifier -discard-value-names -main-file-name stmt.c -analyzer-store=region -analyzer-opt-analyze-nested-blocks -analyzer-checker=core -analyzer-checker=apiModeling -analyzer-checker=unix -analyzer-checker=deadcode -analyzer-checker=cplusplus -analyzer-checker=security.insecureAPI.UncheckedReturn -analyzer-checker=security.insecureAPI.getpw -analyzer-checker=security.insecureAPI.gets -analyzer-checker=security.insecureAPI.mktemp -analyzer-checker=security.insecureAPI.mkstemp -analyzer-checker=security.insecureAPI.vfork -analyzer-checker=nullability.NullPassedToNonnull -analyzer-checker=nullability.NullReturnedFromNonnull -analyzer-output plist -w -setup-static-analyzer -analyzer-config-compatibility-mode=true -mrelocation-model static -mframe-pointer=none -fmath-errno -fno-rounding-math -mconstructor-aliases -munwind-tables -target-cpu x86-64 -tune-cpu generic -debugger-tuning=gdb -fcoverage-compilation-dir=/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/objdir/gcc -resource-dir /usr/lib64/clang/13.0.0 -D IN_GCC -D HAVE_CONFIG_H -I . -I . -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/. -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../include -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libcpp/include -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libcody -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libdecnumber -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libdecnumber/bid -I ../libdecnumber -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libbacktrace -internal-isystem /usr/bin/../lib64/gcc/x86_64-suse-linux/11/../../../../include/c++/11 -internal-isystem /usr/bin/../lib64/gcc/x86_64-suse-linux/11/../../../../include/c++/11/x86_64-suse-linux -internal-isystem /usr/bin/../lib64/gcc/x86_64-suse-linux/11/../../../../include/c++/11/backward -internal-isystem /usr/lib64/clang/13.0.0/include -internal-isystem /usr/local/include -internal-isystem /usr/bin/../lib64/gcc/x86_64-suse-linux/11/../../../../x86_64-suse-linux/include -internal-externc-isystem /include -internal-externc-isystem /usr/include -O2 -Wno-narrowing -Wwrite-strings -Wno-error=format-diag -Wno-long-long -Wno-variadic-macros -Wno-overlength-strings -fdeprecated-macro -fdebug-compilation-dir=/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/objdir/gcc -ferror-limit 19 -fno-rtti -fgnuc-version=4.2.1 -vectorize-loops -vectorize-slp -analyzer-output=plist-html -analyzer-config silence-checkers=core.NullDereference -faddrsig -D__GCC_HAVE_DWARF2_CFI_ASM=1 -o /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/objdir/clang-static-analyzer/2021-11-20-133755-20252-1/report-T6VqVO.plist -x c++ /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stmt.c

/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stmt.c

1/* Expands front end tree to back end RTL for GCC
2 Copyright (C) 1987-2021 Free Software Foundation, Inc.
3
4This file is part of GCC.
5
6GCC is free software; you can redistribute it and/or modify it under
7the terms of the GNU General Public License as published by the Free
8Software Foundation; either version 3, or (at your option) any later
9version.
10
11GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12WARRANTY; without even the implied warranty of MERCHANTABILITY or
13FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14for more details.
15
16You should have received a copy of the GNU General Public License
17along with GCC; see the file COPYING3. If not see
18<http://www.gnu.org/licenses/>. */
19
20/* This file handles the generation of rtl code from tree structure
21 above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
22 The functions whose names start with `expand_' are called by the
23 expander to generate RTL instructions for various kinds of constructs. */
24
25#include "config.h"
26#include "system.h"
27#include "coretypes.h"
28#include "backend.h"
29#include "target.h"
30#include "rtl.h"
31#include "tree.h"
32#include "gimple.h"
33#include "cfghooks.h"
34#include "predict.h"
35#include "memmodel.h"
36#include "tm_p.h"
37#include "optabs.h"
38#include "regs.h"
39#include "emit-rtl.h"
40#include "pretty-print.h"
41#include "diagnostic-core.h"
42
43#include "fold-const.h"
44#include "varasm.h"
45#include "stor-layout.h"
46#include "dojump.h"
47#include "explow.h"
48#include "stmt.h"
49#include "expr.h"
50#include "langhooks.h"
51#include "cfganal.h"
52#include "tree-cfg.h"
53#include "dumpfile.h"
54#include "builtins.h"
55
56
57/* Functions and data structures for expanding case statements. */
58
59/* Case label structure, used to hold info on labels within case
60 statements. We handle "range" labels; for a single-value label
61 as in C, the high and low limits are the same.
62
63 We start with a vector of case nodes sorted in ascending order, and
64 the default label as the last element in the vector.
65
66 Switch statements are expanded in jump table form.
67
68*/
69
70class simple_case_node
71{
72public:
73 simple_case_node (tree low, tree high, tree code_label):
74 m_low (low), m_high (high), m_code_label (code_label)
75 {}
76
77 /* Lowest index value for this label. */
78 tree m_low;
79 /* Highest index value for this label. */
80 tree m_high;
81 /* Label to jump to when node matches. */
82 tree m_code_label;
83};
84
85static bool check_unique_operand_names (tree, tree, tree);
86static char *resolve_operand_name_1 (char *, tree, tree, tree);
87
88/* Return the rtx-label that corresponds to a LABEL_DECL,
89 creating it if necessary. */
90
91rtx_insn *
92label_rtx (tree label)
93{
94 gcc_assert (TREE_CODE (label) == LABEL_DECL)((void)(!(((enum tree_code) (label)->base.code) == LABEL_DECL
) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stmt.c"
, 94, __FUNCTION__), 0 : 0))
;
95
96 if (!DECL_RTL_SET_P (label)(((tree_contains_struct[(((enum tree_code) (label)->base.code
))][(TS_DECL_WRTL)])) && (contains_struct_check ((label
), (TS_DECL_WRTL), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stmt.c"
, 96, __FUNCTION__))->decl_with_rtl.rtl != nullptr)
)
97 {
98 rtx_code_label *r = gen_label_rtx ();
99 SET_DECL_RTL (label, r)set_decl_rtl (label, r);
100 if (FORCED_LABEL (label)((tree_check ((label), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stmt.c"
, 100, __FUNCTION__, (LABEL_DECL)))->base.side_effects_flag
)
|| DECL_NONLOCAL (label)((contains_struct_check ((label), (TS_DECL_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stmt.c"
, 100, __FUNCTION__))->decl_common.nonlocal_flag)
)
101 LABEL_PRESERVE_P (r)(__extension__ ({ __typeof ((r)) const _rtx = ((r)); if (((enum
rtx_code) (_rtx)->code) != CODE_LABEL && ((enum rtx_code
) (_rtx)->code) != NOTE) rtl_check_failed_flag ("LABEL_PRESERVE_P"
,_rtx, "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stmt.c"
, 101, __FUNCTION__); _rtx; })->in_struct)
= 1;
102 }
103
104 return as_a <rtx_insn *> (DECL_RTL (label)((contains_struct_check ((label), (TS_DECL_WRTL), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stmt.c"
, 104, __FUNCTION__))->decl_with_rtl.rtl ? (label)->decl_with_rtl
.rtl : (make_decl_rtl (label), (label)->decl_with_rtl.rtl)
)
);
105}
106
107/* As above, but also put it on the forced-reference list of the
108 function that contains it. */
109rtx_insn *
110force_label_rtx (tree label)
111{
112 rtx_insn *ref = label_rtx (label);
113 tree function = decl_function_context (label);
1
Value assigned to 'x_rtl.expr.x_forced_labels'
114
115 gcc_assert (function)((void)(!(function) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stmt.c"
, 115, __FUNCTION__), 0 : 0))
;
2
Assuming 'function' is non-null
3
'?' condition is false
116
117 vec_safe_push (forced_labels((&x_rtl)->expr.x_forced_labels), ref);
4
Passing value via 1st parameter 'v'
5
Calling 'vec_safe_push<rtx_insn *, va_gc>'
118 return ref;
119}
120
121/* As label_rtx, but ensures (in check build), that returned value is
122 an existing label (i.e. rtx with code CODE_LABEL). */
123rtx_code_label *
124jump_target_rtx (tree label)
125{
126 return as_a <rtx_code_label *> (label_rtx (label));
127}
128
129/* Add an unconditional jump to LABEL as the next sequential instruction. */
130
131void
132emit_jump (rtx label)
133{
134 do_pending_stack_adjust ();
135 emit_jump_insn (targetm.gen_jump (label));
136 emit_barrier ();
137}
138
139/* Handle goto statements and the labels that they can go to. */
140
141/* Specify the location in the RTL code of a label LABEL,
142 which is a LABEL_DECL tree node.
143
144 This is used for the kind of label that the user can jump to with a
145 goto statement, and for alternatives of a switch or case statement.
146 RTL labels generated for loops and conditionals don't go through here;
147 they are generated directly at the RTL level, by other functions below.
148
149 Note that this has nothing to do with defining label *names*.
150 Languages vary in how they do that and what that even means. */
151
152void
153expand_label (tree label)
154{
155 rtx_code_label *label_r = jump_target_rtx (label);
156
157 do_pending_stack_adjust ();
158 emit_label (label_r);
159 if (DECL_NAME (label)((contains_struct_check ((label), (TS_DECL_MINIMAL), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stmt.c"
, 159, __FUNCTION__))->decl_minimal.name)
)
160 LABEL_NAME (DECL_RTL (label))(((((contains_struct_check ((label), (TS_DECL_WRTL), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stmt.c"
, 160, __FUNCTION__))->decl_with_rtl.rtl ? (label)->decl_with_rtl
.rtl : (make_decl_rtl (label), (label)->decl_with_rtl.rtl)
))->u.fld[6]).rt_str)
= IDENTIFIER_POINTER (DECL_NAME (label))((const char *) (tree_check ((((contains_struct_check ((label
), (TS_DECL_MINIMAL), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stmt.c"
, 160, __FUNCTION__))->decl_minimal.name)), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stmt.c"
, 160, __FUNCTION__, (IDENTIFIER_NODE)))->identifier.id.str
)
;
161
162 if (DECL_NONLOCAL (label)((contains_struct_check ((label), (TS_DECL_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stmt.c"
, 162, __FUNCTION__))->decl_common.nonlocal_flag)
)
163 {
164 expand_builtin_setjmp_receiver (NULLnullptr);
165 nonlocal_goto_handler_labels((&x_rtl)->x_nonlocal_goto_handler_labels)
166 = gen_rtx_INSN_LIST (VOIDmode((void) 0, E_VOIDmode), label_r,
167 nonlocal_goto_handler_labels((&x_rtl)->x_nonlocal_goto_handler_labels));
168 }
169
170 if (FORCED_LABEL (label)((tree_check ((label), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stmt.c"
, 170, __FUNCTION__, (LABEL_DECL)))->base.side_effects_flag
)
)
171 vec_safe_push<rtx_insn *> (forced_labels((&x_rtl)->expr.x_forced_labels), label_r);
172
173 if (DECL_NONLOCAL (label)((contains_struct_check ((label), (TS_DECL_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stmt.c"
, 173, __FUNCTION__))->decl_common.nonlocal_flag)
|| FORCED_LABEL (label)((tree_check ((label), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stmt.c"
, 173, __FUNCTION__, (LABEL_DECL)))->base.side_effects_flag
)
)
174 maybe_set_first_label_num (label_r);
175}
176
177/* Parse the output constraint pointed to by *CONSTRAINT_P. It is the
178 OPERAND_NUMth output operand, indexed from zero. There are NINPUTS
179 inputs and NOUTPUTS outputs to this extended-asm. Upon return,
180 *ALLOWS_MEM will be TRUE iff the constraint allows the use of a
181 memory operand. Similarly, *ALLOWS_REG will be TRUE iff the
182 constraint allows the use of a register operand. And, *IS_INOUT
183 will be true if the operand is read-write, i.e., if it is used as
184 an input as well as an output. If *CONSTRAINT_P is not in
185 canonical form, it will be made canonical. (Note that `+' will be
186 replaced with `=' as part of this process.)
187
188 Returns TRUE if all went well; FALSE if an error occurred. */
189
190bool
191parse_output_constraint (const char **constraint_p, int operand_num,
192 int ninputs, int noutputs, bool *allows_mem,
193 bool *allows_reg, bool *is_inout)
194{
195 const char *constraint = *constraint_p;
196 const char *p;
197
198 /* Assume the constraint doesn't allow the use of either a register
199 or memory. */
200 *allows_mem = false;
201 *allows_reg = false;
202
203 /* Allow the `=' or `+' to not be at the beginning of the string,
204 since it wasn't explicitly documented that way, and there is a
205 large body of code that puts it last. Swap the character to
206 the front, so as not to uglify any place else. */
207 p = strchr (constraint, '=');
208 if (!p)
209 p = strchr (constraint, '+');
210
211 /* If the string doesn't contain an `=', issue an error
212 message. */
213 if (!p)
214 {
215 error ("output operand constraint lacks %<=%>");
216 return false;
217 }
218
219 /* If the constraint begins with `+', then the operand is both read
220 from and written to. */
221 *is_inout = (*p == '+');
222
223 /* Canonicalize the output constraint so that it begins with `='. */
224 if (p != constraint || *is_inout)
225 {
226 char *buf;
227 size_t c_len = strlen (constraint);
228
229 if (p != constraint)
230 warning (0, "output constraint %qc for operand %d "
231 "is not at the beginning",
232 *p, operand_num);
233
234 /* Make a copy of the constraint. */
235 buf = XALLOCAVEC (char, c_len + 1)((char *) __builtin_alloca(sizeof (char) * (c_len + 1)));
236 strcpy (buf, constraint);
237 /* Swap the first character and the `=' or `+'. */
238 buf[p - constraint] = buf[0];
239 /* Make sure the first character is an `='. (Until we do this,
240 it might be a `+'.) */
241 buf[0] = '=';
242 /* Replace the constraint with the canonicalized string. */
243 *constraint_p = ggc_alloc_string (buf, c_len);
244 constraint = *constraint_p;
245 }
246
247 /* Loop through the constraint string. */
248 for (p = constraint + 1; *p; )
249 {
250 switch (*p)
251 {
252 case '+':
253 case '=':
254 error ("operand constraint contains incorrectly positioned "
255 "%<+%> or %<=%>");
256 return false;
257
258 case '%':
259 if (operand_num + 1 == ninputs + noutputs)
260 {
261 error ("%<%%%> constraint used with last operand");
262 return false;
263 }
264 break;
265
266 case '?': case '!': case '*': case '&': case '#':
267 case '$': case '^':
268 case 'E': case 'F': case 'G': case 'H':
269 case 's': case 'i': case 'n':
270 case 'I': case 'J': case 'K': case 'L': case 'M':
271 case 'N': case 'O': case 'P': case ',':
272 break;
273
274 case '0': case '1': case '2': case '3': case '4':
275 case '5': case '6': case '7': case '8': case '9':
276 case '[':
277 error ("matching constraint not valid in output operand");
278 return false;
279
280 case '<': case '>':
281 /* ??? Before flow, auto inc/dec insns are not supposed to exist,
282 excepting those that expand_call created. So match memory
283 and hope. */
284 *allows_mem = true;
285 break;
286
287 case 'g': case 'X':
288 *allows_reg = true;
289 *allows_mem = true;
290 break;
291
292 default:
293 if (!ISALPHA (*p)(_sch_istable[(*p) & 0xff] & (unsigned short)(_sch_isalpha
))
)
294 break;
295 enum constraint_num cn = lookup_constraint (p);
296 if (reg_class_for_constraint (cn) != NO_REGS
297 || insn_extra_address_constraint (cn))
298 *allows_reg = true;
299 else if (insn_extra_memory_constraint (cn))
300 *allows_mem = true;
301 else
302 insn_extra_constraint_allows_reg_mem (cn, allows_reg, allows_mem);
303 break;
304 }
305
306 for (size_t len = CONSTRAINT_LEN (*p, p)insn_constraint_len (*p,p); len; len--, p++)
307 if (*p == '\0')
308 break;
309 }
310
311 return true;
312}
313
314/* Similar, but for input constraints. */
315
316bool
317parse_input_constraint (const char **constraint_p, int input_num,
318 int ninputs, int noutputs, int ninout,
319 const char * const * constraints,
320 bool *allows_mem, bool *allows_reg)
321{
322 const char *constraint = *constraint_p;
323 const char *orig_constraint = constraint;
324 size_t c_len = strlen (constraint);
325 size_t j;
326 bool saw_match = false;
327
328 /* Assume the constraint doesn't allow the use of either
329 a register or memory. */
330 *allows_mem = false;
331 *allows_reg = false;
332
333 /* Make sure constraint has neither `=', `+', nor '&'. */
334
335 for (j = 0; j < c_len; j += CONSTRAINT_LEN (constraint[j], constraint+j)insn_constraint_len (constraint[j],constraint+j))
336 switch (constraint[j])
337 {
338 case '+': case '=': case '&':
339 if (constraint == orig_constraint)
340 {
341 error ("input operand constraint contains %qc", constraint[j]);
342 return false;
343 }
344 break;
345
346 case '%':
347 if (constraint == orig_constraint
348 && input_num + 1 == ninputs - ninout)
349 {
350 error ("%<%%%> constraint used with last operand");
351 return false;
352 }
353 break;
354
355 case '<': case '>':
356 case '?': case '!': case '*': case '#':
357 case '$': case '^':
358 case 'E': case 'F': case 'G': case 'H':
359 case 's': case 'i': case 'n':
360 case 'I': case 'J': case 'K': case 'L': case 'M':
361 case 'N': case 'O': case 'P': case ',':
362 break;
363
364 /* Whether or not a numeric constraint allows a register is
365 decided by the matching constraint, and so there is no need
366 to do anything special with them. We must handle them in
367 the default case, so that we don't unnecessarily force
368 operands to memory. */
369 case '0': case '1': case '2': case '3': case '4':
370 case '5': case '6': case '7': case '8': case '9':
371 {
372 char *end;
373 unsigned long match;
374
375 saw_match = true;
376
377 match = strtoul (constraint + j, &end, 10);
378 if (match >= (unsigned long) noutputs)
379 {
380 error ("matching constraint references invalid operand number");
381 return false;
382 }
383
384 /* Try and find the real constraint for this dup. Only do this
385 if the matching constraint is the only alternative. */
386 if (*end == '\0'
387 && (j == 0 || (j == 1 && constraint[0] == '%')))
388 {
389 constraint = constraints[match];
390 *constraint_p = constraint;
391 c_len = strlen (constraint);
392 j = 0;
393 /* ??? At the end of the loop, we will skip the first part of
394 the matched constraint. This assumes not only that the
395 other constraint is an output constraint, but also that
396 the '=' or '+' come first. */
397 break;
398 }
399 else
400 j = end - constraint;
401 /* Anticipate increment at end of loop. */
402 j--;
403 }
404 /* Fall through. */
405
406 case 'g': case 'X':
407 *allows_reg = true;
408 *allows_mem = true;
409 break;
410
411 default:
412 if (! ISALPHA (constraint[j])(_sch_istable[(constraint[j]) & 0xff] & (unsigned short
)(_sch_isalpha))
)
413 {
414 error ("invalid punctuation %qc in constraint", constraint[j]);
415 return false;
416 }
417 enum constraint_num cn = lookup_constraint (constraint + j);
418 if (reg_class_for_constraint (cn) != NO_REGS
419 || insn_extra_address_constraint (cn))
420 *allows_reg = true;
421 else if (insn_extra_memory_constraint (cn)
422 || insn_extra_special_memory_constraint (cn)
423 || insn_extra_relaxed_memory_constraint (cn))
424 *allows_mem = true;
425 else
426 insn_extra_constraint_allows_reg_mem (cn, allows_reg, allows_mem);
427 break;
428 }
429
430 if (saw_match && !*allows_reg)
431 warning (0, "matching constraint does not allow a register");
432
433 return true;
434}
435
436/* Return DECL iff there's an overlap between *REGS and DECL, where DECL
437 can be an asm-declared register. Called via walk_tree. */
438
439static tree
440decl_overlaps_hard_reg_set_p (tree *declp, int *walk_subtrees ATTRIBUTE_UNUSED__attribute__ ((__unused__)),
441 void *data)
442{
443 tree decl = *declp;
444 const HARD_REG_SET *const regs = (const HARD_REG_SET *) data;
445
446 if (VAR_P (decl)(((enum tree_code) (decl)->base.code) == VAR_DECL))
447 {
448 if (DECL_HARD_REGISTER (decl)((tree_check ((decl), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stmt.c"
, 448, __FUNCTION__, (VAR_DECL)))->decl_with_vis.hard_register
)
449 && REG_P (DECL_RTL (decl))(((enum rtx_code) (((contains_struct_check ((decl), (TS_DECL_WRTL
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stmt.c"
, 449, __FUNCTION__))->decl_with_rtl.rtl ? (decl)->decl_with_rtl
.rtl : (make_decl_rtl (decl), (decl)->decl_with_rtl.rtl)))
->code) == REG)
450 && REGNO (DECL_RTL (decl))(rhs_regno(((contains_struct_check ((decl), (TS_DECL_WRTL), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stmt.c"
, 450, __FUNCTION__))->decl_with_rtl.rtl ? (decl)->decl_with_rtl
.rtl : (make_decl_rtl (decl), (decl)->decl_with_rtl.rtl)))
)
< FIRST_PSEUDO_REGISTER76)
451 {
452 rtx reg = DECL_RTL (decl)((contains_struct_check ((decl), (TS_DECL_WRTL), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stmt.c"
, 452, __FUNCTION__))->decl_with_rtl.rtl ? (decl)->decl_with_rtl
.rtl : (make_decl_rtl (decl), (decl)->decl_with_rtl.rtl))
;
453
454 if (overlaps_hard_reg_set_p (*regs, GET_MODE (reg)((machine_mode) (reg)->mode), REGNO (reg)(rhs_regno(reg))))
455 return decl;
456 }
457 walk_subtrees = 0;
458 }
459 else if (TYPE_P (decl)(tree_code_type[(int) (((enum tree_code) (decl)->base.code
))] == tcc_type)
|| TREE_CODE (decl)((enum tree_code) (decl)->base.code) == PARM_DECL)
460 walk_subtrees = 0;
461 return NULL_TREE(tree) nullptr;
462}
463
464/* If there is an overlap between *REGS and DECL, return the first overlap
465 found. */
466tree
467tree_overlaps_hard_reg_set (tree decl, HARD_REG_SET *regs)
468{
469 return walk_tree (&decl, decl_overlaps_hard_reg_set_p, regs, NULL)walk_tree_1 (&decl, decl_overlaps_hard_reg_set_p, regs, nullptr
, nullptr)
;
470}
471
472
473/* A subroutine of expand_asm_operands. Check that all operand names
474 are unique. Return true if so. We rely on the fact that these names
475 are identifiers, and so have been canonicalized by get_identifier,
476 so all we need are pointer comparisons. */
477
478static bool
479check_unique_operand_names (tree outputs, tree inputs, tree labels)
480{
481 tree i, j, i_name = NULL_TREE(tree) nullptr;
482
483 for (i = outputs; i ; i = TREE_CHAIN (i)((contains_struct_check ((i), (TS_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stmt.c"
, 483, __FUNCTION__))->common.chain)
)
484 {
485 i_name = TREE_PURPOSE (TREE_PURPOSE (i))((tree_check ((((tree_check ((i), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stmt.c"
, 485, __FUNCTION__, (TREE_LIST)))->list.purpose)), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stmt.c"
, 485, __FUNCTION__, (TREE_LIST)))->list.purpose)
;
486 if (! i_name)
487 continue;
488
489 for (j = TREE_CHAIN (i)((contains_struct_check ((i), (TS_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stmt.c"
, 489, __FUNCTION__))->common.chain)
; j ; j = TREE_CHAIN (j)((contains_struct_check ((j), (TS_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stmt.c"
, 489, __FUNCTION__))->common.chain)
)
490 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))((tree_check ((((tree_check ((j), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stmt.c"
, 490, __FUNCTION__, (TREE_LIST)))->list.purpose)), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stmt.c"
, 490, __FUNCTION__, (TREE_LIST)))->list.purpose)
))
491 goto failure;
492 }
493
494 for (i = inputs; i ; i = TREE_CHAIN (i)((contains_struct_check ((i), (TS_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stmt.c"
, 494, __FUNCTION__))->common.chain)
)
495 {
496 i_name = TREE_PURPOSE (TREE_PURPOSE (i))((tree_check ((((tree_check ((i), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stmt.c"
, 496, __FUNCTION__, (TREE_LIST)))->list.purpose)), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stmt.c"
, 496, __FUNCTION__, (TREE_LIST)))->list.purpose)
;
497 if (! i_name)
498 continue;
499
500 for (j = TREE_CHAIN (i)((contains_struct_check ((i), (TS_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stmt.c"
, 500, __FUNCTION__))->common.chain)
; j ; j = TREE_CHAIN (j)((contains_struct_check ((j), (TS_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stmt.c"
, 500, __FUNCTION__))->common.chain)
)
501 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))((tree_check ((((tree_check ((j), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stmt.c"
, 501, __FUNCTION__, (TREE_LIST)))->list.purpose)), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stmt.c"
, 501, __FUNCTION__, (TREE_LIST)))->list.purpose)
))
502 goto failure;
503 for (j = outputs; j ; j = TREE_CHAIN (j)((contains_struct_check ((j), (TS_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stmt.c"
, 503, __FUNCTION__))->common.chain)
)
504 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))((tree_check ((((tree_check ((j), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stmt.c"
, 504, __FUNCTION__, (TREE_LIST)))->list.purpose)), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stmt.c"
, 504, __FUNCTION__, (TREE_LIST)))->list.purpose)
))
505 goto failure;
506 }
507
508 for (i = labels; i ; i = TREE_CHAIN (i)((contains_struct_check ((i), (TS_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stmt.c"
, 508, __FUNCTION__))->common.chain)
)
509 {
510 i_name = TREE_PURPOSE (i)((tree_check ((i), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stmt.c"
, 510, __FUNCTION__, (TREE_LIST)))->list.purpose)
;
511 if (! i_name)
512 continue;
513
514 for (j = TREE_CHAIN (i)((contains_struct_check ((i), (TS_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stmt.c"
, 514, __FUNCTION__))->common.chain)
; j ; j = TREE_CHAIN (j)((contains_struct_check ((j), (TS_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stmt.c"
, 514, __FUNCTION__))->common.chain)
)
515 if (simple_cst_equal (i_name, TREE_PURPOSE (j)((tree_check ((j), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stmt.c"
, 515, __FUNCTION__, (TREE_LIST)))->list.purpose)
))
516 goto failure;
517 for (j = inputs; j ; j = TREE_CHAIN (j)((contains_struct_check ((j), (TS_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stmt.c"
, 517, __FUNCTION__))->common.chain)
)
518 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))((tree_check ((((tree_check ((j), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stmt.c"
, 518, __FUNCTION__, (TREE_LIST)))->list.purpose)), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stmt.c"
, 518, __FUNCTION__, (TREE_LIST)))->list.purpose)
))
519 goto failure;
520 }
521
522 return true;
523
524 failure:
525 error ("duplicate %<asm%> operand name %qs", TREE_STRING_POINTER (i_name)((const char *)((tree_check ((i_name), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stmt.c"
, 525, __FUNCTION__, (STRING_CST)))->string.str))
);
526 return false;
527}
528
529/* Resolve the names of the operands in *POUTPUTS and *PINPUTS to numbers,
530 and replace the name expansions in STRING and in the constraints to
531 those numbers. This is generally done in the front end while creating
532 the ASM_EXPR generic tree that eventually becomes the GIMPLE_ASM. */
533
534tree
535resolve_asm_operand_names (tree string, tree outputs, tree inputs, tree labels)
536{
537 char *buffer;
538 char *p;
539 const char *c;
540 tree t;
541
542 check_unique_operand_names (outputs, inputs, labels);
543
544 /* Substitute [<name>] in input constraint strings. There should be no
545 named operands in output constraints. */
546 for (t = inputs; t ; t = TREE_CHAIN (t)((contains_struct_check ((t), (TS_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stmt.c"
, 546, __FUNCTION__))->common.chain)
)
547 {
548 c = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)))((const char *)((tree_check ((((tree_check ((((tree_check ((t
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stmt.c"
, 548, __FUNCTION__, (TREE_LIST)))->list.purpose)), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stmt.c"
, 548, __FUNCTION__, (TREE_LIST)))->list.value)), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stmt.c"
, 548, __FUNCTION__, (STRING_CST)))->string.str))
;
549 if (strchr (c, '[') != NULLnullptr)
550 {
551 p = buffer = xstrdup (c);
552 while ((p = strchr (p, '[')) != NULLnullptr)
553 p = resolve_operand_name_1 (p, outputs, inputs, NULLnullptr);
554 TREE_VALUE (TREE_PURPOSE (t))((tree_check ((((tree_check ((t), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stmt.c"
, 554, __FUNCTION__, (TREE_LIST)))->list.purpose)), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stmt.c"
, 554, __FUNCTION__, (TREE_LIST)))->list.value)
555 = build_string (strlen (buffer), buffer);
556 free (buffer);
557 }
558 }
559
560 /* Now check for any needed substitutions in the template. */
561 c = TREE_STRING_POINTER (string)((const char *)((tree_check ((string), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stmt.c"
, 561, __FUNCTION__, (STRING_CST)))->string.str))
;
562 while ((c = strchr (c, '%')) != NULLnullptr)
563 {
564 if (c[1] == '[')
565 break;
566 else if (ISALPHA (c[1])(_sch_istable[(c[1]) & 0xff] & (unsigned short)(_sch_isalpha
))
&& c[2] == '[')
567 break;
568 else
569 {
570 c += 1 + (c[1] == '%');
571 continue;
572 }
573 }
574
575 if (c)
576 {
577 /* OK, we need to make a copy so we can perform the substitutions.
578 Assume that we will not need extra space--we get to remove '['
579 and ']', which means we cannot have a problem until we have more
580 than 999 operands. */
581 buffer = xstrdup (TREE_STRING_POINTER (string)((const char *)((tree_check ((string), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stmt.c"
, 581, __FUNCTION__, (STRING_CST)))->string.str))
);
582 p = buffer + (c - TREE_STRING_POINTER (string)((const char *)((tree_check ((string), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stmt.c"
, 582, __FUNCTION__, (STRING_CST)))->string.str))
);
583
584 while ((p = strchr (p, '%')) != NULLnullptr)
585 {
586 if (p[1] == '[')
587 p += 1;
588 else if (ISALPHA (p[1])(_sch_istable[(p[1]) & 0xff] & (unsigned short)(_sch_isalpha
))
&& p[2] == '[')
589 p += 2;
590 else
591 {
592 p += 1 + (p[1] == '%');
593 continue;
594 }
595
596 p = resolve_operand_name_1 (p, outputs, inputs, labels);
597 }
598
599 string = build_string (strlen (buffer), buffer);
600 free (buffer);
601 }
602
603 return string;
604}
605
606/* A subroutine of resolve_operand_names. P points to the '[' for a
607 potential named operand of the form [<name>]. In place, replace
608 the name and brackets with a number. Return a pointer to the
609 balance of the string after substitution. */
610
611static char *
612resolve_operand_name_1 (char *p, tree outputs, tree inputs, tree labels)
613{
614 char *q;
615 int op, op_inout;
616 tree t;
617
618 /* Collect the operand name. */
619 q = strchr (++p, ']');
620 if (!q)
621 {
622 error ("missing close brace for named operand");
623 return strchr (p, '\0');
624 }
625 *q = '\0';
626
627 /* Resolve the name to a number. */
628 for (op_inout = op = 0, t = outputs; t ; t = TREE_CHAIN (t)((contains_struct_check ((t), (TS_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stmt.c"
, 628, __FUNCTION__))->common.chain)
, op++)
629 {
630 tree name = TREE_PURPOSE (TREE_PURPOSE (t))((tree_check ((((tree_check ((t), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stmt.c"
, 630, __FUNCTION__, (TREE_LIST)))->list.purpose)), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stmt.c"
, 630, __FUNCTION__, (TREE_LIST)))->list.purpose)
;
631 if (name && strcmp (TREE_STRING_POINTER (name)((const char *)((tree_check ((name), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stmt.c"
, 631, __FUNCTION__, (STRING_CST)))->string.str))
, p) == 0)
632 goto found;
633 tree constraint = TREE_VALUE (TREE_PURPOSE (t))((tree_check ((((tree_check ((t), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stmt.c"
, 633, __FUNCTION__, (TREE_LIST)))->list.purpose)), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stmt.c"
, 633, __FUNCTION__, (TREE_LIST)))->list.value)
;
634 if (constraint && strchr (TREE_STRING_POINTER (constraint)((const char *)((tree_check ((constraint), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stmt.c"
, 634, __FUNCTION__, (STRING_CST)))->string.str))
, '+') != NULLnullptr)
635 op_inout++;
636 }
637 for (t = inputs; t ; t = TREE_CHAIN (t)((contains_struct_check ((t), (TS_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stmt.c"
, 637, __FUNCTION__))->common.chain)
, op++)
638 {
639 tree name = TREE_PURPOSE (TREE_PURPOSE (t))((tree_check ((((tree_check ((t), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stmt.c"
, 639, __FUNCTION__, (TREE_LIST)))->list.purpose)), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stmt.c"
, 639, __FUNCTION__, (TREE_LIST)))->list.purpose)
;
640 if (name && strcmp (TREE_STRING_POINTER (name)((const char *)((tree_check ((name), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stmt.c"
, 640, __FUNCTION__, (STRING_CST)))->string.str))
, p) == 0)
641 goto found;
642 }
643 op += op_inout;
644 for (t = labels; t ; t = TREE_CHAIN (t)((contains_struct_check ((t), (TS_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stmt.c"
, 644, __FUNCTION__))->common.chain)
, op++)
645 {
646 tree name = TREE_PURPOSE (t)((tree_check ((t), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stmt.c"
, 646, __FUNCTION__, (TREE_LIST)))->list.purpose)
;
647 if (name && strcmp (TREE_STRING_POINTER (name)((const char *)((tree_check ((name), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stmt.c"
, 647, __FUNCTION__, (STRING_CST)))->string.str))
, p) == 0)
648 goto found;
649 }
650
651 error ("undefined named operand %qs", identifier_to_locale (p));
652 op = 0;
653
654 found:
655 /* Replace the name with the number. Unfortunately, not all libraries
656 get the return value of sprintf correct, so search for the end of the
657 generated string by hand. */
658 sprintf (--p, "%d", op);
659 p = strchr (p, '\0');
660
661 /* Verify the no extra buffer space assumption. */
662 gcc_assert (p <= q)((void)(!(p <= q) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stmt.c"
, 662, __FUNCTION__), 0 : 0))
;
663
664 /* Shift the rest of the buffer down to fill the gap. */
665 memmove (p, q + 1, strlen (q + 1) + 1);
666
667 return p;
668}
669
670
671/* Generate RTL to return directly from the current function.
672 (That is, we bypass any return value.) */
673
674void
675expand_naked_return (void)
676{
677 rtx_code_label *end_label;
678
679 clear_pending_stack_adjust ();
680 do_pending_stack_adjust ();
681
682 end_label = naked_return_label((&x_rtl)->x_naked_return_label);
683 if (end_label == 0)
684 end_label = naked_return_label((&x_rtl)->x_naked_return_label) = gen_label_rtx ();
685
686 emit_jump (end_label);
687}
688
689/* Generate code to jump to LABEL if OP0 and OP1 are equal in mode MODE. PROB
690 is the probability of jumping to LABEL. */
691static void
692do_jump_if_equal (machine_mode mode, rtx op0, rtx op1, rtx_code_label *label,
693 int unsignedp, profile_probability prob)
694{
695 do_compare_rtx_and_jump (op0, op1, EQ, unsignedp, mode,
696 NULL_RTX(rtx) 0, NULLnullptr, label, prob);
697}
698
699/* Return the sum of probabilities of outgoing edges of basic block BB. */
700
701static profile_probability
702get_outgoing_edge_probs (basic_block bb)
703{
704 edge e;
705 edge_iterator ei;
706 profile_probability prob_sum = profile_probability::never ();
707 if (!bb)
708 return profile_probability::never ();
709 FOR_EACH_EDGE (e, ei, bb->succs)for ((ei) = ei_start_1 (&((bb->succs))); ei_cond ((ei)
, &(e)); ei_next (&(ei)))
710 prob_sum += e->probability;
711 return prob_sum;
712}
713
714/* Computes the conditional probability of jumping to a target if the branch
715 instruction is executed.
716 TARGET_PROB is the estimated probability of jumping to a target relative
717 to some basic block BB.
718 BASE_PROB is the probability of reaching the branch instruction relative
719 to the same basic block BB. */
720
721static inline profile_probability
722conditional_probability (profile_probability target_prob,
723 profile_probability base_prob)
724{
725 return target_prob / base_prob;
726}
727
728/* Generate a dispatch tabler, switching on INDEX_EXPR and jumping to
729 one of the labels in CASE_LIST or to the DEFAULT_LABEL.
730 MINVAL, MAXVAL, and RANGE are the extrema and range of the case
731 labels in CASE_LIST. STMT_BB is the basic block containing the statement.
732
733 First, a jump insn is emitted. First we try "casesi". If that
734 fails, try "tablejump". A target *must* have one of them (or both).
735
736 Then, a table with the target labels is emitted.
737
738 The process is unaware of the CFG. The caller has to fix up
739 the CFG itself. This is done in cfgexpand.c. */
740
741static void
742emit_case_dispatch_table (tree index_expr, tree index_type,
743 auto_vec<simple_case_node> &case_list,
744 rtx default_label,
745 edge default_edge, tree minval, tree maxval,
746 tree range, basic_block stmt_bb)
747{
748 int i, ncases;
749 rtx *labelvec;
750 rtx_insn *fallback_label = label_rtx (case_list[0].m_code_label);
751 rtx_code_label *table_label = gen_label_rtx ();
752 bool has_gaps = false;
753 profile_probability default_prob = default_edge ? default_edge->probability
754 : profile_probability::never ();
755 profile_probability base = get_outgoing_edge_probs (stmt_bb);
756 bool try_with_tablejump = false;
757
758 profile_probability new_default_prob = conditional_probability (default_prob,
759 base);
760
761 if (! try_casesi (index_type, index_expr, minval, range,
762 table_label, default_label, fallback_label,
763 new_default_prob))
764 {
765 /* Index jumptables from zero for suitable values of minval to avoid
766 a subtraction. For the rationale see:
767 "http://gcc.gnu.org/ml/gcc-patches/2001-10/msg01234.html". */
768 if (optimize_insn_for_speed_p ()
769 && compare_tree_int (minval, 0) > 0
770 && compare_tree_int (minval, 3) < 0)
771 {
772 minval = build_int_cst (index_type, 0);
773 range = maxval;
774 has_gaps = true;
775 }
776 try_with_tablejump = true;
777 }
778
779 /* Get table of labels to jump to, in order of case index. */
780
781 ncases = tree_to_shwi (range) + 1;
782 labelvec = XALLOCAVEC (rtx, ncases)((rtx *) __builtin_alloca(sizeof (rtx) * (ncases)));
783 memset (labelvec, 0, ncases * sizeof (rtx));
784
785 for (unsigned j = 0; j < case_list.length (); j++)
786 {
787 simple_case_node *n = &case_list[j];
788 /* Compute the low and high bounds relative to the minimum
789 value since that should fit in a HOST_WIDE_INT while the
790 actual values may not. */
791 HOST_WIDE_INTlong i_low
792 = tree_to_uhwi (fold_build2 (MINUS_EXPR, index_type,fold_build2_loc (((location_t) 0), MINUS_EXPR, index_type, n->
m_low, minval )
793 n->m_low, minval)fold_build2_loc (((location_t) 0), MINUS_EXPR, index_type, n->
m_low, minval )
);
794 HOST_WIDE_INTlong i_high
795 = tree_to_uhwi (fold_build2 (MINUS_EXPR, index_type,fold_build2_loc (((location_t) 0), MINUS_EXPR, index_type, n->
m_high, minval )
796 n->m_high, minval)fold_build2_loc (((location_t) 0), MINUS_EXPR, index_type, n->
m_high, minval )
);
797 HOST_WIDE_INTlong i;
798
799 for (i = i_low; i <= i_high; i ++)
800 labelvec[i]
801 = gen_rtx_LABEL_REF (Pmode, label_rtx (n->m_code_label))gen_rtx_fmt_u_stat ((LABEL_REF), (((global_options.x_ix86_pmode
== PMODE_DI ? (scalar_int_mode ((scalar_int_mode::from_int) E_DImode
)) : (scalar_int_mode ((scalar_int_mode::from_int) E_SImode))
))), ((label_rtx (n->m_code_label))) )
;
802 }
803
804 /* The dispatch table may contain gaps, including at the beginning of
805 the table if we tried to avoid the minval subtraction. We fill the
806 dispatch table slots associated with the gaps with the default case label.
807 However, in the event the default case is unreachable, we then use
808 any label from one of the case statements. */
809 rtx gap_label = (default_label) ? default_label : fallback_label;
810
811 for (i = 0; i < ncases; i++)
812 if (labelvec[i] == 0)
813 {
814 has_gaps = true;
815 labelvec[i] = gen_rtx_LABEL_REF (Pmode, gap_label)gen_rtx_fmt_u_stat ((LABEL_REF), (((global_options.x_ix86_pmode
== PMODE_DI ? (scalar_int_mode ((scalar_int_mode::from_int) E_DImode
)) : (scalar_int_mode ((scalar_int_mode::from_int) E_SImode))
))), ((gap_label)) )
;
816 }
817
818 if (has_gaps && default_label)
819 {
820 /* There is at least one entry in the jump table that jumps
821 to default label. The default label can either be reached
822 through the indirect jump or the direct conditional jump
823 before that. Split the probability of reaching the
824 default label among these two jumps. */
825 new_default_prob
826 = conditional_probability (default_prob.apply_scale (1, 2), base);
827 default_prob = default_prob.apply_scale (1, 2);
828 base -= default_prob;
829 }
830 else
831 {
832 base -= default_prob;
833 default_prob = profile_probability::never ();
834 }
835
836 if (default_edge)
837 default_edge->probability = default_prob;
838
839 /* We have altered the probability of the default edge. So the probabilities
840 of all other edges need to be adjusted so that it sums up to
841 REG_BR_PROB_BASE. */
842 if (base > profile_probability::never ())
843 {
844 edge e;
845 edge_iterator ei;
846 FOR_EACH_EDGE (e, ei, stmt_bb->succs)for ((ei) = ei_start_1 (&((stmt_bb->succs))); ei_cond (
(ei), &(e)); ei_next (&(ei)))
847 e->probability /= base;
848 }
849
850 if (try_with_tablejump)
851 {
852 bool ok = try_tablejump (index_type, index_expr, minval, range,
853 table_label, default_label, new_default_prob);
854 gcc_assert (ok)((void)(!(ok) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stmt.c"
, 854, __FUNCTION__), 0 : 0))
;
855 }
856 /* Output the table. */
857 emit_label (table_label);
858
859 if (CASE_VECTOR_PC_RELATIVE0
860 || (flag_picglobal_options.x_flag_pic && targetm.asm_out.generate_pic_addr_diff_vec ()))
861 emit_jump_table_data (gen_rtx_ADDR_DIFF_VEC (CASE_VECTOR_MODE,gen_rtx_fmt_eEee0_stat ((ADDR_DIFF_VEC), (((!((global_options
.x_ix86_isa_flags & (1UL << 4)) != 0) || (global_options
.x_flag_pic && global_options.x_ix86_cmodel != CM_LARGE_PIC
) ? (scalar_int_mode ((scalar_int_mode::from_int) E_SImode)) :
(scalar_int_mode ((scalar_int_mode::from_int) E_DImode))))),
((gen_rtx_fmt_u_stat ((LABEL_REF), (((global_options.x_ix86_pmode
== PMODE_DI ? (scalar_int_mode ((scalar_int_mode::from_int) E_DImode
)) : (scalar_int_mode ((scalar_int_mode::from_int) E_SImode))
))), ((table_label)) ))), ((gen_rtvec_v (ncases, labelvec))),
(((const_int_rtx[64]))), (((const_int_rtx[64]))) )
862 gen_rtx_LABEL_REF (Pmode,gen_rtx_fmt_eEee0_stat ((ADDR_DIFF_VEC), (((!((global_options
.x_ix86_isa_flags & (1UL << 4)) != 0) || (global_options
.x_flag_pic && global_options.x_ix86_cmodel != CM_LARGE_PIC
) ? (scalar_int_mode ((scalar_int_mode::from_int) E_SImode)) :
(scalar_int_mode ((scalar_int_mode::from_int) E_DImode))))),
((gen_rtx_fmt_u_stat ((LABEL_REF), (((global_options.x_ix86_pmode
== PMODE_DI ? (scalar_int_mode ((scalar_int_mode::from_int) E_DImode
)) : (scalar_int_mode ((scalar_int_mode::from_int) E_SImode))
))), ((table_label)) ))), ((gen_rtvec_v (ncases, labelvec))),
(((const_int_rtx[64]))), (((const_int_rtx[64]))) )
863 table_label),gen_rtx_fmt_eEee0_stat ((ADDR_DIFF_VEC), (((!((global_options
.x_ix86_isa_flags & (1UL << 4)) != 0) || (global_options
.x_flag_pic && global_options.x_ix86_cmodel != CM_LARGE_PIC
) ? (scalar_int_mode ((scalar_int_mode::from_int) E_SImode)) :
(scalar_int_mode ((scalar_int_mode::from_int) E_DImode))))),
((gen_rtx_fmt_u_stat ((LABEL_REF), (((global_options.x_ix86_pmode
== PMODE_DI ? (scalar_int_mode ((scalar_int_mode::from_int) E_DImode
)) : (scalar_int_mode ((scalar_int_mode::from_int) E_SImode))
))), ((table_label)) ))), ((gen_rtvec_v (ncases, labelvec))),
(((const_int_rtx[64]))), (((const_int_rtx[64]))) )
864 gen_rtvec_v (ncases, labelvec),gen_rtx_fmt_eEee0_stat ((ADDR_DIFF_VEC), (((!((global_options
.x_ix86_isa_flags & (1UL << 4)) != 0) || (global_options
.x_flag_pic && global_options.x_ix86_cmodel != CM_LARGE_PIC
) ? (scalar_int_mode ((scalar_int_mode::from_int) E_SImode)) :
(scalar_int_mode ((scalar_int_mode::from_int) E_DImode))))),
((gen_rtx_fmt_u_stat ((LABEL_REF), (((global_options.x_ix86_pmode
== PMODE_DI ? (scalar_int_mode ((scalar_int_mode::from_int) E_DImode
)) : (scalar_int_mode ((scalar_int_mode::from_int) E_SImode))
))), ((table_label)) ))), ((gen_rtvec_v (ncases, labelvec))),
(((const_int_rtx[64]))), (((const_int_rtx[64]))) )
865 const0_rtx, const0_rtx)gen_rtx_fmt_eEee0_stat ((ADDR_DIFF_VEC), (((!((global_options
.x_ix86_isa_flags & (1UL << 4)) != 0) || (global_options
.x_flag_pic && global_options.x_ix86_cmodel != CM_LARGE_PIC
) ? (scalar_int_mode ((scalar_int_mode::from_int) E_SImode)) :
(scalar_int_mode ((scalar_int_mode::from_int) E_DImode))))),
((gen_rtx_fmt_u_stat ((LABEL_REF), (((global_options.x_ix86_pmode
== PMODE_DI ? (scalar_int_mode ((scalar_int_mode::from_int) E_DImode
)) : (scalar_int_mode ((scalar_int_mode::from_int) E_SImode))
))), ((table_label)) ))), ((gen_rtvec_v (ncases, labelvec))),
(((const_int_rtx[64]))), (((const_int_rtx[64]))) )
);
866 else
867 emit_jump_table_data (gen_rtx_ADDR_VEC (CASE_VECTOR_MODE,gen_rtx_fmt_E_stat ((ADDR_VEC), (((!((global_options.x_ix86_isa_flags
& (1UL << 4)) != 0) || (global_options.x_flag_pic &&
global_options.x_ix86_cmodel != CM_LARGE_PIC) ? (scalar_int_mode
((scalar_int_mode::from_int) E_SImode)) : (scalar_int_mode (
(scalar_int_mode::from_int) E_DImode))))), ((gen_rtvec_v (ncases
, labelvec))) )
868 gen_rtvec_v (ncases, labelvec))gen_rtx_fmt_E_stat ((ADDR_VEC), (((!((global_options.x_ix86_isa_flags
& (1UL << 4)) != 0) || (global_options.x_flag_pic &&
global_options.x_ix86_cmodel != CM_LARGE_PIC) ? (scalar_int_mode
((scalar_int_mode::from_int) E_SImode)) : (scalar_int_mode (
(scalar_int_mode::from_int) E_DImode))))), ((gen_rtvec_v (ncases
, labelvec))) )
);
869
870 /* Record no drop-through after the table. */
871 emit_barrier ();
872}
873
874/* Terminate a case Ada or switch (C) statement
875 in which ORIG_INDEX is the expression to be tested.
876 If ORIG_TYPE is not NULL, it is the original ORIG_INDEX
877 type as given in the source before any compiler conversions.
878 Generate the code to test it and jump to the right place. */
879
880void
881expand_case (gswitch *stmt)
882{
883 tree minval = NULL_TREE(tree) nullptr, maxval = NULL_TREE(tree) nullptr, range = NULL_TREE(tree) nullptr;
884 rtx_code_label *default_label;
885 unsigned int count;
886 int i;
887 int ncases = gimple_switch_num_labels (stmt);
888 tree index_expr = gimple_switch_index (stmt);
889 tree index_type = TREE_TYPE (index_expr)((contains_struct_check ((index_expr), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stmt.c"
, 889, __FUNCTION__))->typed.type)
;
890 tree elt;
891 basic_block bb = gimple_bb (stmt);
892 gimple *def_stmt;
893
894 auto_vec<simple_case_node> case_list;
895
896 /* An ERROR_MARK occurs for various reasons including invalid data type.
897 ??? Can this still happen, with GIMPLE and all? */
898 if (index_type == error_mark_nodeglobal_trees[TI_ERROR_MARK])
899 return;
900
901 /* cleanup_tree_cfg removes all SWITCH_EXPR with their index
902 expressions being INTEGER_CST. */
903 gcc_assert (TREE_CODE (index_expr) != INTEGER_CST)((void)(!(((enum tree_code) (index_expr)->base.code) != INTEGER_CST
) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stmt.c"
, 903, __FUNCTION__), 0 : 0))
;
904
905 /* Optimization of switch statements with only one label has already
906 occurred, so we should never see them at this point. */
907 gcc_assert (ncases > 1)((void)(!(ncases > 1) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stmt.c"
, 907, __FUNCTION__), 0 : 0))
;
908
909 do_pending_stack_adjust ();
910
911 /* Find the default case target label. */
912 tree default_lab = CASE_LABEL (gimple_switch_default_label (stmt))(*((const_cast<tree*> (tree_operand_check (((tree_check
((gimple_switch_default_label (stmt)), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stmt.c"
, 912, __FUNCTION__, (CASE_LABEL_EXPR)))), (2), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stmt.c"
, 912, __FUNCTION__)))))
;
913 default_label = jump_target_rtx (default_lab);
914 basic_block default_bb = label_to_block (cfun(cfun + 0), default_lab);
915 edge default_edge = find_edge (bb, default_bb);
916
917 /* Get upper and lower bounds of case values. */
918 elt = gimple_switch_label (stmt, 1);
919 minval = fold_convert (index_type, CASE_LOW (elt))fold_convert_loc (((location_t) 0), index_type, (*((const_cast
<tree*> (tree_operand_check (((tree_check ((elt), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stmt.c"
, 919, __FUNCTION__, (CASE_LABEL_EXPR)))), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stmt.c"
, 919, __FUNCTION__))))))
;
920 elt = gimple_switch_label (stmt, ncases - 1);
921 if (CASE_HIGH (elt)(*((const_cast<tree*> (tree_operand_check (((tree_check
((elt), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stmt.c"
, 921, __FUNCTION__, (CASE_LABEL_EXPR)))), (1), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stmt.c"
, 921, __FUNCTION__)))))
)
922 maxval = fold_convert (index_type, CASE_HIGH (elt))fold_convert_loc (((location_t) 0), index_type, (*((const_cast
<tree*> (tree_operand_check (((tree_check ((elt), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stmt.c"
, 922, __FUNCTION__, (CASE_LABEL_EXPR)))), (1), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stmt.c"
, 922, __FUNCTION__))))))
;
923 else
924 maxval = fold_convert (index_type, CASE_LOW (elt))fold_convert_loc (((location_t) 0), index_type, (*((const_cast
<tree*> (tree_operand_check (((tree_check ((elt), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stmt.c"
, 924, __FUNCTION__, (CASE_LABEL_EXPR)))), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stmt.c"
, 924, __FUNCTION__))))))
;
925
926 /* Try to narrow the index type if it's larger than a word.
927 That is mainly for -O0 where an equivalent optimization
928 done by forward propagation is not run and is aimed at
929 avoiding a call to a comparison routine of libgcc. */
930 if (TYPE_PRECISION (index_type)((tree_class_check ((index_type), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stmt.c"
, 930, __FUNCTION__))->type_common.precision)
> BITS_PER_WORD((8) * (((global_options.x_ix86_isa_flags & (1UL <<
1)) != 0) ? 8 : 4))
931 && TREE_CODE (index_expr)((enum tree_code) (index_expr)->base.code) == SSA_NAME
932 && (def_stmt = SSA_NAME_DEF_STMT (index_expr)(tree_check ((index_expr), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stmt.c"
, 932, __FUNCTION__, (SSA_NAME)))->ssa_name.def_stmt
)
933 && is_gimple_assign (def_stmt)
934 && gimple_assign_rhs_code (def_stmt) == NOP_EXPR)
935 {
936 tree inner_index_expr = gimple_assign_rhs1 (def_stmt);
937 tree inner_index_type = TREE_TYPE (inner_index_expr)((contains_struct_check ((inner_index_expr), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stmt.c"
, 937, __FUNCTION__))->typed.type)
;
938
939 if (INTEGRAL_TYPE_P (inner_index_type)(((enum tree_code) (inner_index_type)->base.code) == ENUMERAL_TYPE
|| ((enum tree_code) (inner_index_type)->base.code) == BOOLEAN_TYPE
|| ((enum tree_code) (inner_index_type)->base.code) == INTEGER_TYPE
)
940 && TYPE_PRECISION (inner_index_type)((tree_class_check ((inner_index_type), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stmt.c"
, 940, __FUNCTION__))->type_common.precision)
<= BITS_PER_WORD((8) * (((global_options.x_ix86_isa_flags & (1UL <<
1)) != 0) ? 8 : 4))
941 && int_fits_type_p (minval, inner_index_type)
942 && int_fits_type_p (maxval, inner_index_type))
943 {
944 index_expr = inner_index_expr;
945 index_type = inner_index_type;
946 minval = fold_convert (index_type, minval)fold_convert_loc (((location_t) 0), index_type, minval);
947 maxval = fold_convert (index_type, maxval)fold_convert_loc (((location_t) 0), index_type, maxval);
948 }
949 }
950
951 /* Compute span of values. */
952 range = fold_build2 (MINUS_EXPR, index_type, maxval, minval)fold_build2_loc (((location_t) 0), MINUS_EXPR, index_type, maxval
, minval )
;
953
954 /* Listify the labels queue and gather some numbers to decide
955 how to expand this switch(). */
956 count = 0;
957
958 for (i = ncases - 1; i >= 1; --i)
959 {
960 elt = gimple_switch_label (stmt, i);
961 tree low = CASE_LOW (elt)(*((const_cast<tree*> (tree_operand_check (((tree_check
((elt), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stmt.c"
, 961, __FUNCTION__, (CASE_LABEL_EXPR)))), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stmt.c"
, 961, __FUNCTION__)))))
;
962 gcc_assert (low)((void)(!(low) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stmt.c"
, 962, __FUNCTION__), 0 : 0))
;
963 tree high = CASE_HIGH (elt)(*((const_cast<tree*> (tree_operand_check (((tree_check
((elt), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stmt.c"
, 963, __FUNCTION__, (CASE_LABEL_EXPR)))), (1), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stmt.c"
, 963, __FUNCTION__)))))
;
964 gcc_assert (! high || tree_int_cst_lt (low, high))((void)(!(! high || tree_int_cst_lt (low, high)) ? fancy_abort
("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stmt.c"
, 964, __FUNCTION__), 0 : 0))
;
965 tree lab = CASE_LABEL (elt)(*((const_cast<tree*> (tree_operand_check (((tree_check
((elt), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stmt.c"
, 965, __FUNCTION__, (CASE_LABEL_EXPR)))), (2), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stmt.c"
, 965, __FUNCTION__)))))
;
966
967 /* Count the elements.
968 A range counts double, since it requires two compares. */
969 count++;
970 if (high)
971 count++;
972
973 /* The bounds on the case range, LOW and HIGH, have to be converted
974 to case's index type TYPE. Note that the original type of the
975 case index in the source code is usually "lost" during
976 gimplification due to type promotion, but the case labels retain the
977 original type. Make sure to drop overflow flags. */
978 low = fold_convert (index_type, low)fold_convert_loc (((location_t) 0), index_type, low);
979 if (TREE_OVERFLOW (low)((tree_class_check ((low), (tcc_constant), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stmt.c"
, 979, __FUNCTION__))->base.public_flag)
)
980 low = wide_int_to_tree (index_type, wi::to_wide (low));
981
982 /* The canonical from of a case label in GIMPLE is that a simple case
983 has an empty CASE_HIGH. For the casesi and tablejump expanders,
984 the back ends want simple cases to have high == low. */
985 if (! high)
986 high = low;
987 high = fold_convert (index_type, high)fold_convert_loc (((location_t) 0), index_type, high);
988 if (TREE_OVERFLOW (high)((tree_class_check ((high), (tcc_constant), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stmt.c"
, 988, __FUNCTION__))->base.public_flag)
)
989 high = wide_int_to_tree (index_type, wi::to_wide (high));
990
991 case_list.safe_push (simple_case_node (low, high, lab));
992 }
993
994 /* cleanup_tree_cfg removes all SWITCH_EXPR with a single
995 destination, such as one with a default case only.
996 It also removes cases that are out of range for the switch
997 type, so we should never get a zero here. */
998 gcc_assert (count > 0)((void)(!(count > 0) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stmt.c"
, 998, __FUNCTION__), 0 : 0))
;
999
1000 rtx_insn *before_case = get_last_insn ();
1001
1002 /* If the default case is unreachable, then set default_label to NULL
1003 so that we omit the range check when generating the dispatch table.
1004 We also remove the edge to the unreachable default case. The block
1005 itself will be automatically removed later. */
1006 if (EDGE_COUNT (default_edge->dest->succs)vec_safe_length (default_edge->dest->succs) == 0
1007 && gimple_seq_unreachable_p (bb_seq (default_edge->dest)))
1008 {
1009 default_label = NULLnullptr;
1010 remove_edge (default_edge);
1011 default_edge = NULLnullptr;
1012 }
1013
1014 emit_case_dispatch_table (index_expr, index_type,
1015 case_list, default_label, default_edge,
1016 minval, maxval, range, bb);
1017
1018 reorder_insns (NEXT_INSN (before_case), get_last_insn (), before_case);
1019
1020 free_temp_slots ();
1021}
1022
1023/* Expand the dispatch to a short decrement chain if there are few cases
1024 to dispatch to. Likewise if neither casesi nor tablejump is available,
1025 or if flag_jump_tables is set. Otherwise, expand as a casesi or a
1026 tablejump. The index mode is always the mode of integer_type_node.
1027 Trap if no case matches the index.
1028
1029 DISPATCH_INDEX is the index expression to switch on. It should be a
1030 memory or register operand.
1031
1032 DISPATCH_TABLE is a set of case labels. The set should be sorted in
1033 ascending order, be contiguous, starting with value 0, and contain only
1034 single-valued case labels. */
1035
1036void
1037expand_sjlj_dispatch_table (rtx dispatch_index,
1038 vec<tree> dispatch_table)
1039{
1040 tree index_type = integer_type_nodeinteger_types[itk_int];
1041 machine_mode index_mode = TYPE_MODE (index_type)((((enum tree_code) ((tree_class_check ((index_type), (tcc_type
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stmt.c"
, 1041, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(index_type) : (index_type)->type_common.mode)
;
1042
1043 int ncases = dispatch_table.length ();
1044
1045 do_pending_stack_adjust ();
1046 rtx_insn *before_case = get_last_insn ();
1047
1048 /* Expand as a decrement-chain if there are 5 or fewer dispatch
1049 labels. This covers more than 98% of the cases in libjava,
1050 and seems to be a reasonable compromise between the "old way"
1051 of expanding as a decision tree or dispatch table vs. the "new
1052 way" with decrement chain or dispatch table. */
1053 if (dispatch_table.length () <= 5
1054 || (!targetm.have_casesi () && !targetm.have_tablejump ())
1055 || !flag_jump_tablesglobal_options.x_flag_jump_tables)
1056 {
1057 /* Expand the dispatch as a decrement chain:
1058
1059 "switch(index) {case 0: do_0; case 1: do_1; ...; case N: do_N;}"
1060
1061 ==>
1062
1063 if (index == 0) do_0; else index--;
1064 if (index == 0) do_1; else index--;
1065 ...
1066 if (index == 0) do_N; else index--;
1067
1068 This is more efficient than a dispatch table on most machines.
1069 The last "index--" is redundant but the code is trivially dead
1070 and will be cleaned up by later passes. */
1071 rtx index = copy_to_mode_reg (index_mode, dispatch_index);
1072 rtx zero = CONST0_RTX (index_mode)(const_tiny_rtx[0][(int) (index_mode)]);
1073 for (int i = 0; i < ncases; i++)
1074 {
1075 tree elt = dispatch_table[i];
1076 rtx_code_label *lab = jump_target_rtx (CASE_LABEL (elt)(*((const_cast<tree*> (tree_operand_check (((tree_check
((elt), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stmt.c"
, 1076, __FUNCTION__, (CASE_LABEL_EXPR)))), (2), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stmt.c"
, 1076, __FUNCTION__)))))
);
1077 do_jump_if_equal (index_mode, index, zero, lab, 0,
1078 profile_probability::uninitialized ());
1079 force_expand_binop (index_mode, sub_optab,
1080 index, CONST1_RTX (index_mode)(const_tiny_rtx[1][(int) (index_mode)]),
1081 index, 0, OPTAB_DIRECT);
1082 }
1083 }
1084 else
1085 {
1086 /* Similar to expand_case, but much simpler. */
1087 auto_vec<simple_case_node> case_list;
1088 tree index_expr = make_tree (index_type, dispatch_index);
1089 tree minval = build_int_cst (index_type, 0);
1090 tree maxval = CASE_LOW (dispatch_table.last ())(*((const_cast<tree*> (tree_operand_check (((tree_check
((dispatch_table.last ()), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stmt.c"
, 1090, __FUNCTION__, (CASE_LABEL_EXPR)))), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stmt.c"
, 1090, __FUNCTION__)))))
;
1091 tree range = maxval;
1092 rtx_code_label *default_label = gen_label_rtx ();
1093
1094 for (int i = ncases - 1; i >= 0; --i)
1095 {
1096 tree elt = dispatch_table[i];
1097 tree high = CASE_HIGH (elt)(*((const_cast<tree*> (tree_operand_check (((tree_check
((elt), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stmt.c"
, 1097, __FUNCTION__, (CASE_LABEL_EXPR)))), (1), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stmt.c"
, 1097, __FUNCTION__)))))
;
1098 if (high == NULL_TREE(tree) nullptr)
1099 high = CASE_LOW (elt)(*((const_cast<tree*> (tree_operand_check (((tree_check
((elt), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stmt.c"
, 1099, __FUNCTION__, (CASE_LABEL_EXPR)))), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stmt.c"
, 1099, __FUNCTION__)))))
;
1100 case_list.safe_push (simple_case_node (CASE_LOW (elt)(*((const_cast<tree*> (tree_operand_check (((tree_check
((elt), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stmt.c"
, 1100, __FUNCTION__, (CASE_LABEL_EXPR)))), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stmt.c"
, 1100, __FUNCTION__)))))
, high,
1101 CASE_LABEL (elt)(*((const_cast<tree*> (tree_operand_check (((tree_check
((elt), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stmt.c"
, 1101, __FUNCTION__, (CASE_LABEL_EXPR)))), (2), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/stmt.c"
, 1101, __FUNCTION__)))))
));
1102 }
1103
1104 emit_case_dispatch_table (index_expr, index_type,
1105 case_list, default_label, NULLnullptr,
1106 minval, maxval, range,
1107 BLOCK_FOR_INSN (before_case));
1108 emit_label (default_label);
1109 }
1110
1111 /* Dispatching something not handled? Trap! */
1112 expand_builtin_trap ();
1113
1114 reorder_insns (NEXT_INSN (before_case), get_last_insn (), before_case);
1115
1116 free_temp_slots ();
1117}
1118
1119

/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/vec.h

1/* Vector API for GNU compiler.
2 Copyright (C) 2004-2021 Free Software Foundation, Inc.
3 Contributed by Nathan Sidwell <nathan@codesourcery.com>
4 Re-implemented in C++ by Diego Novillo <dnovillo@google.com>
5
6This file is part of GCC.
7
8GCC is free software; you can redistribute it and/or modify it under
9the terms of the GNU General Public License as published by the Free
10Software Foundation; either version 3, or (at your option) any later
11version.
12
13GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14WARRANTY; without even the implied warranty of MERCHANTABILITY or
15FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16for more details.
17
18You should have received a copy of the GNU General Public License
19along with GCC; see the file COPYING3. If not see
20<http://www.gnu.org/licenses/>. */
21
22#ifndef GCC_VEC_H
23#define GCC_VEC_H
24
25/* Some gen* file have no ggc support as the header file gtype-desc.h is
26 missing. Provide these definitions in case ggc.h has not been included.
27 This is not a problem because any code that runs before gengtype is built
28 will never need to use GC vectors.*/
29
30extern void ggc_free (void *);
31extern size_t ggc_round_alloc_size (size_t requested_size);
32extern void *ggc_realloc (void *, size_t MEM_STAT_DECL);
33
34/* Templated vector type and associated interfaces.
35
36 The interface functions are typesafe and use inline functions,
37 sometimes backed by out-of-line generic functions. The vectors are
38 designed to interoperate with the GTY machinery.
39
40 There are both 'index' and 'iterate' accessors. The index accessor
41 is implemented by operator[]. The iterator returns a boolean
42 iteration condition and updates the iteration variable passed by
43 reference. Because the iterator will be inlined, the address-of
44 can be optimized away.
45
46 Each operation that increases the number of active elements is
47 available in 'quick' and 'safe' variants. The former presumes that
48 there is sufficient allocated space for the operation to succeed
49 (it dies if there is not). The latter will reallocate the
50 vector, if needed. Reallocation causes an exponential increase in
51 vector size. If you know you will be adding N elements, it would
52 be more efficient to use the reserve operation before adding the
53 elements with the 'quick' operation. This will ensure there are at
54 least as many elements as you ask for, it will exponentially
55 increase if there are too few spare slots. If you want reserve a
56 specific number of slots, but do not want the exponential increase
57 (for instance, you know this is the last allocation), use the
58 reserve_exact operation. You can also create a vector of a
59 specific size from the get go.
60
61 You should prefer the push and pop operations, as they append and
62 remove from the end of the vector. If you need to remove several
63 items in one go, use the truncate operation. The insert and remove
64 operations allow you to change elements in the middle of the
65 vector. There are two remove operations, one which preserves the
66 element ordering 'ordered_remove', and one which does not
67 'unordered_remove'. The latter function copies the end element
68 into the removed slot, rather than invoke a memmove operation. The
69 'lower_bound' function will determine where to place an item in the
70 array using insert that will maintain sorted order.
71
72 Vectors are template types with three arguments: the type of the
73 elements in the vector, the allocation strategy, and the physical
74 layout to use
75
76 Four allocation strategies are supported:
77
78 - Heap: allocation is done using malloc/free. This is the
79 default allocation strategy.
80
81 - GC: allocation is done using ggc_alloc/ggc_free.
82
83 - GC atomic: same as GC with the exception that the elements
84 themselves are assumed to be of an atomic type that does
85 not need to be garbage collected. This means that marking
86 routines do not need to traverse the array marking the
87 individual elements. This increases the performance of
88 GC activities.
89
90 Two physical layouts are supported:
91
92 - Embedded: The vector is structured using the trailing array
93 idiom. The last member of the structure is an array of size
94 1. When the vector is initially allocated, a single memory
95 block is created to hold the vector's control data and the
96 array of elements. These vectors cannot grow without
97 reallocation (see discussion on embeddable vectors below).
98
99 - Space efficient: The vector is structured as a pointer to an
100 embedded vector. This is the default layout. It means that
101 vectors occupy a single word of storage before initial
102 allocation. Vectors are allowed to grow (the internal
103 pointer is reallocated but the main vector instance does not
104 need to relocate).
105
106 The type, allocation and layout are specified when the vector is
107 declared.
108
109 If you need to directly manipulate a vector, then the 'address'
110 accessor will return the address of the start of the vector. Also
111 the 'space' predicate will tell you whether there is spare capacity
112 in the vector. You will not normally need to use these two functions.
113
114 Notes on the different layout strategies
115
116 * Embeddable vectors (vec<T, A, vl_embed>)
117
118 These vectors are suitable to be embedded in other data
119 structures so that they can be pre-allocated in a contiguous
120 memory block.
121
122 Embeddable vectors are implemented using the trailing array
123 idiom, thus they are not resizeable without changing the address
124 of the vector object itself. This means you cannot have
125 variables or fields of embeddable vector type -- always use a
126 pointer to a vector. The one exception is the final field of a
127 structure, which could be a vector type.
128
129 You will have to use the embedded_size & embedded_init calls to
130 create such objects, and they will not be resizeable (so the
131 'safe' allocation variants are not available).
132
133 Properties of embeddable vectors:
134
135 - The whole vector and control data are allocated in a single
136 contiguous block. It uses the trailing-vector idiom, so
137 allocation must reserve enough space for all the elements
138 in the vector plus its control data.
139 - The vector cannot be re-allocated.
140 - The vector cannot grow nor shrink.
141 - No indirections needed for access/manipulation.
142 - It requires 2 words of storage (prior to vector allocation).
143
144
145 * Space efficient vector (vec<T, A, vl_ptr>)
146
147 These vectors can grow dynamically and are allocated together
148 with their control data. They are suited to be included in data
149 structures. Prior to initial allocation, they only take a single
150 word of storage.
151
152 These vectors are implemented as a pointer to embeddable vectors.
153 The semantics allow for this pointer to be NULL to represent
154 empty vectors. This way, empty vectors occupy minimal space in
155 the structure containing them.
156
157 Properties:
158
159 - The whole vector and control data are allocated in a single
160 contiguous block.
161 - The whole vector may be re-allocated.
162 - Vector data may grow and shrink.
163 - Access and manipulation requires a pointer test and
164 indirection.
165 - It requires 1 word of storage (prior to vector allocation).
166
167 An example of their use would be,
168
169 struct my_struct {
170 // A space-efficient vector of tree pointers in GC memory.
171 vec<tree, va_gc, vl_ptr> v;
172 };
173
174 struct my_struct *s;
175
176 if (s->v.length ()) { we have some contents }
177 s->v.safe_push (decl); // append some decl onto the end
178 for (ix = 0; s->v.iterate (ix, &elt); ix++)
179 { do something with elt }
180*/
181
182/* Support function for statistics. */
183extern void dump_vec_loc_statistics (void);
184
185/* Hashtable mapping vec addresses to descriptors. */
186extern htab_t vec_mem_usage_hash;
187
188/* Control data for vectors. This contains the number of allocated
189 and used slots inside a vector. */
190
191struct vec_prefix
192{
193 /* FIXME - These fields should be private, but we need to cater to
194 compilers that have stricter notions of PODness for types. */
195
196 /* Memory allocation support routines in vec.c. */
197 void register_overhead (void *, size_t, size_t CXX_MEM_STAT_INFO);
198 void release_overhead (void *, size_t, size_t, bool CXX_MEM_STAT_INFO);
199 static unsigned calculate_allocation (vec_prefix *, unsigned, bool);
200 static unsigned calculate_allocation_1 (unsigned, unsigned);
201
202 /* Note that vec_prefix should be a base class for vec, but we use
203 offsetof() on vector fields of tree structures (e.g.,
204 tree_binfo::base_binfos), and offsetof only supports base types.
205
206 To compensate, we make vec_prefix a field inside vec and make
207 vec a friend class of vec_prefix so it can access its fields. */
208 template <typename, typename, typename> friend struct vec;
209
210 /* The allocator types also need access to our internals. */
211 friend struct va_gc;
212 friend struct va_gc_atomic;
213 friend struct va_heap;
214
215 unsigned m_alloc : 31;
216 unsigned m_using_auto_storage : 1;
217 unsigned m_num;
218};
219
220/* Calculate the number of slots to reserve a vector, making sure that
221 RESERVE slots are free. If EXACT grow exactly, otherwise grow
222 exponentially. PFX is the control data for the vector. */
223
224inline unsigned
225vec_prefix::calculate_allocation (vec_prefix *pfx, unsigned reserve,
226 bool exact)
227{
228 if (exact
19.1
'exact' is false
19.1
'exact' is false
)
20
Taking false branch
229 return (pfx ? pfx->m_num : 0) + reserve;
230 else if (!pfx
20.1
'pfx' is non-null, which participates in a condition later
20.1
'pfx' is non-null, which participates in a condition later
)
21
Taking false branch
231 return MAX (4, reserve)((4) > (reserve) ? (4) : (reserve));
232 return calculate_allocation_1 (pfx->m_alloc, pfx->m_num + reserve);
22
Returning value, which participates in a condition later
233}
234
235template<typename, typename, typename> struct vec;
236
237/* Valid vector layouts
238
239 vl_embed - Embeddable vector that uses the trailing array idiom.
240 vl_ptr - Space efficient vector that uses a pointer to an
241 embeddable vector. */
242struct vl_embed { };
243struct vl_ptr { };
244
245
246/* Types of supported allocations
247
248 va_heap - Allocation uses malloc/free.
249 va_gc - Allocation uses ggc_alloc.
250 va_gc_atomic - Same as GC, but individual elements of the array
251 do not need to be marked during collection. */
252
253/* Allocator type for heap vectors. */
254struct va_heap
255{
256 /* Heap vectors are frequently regular instances, so use the vl_ptr
257 layout for them. */
258 typedef vl_ptr default_layout;
259
260 template<typename T>
261 static void reserve (vec<T, va_heap, vl_embed> *&, unsigned, bool
262 CXX_MEM_STAT_INFO);
263
264 template<typename T>
265 static void release (vec<T, va_heap, vl_embed> *&);
266};
267
268
269/* Allocator for heap memory. Ensure there are at least RESERVE free
270 slots in V. If EXACT is true, grow exactly, else grow
271 exponentially. As a special case, if the vector had not been
272 allocated and RESERVE is 0, no vector will be created. */
273
274template<typename T>
275inline void
276va_heap::reserve (vec<T, va_heap, vl_embed> *&v, unsigned reserve, bool exact
277 MEM_STAT_DECL)
278{
279 size_t elt_size = sizeof (T);
280 unsigned alloc
281 = vec_prefix::calculate_allocation (v ? &v->m_vecpfx : 0, reserve, exact);
282 gcc_checking_assert (alloc)((void)(!(alloc) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/vec.h"
, 282, __FUNCTION__), 0 : 0))
;
283
284 if (GATHER_STATISTICS0 && v)
285 v->m_vecpfx.release_overhead (v, elt_size * v->allocated (),
286 v->allocated (), false);
287
288 size_t size = vec<T, va_heap, vl_embed>::embedded_size (alloc);
289 unsigned nelem = v ? v->length () : 0;
290 v = static_cast <vec<T, va_heap, vl_embed> *> (xrealloc (v, size));
291 v->embedded_init (alloc, nelem);
292
293 if (GATHER_STATISTICS0)
294 v->m_vecpfx.register_overhead (v, alloc, elt_size PASS_MEM_STAT);
295}
296
297
298#if GCC_VERSION(4 * 1000 + 2) >= 4007
299#pragma GCC diagnostic push
300#pragma GCC diagnostic ignored "-Wfree-nonheap-object"
301#endif
302
303/* Free the heap space allocated for vector V. */
304
305template<typename T>
306void
307va_heap::release (vec<T, va_heap, vl_embed> *&v)
308{
309 size_t elt_size = sizeof (T);
310 if (v == NULLnullptr)
311 return;
312
313 if (GATHER_STATISTICS0)
314 v->m_vecpfx.release_overhead (v, elt_size * v->allocated (),
315 v->allocated (), true);
316 ::free (v);
317 v = NULLnullptr;
318}
319
320#if GCC_VERSION(4 * 1000 + 2) >= 4007
321#pragma GCC diagnostic pop
322#endif
323
324/* Allocator type for GC vectors. Notice that we need the structure
325 declaration even if GC is not enabled. */
326
327struct va_gc
328{
329 /* Use vl_embed as the default layout for GC vectors. Due to GTY
330 limitations, GC vectors must always be pointers, so it is more
331 efficient to use a pointer to the vl_embed layout, rather than
332 using a pointer to a pointer as would be the case with vl_ptr. */
333 typedef vl_embed default_layout;
334
335 template<typename T, typename A>
336 static void reserve (vec<T, A, vl_embed> *&, unsigned, bool
337 CXX_MEM_STAT_INFO);
338
339 template<typename T, typename A>
340 static void release (vec<T, A, vl_embed> *&v);
341};
342
343
344/* Free GC memory used by V and reset V to NULL. */
345
346template<typename T, typename A>
347inline void
348va_gc::release (vec<T, A, vl_embed> *&v)
349{
350 if (v)
351 ::ggc_free (v);
352 v = NULLnullptr;
353}
354
355
356/* Allocator for GC memory. Ensure there are at least RESERVE free
357 slots in V. If EXACT is true, grow exactly, else grow
358 exponentially. As a special case, if the vector had not been
359 allocated and RESERVE is 0, no vector will be created. */
360
361template<typename T, typename A>
362void
363va_gc::reserve (vec<T, A, vl_embed> *&v, unsigned reserve, bool exact
364 MEM_STAT_DECL)
365{
366 unsigned alloc
367 = vec_prefix::calculate_allocation (v ? &v->m_vecpfx : 0, reserve, exact);
17
Assuming 'v' is non-null
18
'?' condition is true
19
Calling 'vec_prefix::calculate_allocation'
23
Returning from 'vec_prefix::calculate_allocation'
368 if (!alloc)
24
Assuming 'alloc' is 0, which participates in a condition later
25
Taking true branch
369 {
370 ::ggc_free (v);
371 v = NULLnullptr;
26
Null pointer value stored to 'x_rtl.expr.x_forced_labels'
372 return;
373 }
374
375 /* Calculate the amount of space we want. */
376 size_t size = vec<T, A, vl_embed>::embedded_size (alloc);
377
378 /* Ask the allocator how much space it will really give us. */
379 size = ::ggc_round_alloc_size (size);
380
381 /* Adjust the number of slots accordingly. */
382 size_t vec_offset = sizeof (vec_prefix);
383 size_t elt_size = sizeof (T);
384 alloc = (size - vec_offset) / elt_size;
385
386 /* And finally, recalculate the amount of space we ask for. */
387 size = vec_offset + alloc * elt_size;
388
389 unsigned nelem = v ? v->length () : 0;
390 v = static_cast <vec<T, A, vl_embed> *> (::ggc_realloc (v, size
391 PASS_MEM_STAT));
392 v->embedded_init (alloc, nelem);
393}
394
395
396/* Allocator type for GC vectors. This is for vectors of types
397 atomics w.r.t. collection, so allocation and deallocation is
398 completely inherited from va_gc. */
399struct va_gc_atomic : va_gc
400{
401};
402
403
404/* Generic vector template. Default values for A and L indicate the
405 most commonly used strategies.
406
407 FIXME - Ideally, they would all be vl_ptr to encourage using regular
408 instances for vectors, but the existing GTY machinery is limited
409 in that it can only deal with GC objects that are pointers
410 themselves.
411
412 This means that vector operations that need to deal with
413 potentially NULL pointers, must be provided as free
414 functions (see the vec_safe_* functions above). */
415template<typename T,
416 typename A = va_heap,
417 typename L = typename A::default_layout>
418struct GTY((user)) vec
419{
420};
421
422/* Allow C++11 range-based 'for' to work directly on vec<T>*. */
423template<typename T, typename A, typename L>
424T* begin (vec<T,A,L> *v) { return v ? v->begin () : nullptr; }
425template<typename T, typename A, typename L>
426T* end (vec<T,A,L> *v) { return v ? v->end () : nullptr; }
427template<typename T, typename A, typename L>
428const T* begin (const vec<T,A,L> *v) { return v ? v->begin () : nullptr; }
429template<typename T, typename A, typename L>
430const T* end (const vec<T,A,L> *v) { return v ? v->end () : nullptr; }
431
432/* Generic vec<> debug helpers.
433
434 These need to be instantiated for each vec<TYPE> used throughout
435 the compiler like this:
436
437 DEFINE_DEBUG_VEC (TYPE)
438
439 The reason we have a debug_helper() is because GDB can't
440 disambiguate a plain call to debug(some_vec), and it must be called
441 like debug<TYPE>(some_vec). */
442
443template<typename T>
444void
445debug_helper (vec<T> &ref)
446{
447 unsigned i;
448 for (i = 0; i < ref.length (); ++i)
449 {
450 fprintf (stderrstderr, "[%d] = ", i);
451 debug_slim (ref[i]);
452 fputc ('\n', stderrstderr);
453 }
454}
455
456/* We need a separate va_gc variant here because default template
457 argument for functions cannot be used in c++-98. Once this
458 restriction is removed, those variant should be folded with the
459 above debug_helper. */
460
461template<typename T>
462void
463debug_helper (vec<T, va_gc> &ref)
464{
465 unsigned i;
466 for (i = 0; i < ref.length (); ++i)
467 {
468 fprintf (stderrstderr, "[%d] = ", i);
469 debug_slim (ref[i]);
470 fputc ('\n', stderrstderr);
471 }
472}
473
474/* Macro to define debug(vec<T>) and debug(vec<T, va_gc>) helper
475 functions for a type T. */
476
477#define DEFINE_DEBUG_VEC(T)template void debug_helper (vec<T> &); template void
debug_helper (vec<T, va_gc> &); __attribute__ ((__used__
)) void debug (vec<T> &ref) { debug_helper <T>
(ref); } __attribute__ ((__used__)) void debug (vec<T>
*ptr) { if (ptr) debug (*ptr); else fprintf (stderr, "<nil>\n"
); } __attribute__ ((__used__)) void debug (vec<T, va_gc>
&ref) { debug_helper <T> (ref); } __attribute__ ((
__used__)) void debug (vec<T, va_gc> *ptr) { if (ptr) debug
(*ptr); else fprintf (stderr, "<nil>\n"); }
\
478 template void debug_helper (vec<T> &); \
479 template void debug_helper (vec<T, va_gc> &); \
480 /* Define the vec<T> debug functions. */ \
481 DEBUG_FUNCTION__attribute__ ((__used__)) void \
482 debug (vec<T> &ref) \
483 { \
484 debug_helper <T> (ref); \
485 } \
486 DEBUG_FUNCTION__attribute__ ((__used__)) void \
487 debug (vec<T> *ptr) \
488 { \
489 if (ptr) \
490 debug (*ptr); \
491 else \
492 fprintf (stderrstderr, "<nil>\n"); \
493 } \
494 /* Define the vec<T, va_gc> debug functions. */ \
495 DEBUG_FUNCTION__attribute__ ((__used__)) void \
496 debug (vec<T, va_gc> &ref) \
497 { \
498 debug_helper <T> (ref); \
499 } \
500 DEBUG_FUNCTION__attribute__ ((__used__)) void \
501 debug (vec<T, va_gc> *ptr) \
502 { \
503 if (ptr) \
504 debug (*ptr); \
505 else \
506 fprintf (stderrstderr, "<nil>\n"); \
507 }
508
509/* Default-construct N elements in DST. */
510
511template <typename T>
512inline void
513vec_default_construct (T *dst, unsigned n)
514{
515#ifdef BROKEN_VALUE_INITIALIZATION
516 /* Versions of GCC before 4.4 sometimes leave certain objects
517 uninitialized when value initialized, though if the type has
518 user defined default ctor, that ctor is invoked. As a workaround
519 perform clearing first and then the value initialization, which
520 fixes the case when value initialization doesn't initialize due to
521 the bugs and should initialize to all zeros, but still allows
522 vectors for types with user defined default ctor that initializes
523 some or all elements to non-zero. If T has no user defined
524 default ctor and some non-static data members have user defined
525 default ctors that initialize to non-zero the workaround will
526 still not work properly; in that case we just need to provide
527 user defined default ctor. */
528 memset (dst, '\0', sizeof (T) * n);
529#endif
530 for ( ; n; ++dst, --n)
531 ::new (static_cast<void*>(dst)) T ();
532}
533
534/* Copy-construct N elements in DST from *SRC. */
535
536template <typename T>
537inline void
538vec_copy_construct (T *dst, const T *src, unsigned n)
539{
540 for ( ; n; ++dst, ++src, --n)
541 ::new (static_cast<void*>(dst)) T (*src);
542}
543
544/* Type to provide zero-initialized values for vec<T, A, L>. This is
545 used to provide nil initializers for vec instances. Since vec must
546 be a trivially copyable type that can be copied by memcpy and zeroed
547 out by memset, it must have defaulted default and copy ctor and copy
548 assignment. To initialize a vec either use value initialization
549 (e.g., vec() or vec v{ };) or assign it the value vNULL. This isn't
550 needed for file-scope and function-local static vectors, which are
551 zero-initialized by default. */
552struct vnull { };
553constexpr vnull vNULL{ };
554
555
556/* Embeddable vector. These vectors are suitable to be embedded
557 in other data structures so that they can be pre-allocated in a
558 contiguous memory block.
559
560 Embeddable vectors are implemented using the trailing array idiom,
561 thus they are not resizeable without changing the address of the
562 vector object itself. This means you cannot have variables or
563 fields of embeddable vector type -- always use a pointer to a
564 vector. The one exception is the final field of a structure, which
565 could be a vector type.
566
567 You will have to use the embedded_size & embedded_init calls to
568 create such objects, and they will not be resizeable (so the 'safe'
569 allocation variants are not available).
570
571 Properties:
572
573 - The whole vector and control data are allocated in a single
574 contiguous block. It uses the trailing-vector idiom, so
575 allocation must reserve enough space for all the elements
576 in the vector plus its control data.
577 - The vector cannot be re-allocated.
578 - The vector cannot grow nor shrink.
579 - No indirections needed for access/manipulation.
580 - It requires 2 words of storage (prior to vector allocation). */
581
582template<typename T, typename A>
583struct GTY((user)) vec<T, A, vl_embed>
584{
585public:
586 unsigned allocated (void) const { return m_vecpfx.m_alloc; }
587 unsigned length (void) const { return m_vecpfx.m_num; }
588 bool is_empty (void) const { return m_vecpfx.m_num == 0; }
589 T *address (void) { return m_vecdata; }
590 const T *address (void) const { return m_vecdata; }
591 T *begin () { return address (); }
592 const T *begin () const { return address (); }
593 T *end () { return address () + length (); }
594 const T *end () const { return address () + length (); }
595 const T &operator[] (unsigned) const;
596 T &operator[] (unsigned);
597 T &last (void);
598 bool space (unsigned) const;
599 bool iterate (unsigned, T *) const;
600 bool iterate (unsigned, T **) const;
601 vec *copy (ALONE_CXX_MEM_STAT_INFO) const;
602 void splice (const vec &);
603 void splice (const vec *src);
604 T *quick_push (const T &);
605 T &pop (void);
606 void truncate (unsigned);
607 void quick_insert (unsigned, const T &);
608 void ordered_remove (unsigned);
609 void unordered_remove (unsigned);
610 void block_remove (unsigned, unsigned);
611 void qsort (int (*) (const void *, const void *))qsort (int (*) (const void *, const void *));
612 void sort (int (*) (const void *, const void *, void *), void *);
613 void stablesort (int (*) (const void *, const void *, void *), void *);
614 T *bsearch (const void *key, int (*compar)(const void *, const void *));
615 T *bsearch (const void *key,
616 int (*compar)(const void *, const void *, void *), void *);
617 unsigned lower_bound (T, bool (*)(const T &, const T &)) const;
618 bool contains (const T &search) const;
619 static size_t embedded_size (unsigned);
620 void embedded_init (unsigned, unsigned = 0, unsigned = 0);
621 void quick_grow (unsigned len);
622 void quick_grow_cleared (unsigned len);
623
624 /* vec class can access our internal data and functions. */
625 template <typename, typename, typename> friend struct vec;
626
627 /* The allocator types also need access to our internals. */
628 friend struct va_gc;
629 friend struct va_gc_atomic;
630 friend struct va_heap;
631
632 /* FIXME - These fields should be private, but we need to cater to
633 compilers that have stricter notions of PODness for types. */
634 vec_prefix m_vecpfx;
635 T m_vecdata[1];
636};
637
638
639/* Convenience wrapper functions to use when dealing with pointers to
640 embedded vectors. Some functionality for these vectors must be
641 provided via free functions for these reasons:
642
643 1- The pointer may be NULL (e.g., before initial allocation).
644
645 2- When the vector needs to grow, it must be reallocated, so
646 the pointer will change its value.
647
648 Because of limitations with the current GC machinery, all vectors
649 in GC memory *must* be pointers. */
650
651
652/* If V contains no room for NELEMS elements, return false. Otherwise,
653 return true. */
654template<typename T, typename A>
655inline bool
656vec_safe_space (const vec<T, A, vl_embed> *v, unsigned nelems)
657{
658 return v ? v->space (nelems) : nelems == 0;
9
Assuming 'v' is non-null
10
'?' condition is true
11
Value assigned to 'x_rtl.expr.x_forced_labels', which participates in a condition later
12
Returning value, which participates in a condition later
659}
660
661
662/* If V is NULL, return 0. Otherwise, return V->length(). */
663template<typename T, typename A>
664inline unsigned
665vec_safe_length (const vec<T, A, vl_embed> *v)
666{
667 return v ? v->length () : 0;
668}
669
670
671/* If V is NULL, return NULL. Otherwise, return V->address(). */
672template<typename T, typename A>
673inline T *
674vec_safe_address (vec<T, A, vl_embed> *v)
675{
676 return v ? v->address () : NULLnullptr;
677}
678
679
680/* If V is NULL, return true. Otherwise, return V->is_empty(). */
681template<typename T, typename A>
682inline bool
683vec_safe_is_empty (vec<T, A, vl_embed> *v)
684{
685 return v ? v->is_empty () : true;
686}
687
688/* If V does not have space for NELEMS elements, call
689 V->reserve(NELEMS, EXACT). */
690template<typename T, typename A>
691inline bool
692vec_safe_reserve (vec<T, A, vl_embed> *&v, unsigned nelems, bool exact = false
693 CXX_MEM_STAT_INFO)
694{
695 bool extend = nelems
6.1
'nelems' is 1
6.1
'nelems' is 1
? !vec_safe_space (v, nelems) : false;
7
'?' condition is true
8
Calling 'vec_safe_space<rtx_insn *, va_gc>'
13
Returning from 'vec_safe_space<rtx_insn *, va_gc>'
14
Assuming the condition is true
696 if (extend
14.1
'extend' is true
14.1
'extend' is true
)
15
Taking true branch
697 A::reserve (v, nelems, exact PASS_MEM_STAT);
16
Calling 'va_gc::reserve'
27
Returning from 'va_gc::reserve'
698 return extend;
699}
700
701template<typename T, typename A>
702inline bool
703vec_safe_reserve_exact (vec<T, A, vl_embed> *&v, unsigned nelems
704 CXX_MEM_STAT_INFO)
705{
706 return vec_safe_reserve (v, nelems, true PASS_MEM_STAT);
707}
708
709
710/* Allocate GC memory for V with space for NELEMS slots. If NELEMS
711 is 0, V is initialized to NULL. */
712
713template<typename T, typename A>
714inline void
715vec_alloc (vec<T, A, vl_embed> *&v, unsigned nelems CXX_MEM_STAT_INFO)
716{
717 v = NULLnullptr;
718 vec_safe_reserve (v, nelems, false PASS_MEM_STAT);
719}
720
721
722/* Free the GC memory allocated by vector V and set it to NULL. */
723
724template<typename T, typename A>
725inline void
726vec_free (vec<T, A, vl_embed> *&v)
727{
728 A::release (v);
729}
730
731
732/* Grow V to length LEN. Allocate it, if necessary. */
733template<typename T, typename A>
734inline void
735vec_safe_grow (vec<T, A, vl_embed> *&v, unsigned len,
736 bool exact = false CXX_MEM_STAT_INFO)
737{
738 unsigned oldlen = vec_safe_length (v);
739 gcc_checking_assert (len >= oldlen)((void)(!(len >= oldlen) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/vec.h"
, 739, __FUNCTION__), 0 : 0))
;
740 vec_safe_reserve (v, len - oldlen, exact PASS_MEM_STAT);
741 v->quick_grow (len);
742}
743
744
745/* If V is NULL, allocate it. Call V->safe_grow_cleared(LEN). */
746template<typename T, typename A>
747inline void
748vec_safe_grow_cleared (vec<T, A, vl_embed> *&v, unsigned len,
749 bool exact = false CXX_MEM_STAT_INFO)
750{
751 unsigned oldlen = vec_safe_length (v);
752 vec_safe_grow (v, len, exact PASS_MEM_STAT);
753 vec_default_construct (v->address () + oldlen, len - oldlen);
754}
755
756
757/* Assume V is not NULL. */
758
759template<typename T>
760inline void
761vec_safe_grow_cleared (vec<T, va_heap, vl_ptr> *&v,
762 unsigned len, bool exact = false CXX_MEM_STAT_INFO)
763{
764 v->safe_grow_cleared (len, exact PASS_MEM_STAT);
765}
766
767/* If V does not have space for NELEMS elements, call
768 V->reserve(NELEMS, EXACT). */
769
770template<typename T>
771inline bool
772vec_safe_reserve (vec<T, va_heap, vl_ptr> *&v, unsigned nelems, bool exact = false
773 CXX_MEM_STAT_INFO)
774{
775 return v->reserve (nelems, exact);
776}
777
778
779/* If V is NULL return false, otherwise return V->iterate(IX, PTR). */
780template<typename T, typename A>
781inline bool
782vec_safe_iterate (const vec<T, A, vl_embed> *v, unsigned ix, T **ptr)
783{
784 if (v)
785 return v->iterate (ix, ptr);
786 else
787 {
788 *ptr = 0;
789 return false;
790 }
791}
792
793template<typename T, typename A>
794inline bool
795vec_safe_iterate (const vec<T, A, vl_embed> *v, unsigned ix, T *ptr)
796{
797 if (v)
798 return v->iterate (ix, ptr);
799 else
800 {
801 *ptr = 0;
802 return false;
803 }
804}
805
806
807/* If V has no room for one more element, reallocate it. Then call
808 V->quick_push(OBJ). */
809template<typename T, typename A>
810inline T *
811vec_safe_push (vec<T, A, vl_embed> *&v, const T &obj CXX_MEM_STAT_INFO)
812{
813 vec_safe_reserve (v, 1, false PASS_MEM_STAT);
6
Calling 'vec_safe_reserve<rtx_insn *, va_gc>'
28
Returning from 'vec_safe_reserve<rtx_insn *, va_gc>'
814 return v->quick_push (obj);
29
Called C++ object pointer is null
815}
816
817
818/* if V has no room for one more element, reallocate it. Then call
819 V->quick_insert(IX, OBJ). */
820template<typename T, typename A>
821inline void
822vec_safe_insert (vec<T, A, vl_embed> *&v, unsigned ix, const T &obj
823 CXX_MEM_STAT_INFO)
824{
825 vec_safe_reserve (v, 1, false PASS_MEM_STAT);
826 v->quick_insert (ix, obj);
827}
828
829
830/* If V is NULL, do nothing. Otherwise, call V->truncate(SIZE). */
831template<typename T, typename A>
832inline void
833vec_safe_truncate (vec<T, A, vl_embed> *v, unsigned size)
834{
835 if (v)
836 v->truncate (size);
837}
838
839
840/* If SRC is not NULL, return a pointer to a copy of it. */
841template<typename T, typename A>
842inline vec<T, A, vl_embed> *
843vec_safe_copy (vec<T, A, vl_embed> *src CXX_MEM_STAT_INFO)
844{
845 return src ? src->copy (ALONE_PASS_MEM_STAT) : NULLnullptr;
846}
847
848/* Copy the elements from SRC to the end of DST as if by memcpy.
849 Reallocate DST, if necessary. */
850template<typename T, typename A>
851inline void
852vec_safe_splice (vec<T, A, vl_embed> *&dst, const vec<T, A, vl_embed> *src
853 CXX_MEM_STAT_INFO)
854{
855 unsigned src_len = vec_safe_length (src);
856 if (src_len)
857 {
858 vec_safe_reserve_exact (dst, vec_safe_length (dst) + src_len
859 PASS_MEM_STAT);
860 dst->splice (*src);
861 }
862}
863
864/* Return true if SEARCH is an element of V. Note that this is O(N) in the
865 size of the vector and so should be used with care. */
866
867template<typename T, typename A>
868inline bool
869vec_safe_contains (vec<T, A, vl_embed> *v, const T &search)
870{
871 return v ? v->contains (search) : false;
872}
873
874/* Index into vector. Return the IX'th element. IX must be in the
875 domain of the vector. */
876
877template<typename T, typename A>
878inline const T &
879vec<T, A, vl_embed>::operator[] (unsigned ix) const
880{
881 gcc_checking_assert (ix < m_vecpfx.m_num)((void)(!(ix < m_vecpfx.m_num) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/vec.h"
, 881, __FUNCTION__), 0 : 0))
;
882 return m_vecdata[ix];
883}
884
885template<typename T, typename A>
886inline T &
887vec<T, A, vl_embed>::operator[] (unsigned ix)
888{
889 gcc_checking_assert (ix < m_vecpfx.m_num)((void)(!(ix < m_vecpfx.m_num) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/vec.h"
, 889, __FUNCTION__), 0 : 0))
;
890 return m_vecdata[ix];
891}
892
893
894/* Get the final element of the vector, which must not be empty. */
895
896template<typename T, typename A>
897inline T &
898vec<T, A, vl_embed>::last (void)
899{
900 gcc_checking_assert (m_vecpfx.m_num > 0)((void)(!(m_vecpfx.m_num > 0) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/vec.h"
, 900, __FUNCTION__), 0 : 0))
;
901 return (*this)[m_vecpfx.m_num - 1];
902}
903
904
905/* If this vector has space for NELEMS additional entries, return
906 true. You usually only need to use this if you are doing your
907 own vector reallocation, for instance on an embedded vector. This
908 returns true in exactly the same circumstances that vec::reserve
909 will. */
910
911template<typename T, typename A>
912inline bool
913vec<T, A, vl_embed>::space (unsigned nelems) const
914{
915 return m_vecpfx.m_alloc - m_vecpfx.m_num >= nelems;
916}
917
918
919/* Return iteration condition and update PTR to point to the IX'th
920 element of this vector. Use this to iterate over the elements of a
921 vector as follows,
922
923 for (ix = 0; vec<T, A>::iterate (v, ix, &ptr); ix++)
924 continue; */
925
926template<typename T, typename A>
927inline bool
928vec<T, A, vl_embed>::iterate (unsigned ix, T *ptr) const
929{
930 if (ix < m_vecpfx.m_num)
931 {
932 *ptr = m_vecdata[ix];
933 return true;
934 }
935 else
936 {
937 *ptr = 0;
938 return false;
939 }
940}
941
942
943/* Return iteration condition and update *PTR to point to the
944 IX'th element of this vector. Use this to iterate over the
945 elements of a vector as follows,
946
947 for (ix = 0; v->iterate (ix, &ptr); ix++)
948 continue;
949
950 This variant is for vectors of objects. */
951
952template<typename T, typename A>
953inline bool
954vec<T, A, vl_embed>::iterate (unsigned ix, T **ptr) const
955{
956 if (ix < m_vecpfx.m_num)
957 {
958 *ptr = CONST_CAST (T *, &m_vecdata[ix])(const_cast<T *> ((&m_vecdata[ix])));
959 return true;
960 }
961 else
962 {
963 *ptr = 0;
964 return false;
965 }
966}
967
968
969/* Return a pointer to a copy of this vector. */
970
971template<typename T, typename A>
972inline vec<T, A, vl_embed> *
973vec<T, A, vl_embed>::copy (ALONE_MEM_STAT_DECLvoid) const
974{
975 vec<T, A, vl_embed> *new_vec = NULLnullptr;
976 unsigned len = length ();
977 if (len)
978 {
979 vec_alloc (new_vec, len PASS_MEM_STAT);
980 new_vec->embedded_init (len, len);
981 vec_copy_construct (new_vec->address (), m_vecdata, len);
982 }
983 return new_vec;
984}
985
986
987/* Copy the elements from SRC to the end of this vector as if by memcpy.
988 The vector must have sufficient headroom available. */
989
990template<typename T, typename A>
991inline void
992vec<T, A, vl_embed>::splice (const vec<T, A, vl_embed> &src)
993{
994 unsigned len = src.length ();
995 if (len)
996 {
997 gcc_checking_assert (space (len))((void)(!(space (len)) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/vec.h"
, 997, __FUNCTION__), 0 : 0))
;
998 vec_copy_construct (end (), src.address (), len);
999 m_vecpfx.m_num += len;
1000 }
1001}
1002
1003template<typename T, typename A>
1004inline void
1005vec<T, A, vl_embed>::splice (const vec<T, A, vl_embed> *src)
1006{
1007 if (src)
1008 splice (*src);
1009}
1010
1011
1012/* Push OBJ (a new element) onto the end of the vector. There must be
1013 sufficient space in the vector. Return a pointer to the slot
1014 where OBJ was inserted. */
1015
1016template<typename T, typename A>
1017inline T *
1018vec<T, A, vl_embed>::quick_push (const T &obj)
1019{
1020 gcc_checking_assert (space (1))((void)(!(space (1)) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/vec.h"
, 1020, __FUNCTION__), 0 : 0))
;
1021 T *slot = &m_vecdata[m_vecpfx.m_num++];
1022 *slot = obj;
1023 return slot;
1024}
1025
1026
1027/* Pop and return the last element off the end of the vector. */
1028
1029template<typename T, typename A>
1030inline T &
1031vec<T, A, vl_embed>::pop (void)
1032{
1033 gcc_checking_assert (length () > 0)((void)(!(length () > 0) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/vec.h"
, 1033, __FUNCTION__), 0 : 0))
;
1034 return m_vecdata[--m_vecpfx.m_num];
1035}
1036
1037
1038/* Set the length of the vector to SIZE. The new length must be less
1039 than or equal to the current length. This is an O(1) operation. */
1040
1041template<typename T, typename A>
1042inline void
1043vec<T, A, vl_embed>::truncate (unsigned size)
1044{
1045 gcc_checking_assert (length () >= size)((void)(!(length () >= size) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/vec.h"
, 1045, __FUNCTION__), 0 : 0))
;
1046 m_vecpfx.m_num = size;
1047}
1048
1049
1050/* Insert an element, OBJ, at the IXth position of this vector. There
1051 must be sufficient space. */
1052
1053template<typename T, typename A>
1054inline void
1055vec<T, A, vl_embed>::quick_insert (unsigned ix, const T &obj)
1056{
1057 gcc_checking_assert (length () < allocated ())((void)(!(length () < allocated ()) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/vec.h"
, 1057, __FUNCTION__), 0 : 0))
;
1058 gcc_checking_assert (ix <= length ())((void)(!(ix <= length ()) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/vec.h"
, 1058, __FUNCTION__), 0 : 0))
;
1059 T *slot = &m_vecdata[ix];
1060 memmove (slot + 1, slot, (m_vecpfx.m_num++ - ix) * sizeof (T));
1061 *slot = obj;
1062}
1063
1064
1065/* Remove an element from the IXth position of this vector. Ordering of
1066 remaining elements is preserved. This is an O(N) operation due to
1067 memmove. */
1068
1069template<typename T, typename A>
1070inline void
1071vec<T, A, vl_embed>::ordered_remove (unsigned ix)
1072{
1073 gcc_checking_assert (ix < length ())((void)(!(ix < length ()) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/vec.h"
, 1073, __FUNCTION__), 0 : 0))
;
1074 T *slot = &m_vecdata[ix];
1075 memmove (slot, slot + 1, (--m_vecpfx.m_num - ix) * sizeof (T));
1076}
1077
1078
1079/* Remove elements in [START, END) from VEC for which COND holds. Ordering of
1080 remaining elements is preserved. This is an O(N) operation. */
1081
1082#define VEC_ORDERED_REMOVE_IF_FROM_TO(vec, read_index, write_index, \{ ((void)(!((end) <= (vec).length ()) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/vec.h"
, 1083, __FUNCTION__), 0 : 0)); for (read_index = write_index
= (start); read_index < (end); ++read_index) { elem_ptr =
&(vec)[read_index]; bool remove_p = (cond); if (remove_p
) continue; if (read_index != write_index) (vec)[write_index]
= (vec)[read_index]; write_index++; } if (read_index - write_index
> 0) (vec).block_remove (write_index, read_index - write_index
); }
1083 elem_ptr, start, end, cond){ ((void)(!((end) <= (vec).length ()) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/vec.h"
, 1083, __FUNCTION__), 0 : 0)); for (read_index = write_index
= (start); read_index < (end); ++read_index) { elem_ptr =
&(vec)[read_index]; bool remove_p = (cond); if (remove_p
) continue; if (read_index != write_index) (vec)[write_index]
= (vec)[read_index]; write_index++; } if (read_index - write_index
> 0) (vec).block_remove (write_index, read_index - write_index
); }
\
1084 { \
1085 gcc_assert ((end) <= (vec).length ())((void)(!((end) <= (vec).length ()) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/vec.h"
, 1085, __FUNCTION__), 0 : 0))
; \
1086 for (read_index = write_index = (start); read_index < (end); \
1087 ++read_index) \
1088 { \
1089 elem_ptr = &(vec)[read_index]; \
1090 bool remove_p = (cond); \
1091 if (remove_p) \
1092 continue; \
1093 \
1094 if (read_index != write_index) \
1095 (vec)[write_index] = (vec)[read_index]; \
1096 \
1097 write_index++; \
1098 } \
1099 \
1100 if (read_index - write_index > 0) \
1101 (vec).block_remove (write_index, read_index - write_index); \
1102 }
1103
1104
1105/* Remove elements from VEC for which COND holds. Ordering of remaining
1106 elements is preserved. This is an O(N) operation. */
1107
1108#define VEC_ORDERED_REMOVE_IF(vec, read_index, write_index, elem_ptr, \{ ((void)(!(((vec).length ()) <= ((vec)).length ()) ? fancy_abort
("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/vec.h"
, 1109, __FUNCTION__), 0 : 0)); for (read_index = write_index
= (0); read_index < ((vec).length ()); ++read_index) { elem_ptr
= &((vec))[read_index]; bool remove_p = ((cond)); if (remove_p
) continue; if (read_index != write_index) ((vec))[write_index
] = ((vec))[read_index]; write_index++; } if (read_index - write_index
> 0) ((vec)).block_remove (write_index, read_index - write_index
); }
1109 cond){ ((void)(!(((vec).length ()) <= ((vec)).length ()) ? fancy_abort
("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/vec.h"
, 1109, __FUNCTION__), 0 : 0)); for (read_index = write_index
= (0); read_index < ((vec).length ()); ++read_index) { elem_ptr
= &((vec))[read_index]; bool remove_p = ((cond)); if (remove_p
) continue; if (read_index != write_index) ((vec))[write_index
] = ((vec))[read_index]; write_index++; } if (read_index - write_index
> 0) ((vec)).block_remove (write_index, read_index - write_index
); }
\
1110 VEC_ORDERED_REMOVE_IF_FROM_TO ((vec), read_index, write_index, \{ ((void)(!(((vec).length ()) <= ((vec)).length ()) ? fancy_abort
("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/vec.h"
, 1111, __FUNCTION__), 0 : 0)); for (read_index = write_index
= (0); read_index < ((vec).length ()); ++read_index) { elem_ptr
= &((vec))[read_index]; bool remove_p = ((cond)); if (remove_p
) continue; if (read_index != write_index) ((vec))[write_index
] = ((vec))[read_index]; write_index++; } if (read_index - write_index
> 0) ((vec)).block_remove (write_index, read_index - write_index
); }
1111 elem_ptr, 0, (vec).length (), (cond)){ ((void)(!(((vec).length ()) <= ((vec)).length ()) ? fancy_abort
("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/vec.h"
, 1111, __FUNCTION__), 0 : 0)); for (read_index = write_index
= (0); read_index < ((vec).length ()); ++read_index) { elem_ptr
= &((vec))[read_index]; bool remove_p = ((cond)); if (remove_p
) continue; if (read_index != write_index) ((vec))[write_index
] = ((vec))[read_index]; write_index++; } if (read_index - write_index
> 0) ((vec)).block_remove (write_index, read_index - write_index
); }
1112
1113/* Remove an element from the IXth position of this vector. Ordering of
1114 remaining elements is destroyed. This is an O(1) operation. */
1115
1116template<typename T, typename A>
1117inline void
1118vec<T, A, vl_embed>::unordered_remove (unsigned ix)
1119{
1120 gcc_checking_assert (ix < length ())((void)(!(ix < length ()) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/vec.h"
, 1120, __FUNCTION__), 0 : 0))
;
1121 m_vecdata[ix] = m_vecdata[--m_vecpfx.m_num];
1122}
1123
1124
1125/* Remove LEN elements starting at the IXth. Ordering is retained.
1126 This is an O(N) operation due to memmove. */
1127
1128template<typename T, typename A>
1129inline void
1130vec<T, A, vl_embed>::block_remove (unsigned ix, unsigned len)
1131{
1132 gcc_checking_assert (ix + len <= length ())((void)(!(ix + len <= length ()) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/vec.h"
, 1132, __FUNCTION__), 0 : 0))
;
1133 T *slot = &m_vecdata[ix];
1134 m_vecpfx.m_num -= len;
1135 memmove (slot, slot + len, (m_vecpfx.m_num - ix) * sizeof (T));
1136}
1137
1138
1139/* Sort the contents of this vector with qsort. CMP is the comparison
1140 function to pass to qsort. */
1141
1142template<typename T, typename A>
1143inline void
1144vec<T, A, vl_embed>::qsort (int (*cmp) (const void *, const void *))qsort (int (*cmp) (const void *, const void *))
1145{
1146 if (length () > 1)
1147 gcc_qsort (address (), length (), sizeof (T), cmp);
1148}
1149
1150/* Sort the contents of this vector with qsort. CMP is the comparison
1151 function to pass to qsort. */
1152
1153template<typename T, typename A>
1154inline void
1155vec<T, A, vl_embed>::sort (int (*cmp) (const void *, const void *, void *),
1156 void *data)
1157{
1158 if (length () > 1)
1159 gcc_sort_r (address (), length (), sizeof (T), cmp, data);
1160}
1161
1162/* Sort the contents of this vector with gcc_stablesort_r. CMP is the
1163 comparison function to pass to qsort. */
1164
1165template<typename T, typename A>
1166inline void
1167vec<T, A, vl_embed>::stablesort (int (*cmp) (const void *, const void *,
1168 void *), void *data)
1169{
1170 if (length () > 1)
1171 gcc_stablesort_r (address (), length (), sizeof (T), cmp, data);
1172}
1173
1174/* Search the contents of the sorted vector with a binary search.
1175 CMP is the comparison function to pass to bsearch. */
1176
1177template<typename T, typename A>
1178inline T *
1179vec<T, A, vl_embed>::bsearch (const void *key,
1180 int (*compar) (const void *, const void *))
1181{
1182 const void *base = this->address ();
1183 size_t nmemb = this->length ();
1184 size_t size = sizeof (T);
1185 /* The following is a copy of glibc stdlib-bsearch.h. */
1186 size_t l, u, idx;
1187 const void *p;
1188 int comparison;
1189
1190 l = 0;
1191 u = nmemb;
1192 while (l < u)
1193 {
1194 idx = (l + u) / 2;
1195 p = (const void *) (((const char *) base) + (idx * size));
1196 comparison = (*compar) (key, p);
1197 if (comparison < 0)
1198 u = idx;
1199 else if (comparison > 0)
1200 l = idx + 1;
1201 else
1202 return (T *)const_cast<void *>(p);
1203 }
1204
1205 return NULLnullptr;
1206}
1207
1208/* Search the contents of the sorted vector with a binary search.
1209 CMP is the comparison function to pass to bsearch. */
1210
1211template<typename T, typename A>
1212inline T *
1213vec<T, A, vl_embed>::bsearch (const void *key,
1214 int (*compar) (const void *, const void *,
1215 void *), void *data)
1216{
1217 const void *base = this->address ();
1218 size_t nmemb = this->length ();
1219 size_t size = sizeof (T);
1220 /* The following is a copy of glibc stdlib-bsearch.h. */
1221 size_t l, u, idx;
1222 const void *p;
1223 int comparison;
1224
1225 l = 0;
1226 u = nmemb;
1227 while (l < u)
1228 {
1229 idx = (l + u) / 2;
1230 p = (const void *) (((const char *) base) + (idx * size));
1231 comparison = (*compar) (key, p, data);
1232 if (comparison < 0)
1233 u = idx;
1234 else if (comparison > 0)
1235 l = idx + 1;
1236 else
1237 return (T *)const_cast<void *>(p);
1238 }
1239
1240 return NULLnullptr;
1241}
1242
1243/* Return true if SEARCH is an element of V. Note that this is O(N) in the
1244 size of the vector and so should be used with care. */
1245
1246template<typename T, typename A>
1247inline bool
1248vec<T, A, vl_embed>::contains (const T &search) const
1249{
1250 unsigned int len = length ();
1251 for (unsigned int i = 0; i < len; i++)
1252 if ((*this)[i] == search)
1253 return true;
1254
1255 return false;
1256}
1257
1258/* Find and return the first position in which OBJ could be inserted
1259 without changing the ordering of this vector. LESSTHAN is a
1260 function that returns true if the first argument is strictly less
1261 than the second. */
1262
1263template<typename T, typename A>
1264unsigned
1265vec<T, A, vl_embed>::lower_bound (T obj, bool (*lessthan)(const T &, const T &))
1266 const
1267{
1268 unsigned int len = length ();
1269 unsigned int half, middle;
1270 unsigned int first = 0;
1271 while (len > 0)
1272 {
1273 half = len / 2;
1274 middle = first;
1275 middle += half;
1276 T middle_elem = (*this)[middle];
1277 if (lessthan (middle_elem, obj))
1278 {
1279 first = middle;
1280 ++first;
1281 len = len - half - 1;
1282 }
1283 else
1284 len = half;
1285 }
1286 return first;
1287}
1288
1289
1290/* Return the number of bytes needed to embed an instance of an
1291 embeddable vec inside another data structure.
1292
1293 Use these methods to determine the required size and initialization
1294 of a vector V of type T embedded within another structure (as the
1295 final member):
1296
1297 size_t vec<T, A, vl_embed>::embedded_size (unsigned alloc);
1298 void v->embedded_init (unsigned alloc, unsigned num);
1299
1300 These allow the caller to perform the memory allocation. */
1301
1302template<typename T, typename A>
1303inline size_t
1304vec<T, A, vl_embed>::embedded_size (unsigned alloc)
1305{
1306 struct alignas (T) U { char data[sizeof (T)]; };
1307 typedef vec<U, A, vl_embed> vec_embedded;
1308 typedef typename std::conditional<std::is_standard_layout<T>::value,
1309 vec, vec_embedded>::type vec_stdlayout;
1310 static_assert (sizeof (vec_stdlayout) == sizeof (vec), "");
1311 static_assert (alignof (vec_stdlayout) == alignof (vec), "");
1312 return offsetof (vec_stdlayout, m_vecdata)__builtin_offsetof(vec_stdlayout, m_vecdata) + alloc * sizeof (T);
1313}
1314
1315
1316/* Initialize the vector to contain room for ALLOC elements and
1317 NUM active elements. */
1318
1319template<typename T, typename A>
1320inline void
1321vec<T, A, vl_embed>::embedded_init (unsigned alloc, unsigned num, unsigned aut)
1322{
1323 m_vecpfx.m_alloc = alloc;
1324 m_vecpfx.m_using_auto_storage = aut;
1325 m_vecpfx.m_num = num;
1326}
1327
1328
1329/* Grow the vector to a specific length. LEN must be as long or longer than
1330 the current length. The new elements are uninitialized. */
1331
1332template<typename T, typename A>
1333inline void
1334vec<T, A, vl_embed>::quick_grow (unsigned len)
1335{
1336 gcc_checking_assert (length () <= len && len <= m_vecpfx.m_alloc)((void)(!(length () <= len && len <= m_vecpfx.m_alloc
) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/vec.h"
, 1336, __FUNCTION__), 0 : 0))
;
1337 m_vecpfx.m_num = len;
1338}
1339
1340
1341/* Grow the vector to a specific length. LEN must be as long or longer than
1342 the current length. The new elements are initialized to zero. */
1343
1344template<typename T, typename A>
1345inline void
1346vec<T, A, vl_embed>::quick_grow_cleared (unsigned len)
1347{
1348 unsigned oldlen = length ();
1349 size_t growby = len - oldlen;
1350 quick_grow (len);
1351 if (growby != 0)
1352 vec_default_construct (address () + oldlen, growby);
1353}
1354
1355/* Garbage collection support for vec<T, A, vl_embed>. */
1356
1357template<typename T>
1358void
1359gt_ggc_mx (vec<T, va_gc> *v)
1360{
1361 extern void gt_ggc_mx (T &);
1362 for (unsigned i = 0; i < v->length (); i++)
1363 gt_ggc_mx ((*v)[i]);
1364}
1365
1366template<typename T>
1367void
1368gt_ggc_mx (vec<T, va_gc_atomic, vl_embed> *v ATTRIBUTE_UNUSED__attribute__ ((__unused__)))
1369{
1370 /* Nothing to do. Vectors of atomic types wrt GC do not need to
1371 be traversed. */
1372}
1373
1374
1375/* PCH support for vec<T, A, vl_embed>. */
1376
1377template<typename T, typename A>
1378void
1379gt_pch_nx (vec<T, A, vl_embed> *v)
1380{
1381 extern void gt_pch_nx (T &);
1382 for (unsigned i = 0; i < v->length (); i++)
1383 gt_pch_nx ((*v)[i]);
1384}
1385
1386template<typename T, typename A>
1387void
1388gt_pch_nx (vec<T *, A, vl_embed> *v, gt_pointer_operator op, void *cookie)
1389{
1390 for (unsigned i = 0; i < v->length (); i++)
1391 op (&((*v)[i]), cookie);
1392}
1393
1394template<typename T, typename A>
1395void
1396gt_pch_nx (vec<T, A, vl_embed> *v, gt_pointer_operator op, void *cookie)
1397{
1398 extern void gt_pch_nx (T *, gt_pointer_operator, void *);
1399 for (unsigned i = 0; i < v->length (); i++)
1400 gt_pch_nx (&((*v)[i]), op, cookie);
1401}
1402
1403
1404/* Space efficient vector. These vectors can grow dynamically and are
1405 allocated together with their control data. They are suited to be
1406 included in data structures. Prior to initial allocation, they
1407 only take a single word of storage.
1408
1409 These vectors are implemented as a pointer to an embeddable vector.
1410 The semantics allow for this pointer to be NULL to represent empty
1411 vectors. This way, empty vectors occupy minimal space in the
1412 structure containing them.
1413
1414 Properties:
1415
1416 - The whole vector and control data are allocated in a single
1417 contiguous block.
1418 - The whole vector may be re-allocated.
1419 - Vector data may grow and shrink.
1420 - Access and manipulation requires a pointer test and
1421 indirection.
1422 - It requires 1 word of storage (prior to vector allocation).
1423
1424
1425 Limitations:
1426
1427 These vectors must be PODs because they are stored in unions.
1428 (http://en.wikipedia.org/wiki/Plain_old_data_structures).
1429 As long as we use C++03, we cannot have constructors nor
1430 destructors in classes that are stored in unions. */
1431
1432template<typename T, size_t N = 0>
1433class auto_vec;
1434
1435template<typename T>
1436struct vec<T, va_heap, vl_ptr>
1437{
1438public:
1439 /* Default ctors to ensure triviality. Use value-initialization
1440 (e.g., vec() or vec v{ };) or vNULL to create a zero-initialized
1441 instance. */
1442 vec () = default;
1443 vec (const vec &) = default;
1444 /* Initialization from the generic vNULL. */
1445 vec (vnull): m_vec () { }
1446 /* Same as default ctor: vec storage must be released manually. */
1447 ~vec () = default;
1448
1449 /* Defaulted same as copy ctor. */
1450 vec& operator= (const vec &) = default;
1451
1452 /* Prevent implicit conversion from auto_vec. Use auto_vec::to_vec()
1453 instead. */
1454 template <size_t N>
1455 vec (auto_vec<T, N> &) = delete;
1456
1457 template <size_t N>
1458 void operator= (auto_vec<T, N> &) = delete;
1459
1460 /* Memory allocation and deallocation for the embedded vector.
1461 Needed because we cannot have proper ctors/dtors defined. */
1462 void create (unsigned nelems CXX_MEM_STAT_INFO);
1463 void release (void);
1464
1465 /* Vector operations. */
1466 bool exists (void) const
1467 { return m_vec != NULLnullptr; }
1468
1469 bool is_empty (void) const
1470 { return m_vec ? m_vec->is_empty () : true; }
1471
1472 unsigned length (void) const
1473 { return m_vec ? m_vec->length () : 0; }
1474
1475 T *address (void)
1476 { return m_vec ? m_vec->m_vecdata : NULLnullptr; }
1477
1478 const T *address (void) const
1479 { return m_vec ? m_vec->m_vecdata : NULLnullptr; }
1480
1481 T *begin () { return address (); }
1482 const T *begin () const { return address (); }
1483 T *end () { return begin () + length (); }
1484 const T *end () const { return begin () + length (); }
1485 const T &operator[] (unsigned ix) const
1486 { return (*m_vec)[ix]; }
1487
1488 bool operator!=(const vec &other) const
1489 { return !(*this == other); }
1490
1491 bool operator==(const vec &other) const
1492 { return address () == other.address (); }
1493
1494 T &operator[] (unsigned ix)
1495 { return (*m_vec)[ix]; }
1496
1497 T &last (void)
1498 { return m_vec->last (); }
1499
1500 bool space (int nelems) const
1501 { return m_vec ? m_vec->space (nelems) : nelems == 0; }
1502
1503 bool iterate (unsigned ix, T *p) const;
1504 bool iterate (unsigned ix, T **p) const;
1505 vec copy (ALONE_CXX_MEM_STAT_INFO) const;
1506 bool reserve (unsigned, bool = false CXX_MEM_STAT_INFO);
1507 bool reserve_exact (unsigned CXX_MEM_STAT_INFO);
1508 void splice (const vec &);
1509 void safe_splice (const vec & CXX_MEM_STAT_INFO);
1510 T *quick_push (const T &);
1511 T *safe_push (const T &CXX_MEM_STAT_INFO);
1512 T &pop (void);
1513 void truncate (unsigned);
1514 void safe_grow (unsigned, bool = false CXX_MEM_STAT_INFO);
1515 void safe_grow_cleared (unsigned, bool = false CXX_MEM_STAT_INFO);
1516 void quick_grow (unsigned);
1517 void quick_grow_cleared (unsigned);
1518 void quick_insert (unsigned, const T &);
1519 void safe_insert (unsigned, const T & CXX_MEM_STAT_INFO);
1520 void ordered_remove (unsigned);
1521 void unordered_remove (unsigned);
1522 void block_remove (unsigned, unsigned);
1523 void qsort (int (*) (const void *, const void *))qsort (int (*) (const void *, const void *));
1524 void sort (int (*) (const void *, const void *, void *), void *);
1525 void stablesort (int (*) (const void *, const void *, void *), void *);
1526 T *bsearch (const void *key, int (*compar)(const void *, const void *));
1527 T *bsearch (const void *key,
1528 int (*compar)(const void *, const void *, void *), void *);
1529 unsigned lower_bound (T, bool (*)(const T &, const T &)) const;
1530 bool contains (const T &search) const;
1531 void reverse (void);
1532
1533 bool using_auto_storage () const;
1534
1535 /* FIXME - This field should be private, but we need to cater to
1536 compilers that have stricter notions of PODness for types. */
1537 vec<T, va_heap, vl_embed> *m_vec;
1538};
1539
1540
1541/* auto_vec is a subclass of vec that automatically manages creating and
1542 releasing the internal vector. If N is non zero then it has N elements of
1543 internal storage. The default is no internal storage, and you probably only
1544 want to ask for internal storage for vectors on the stack because if the
1545 size of the vector is larger than the internal storage that space is wasted.
1546 */
1547template<typename T, size_t N /* = 0 */>
1548class auto_vec : public vec<T, va_heap>
1549{
1550public:
1551 auto_vec ()
1552 {
1553 m_auto.embedded_init (MAX (N, 2)((N) > (2) ? (N) : (2)), 0, 1);
1554 this->m_vec = &m_auto;
1555 }
1556
1557 auto_vec (size_t s CXX_MEM_STAT_INFO)
1558 {
1559 if (s > N)
1560 {
1561 this->create (s PASS_MEM_STAT);
1562 return;
1563 }
1564
1565 m_auto.embedded_init (MAX (N, 2)((N) > (2) ? (N) : (2)), 0, 1);
1566 this->m_vec = &m_auto;
1567 }
1568
1569 ~auto_vec ()
1570 {
1571 this->release ();
1572 }
1573
1574 /* Explicitly convert to the base class. There is no conversion
1575 from a const auto_vec because a copy of the returned vec can
1576 be used to modify *THIS.
1577 This is a legacy function not to be used in new code. */
1578 vec<T, va_heap> to_vec_legacy () {
1579 return *static_cast<vec<T, va_heap> *>(this);
1580 }
1581
1582private:
1583 vec<T, va_heap, vl_embed> m_auto;
1584 T m_data[MAX (N - 1, 1)((N - 1) > (1) ? (N - 1) : (1))];
1585};
1586
1587/* auto_vec is a sub class of vec whose storage is released when it is
1588 destroyed. */
1589template<typename T>
1590class auto_vec<T, 0> : public vec<T, va_heap>
1591{
1592public:
1593 auto_vec () { this->m_vec = NULLnullptr; }
1594 auto_vec (size_t n CXX_MEM_STAT_INFO) { this->create (n PASS_MEM_STAT); }
1595 ~auto_vec () { this->release (); }
1596
1597 auto_vec (vec<T, va_heap>&& r)
1598 {
1599 gcc_assert (!r.using_auto_storage ())((void)(!(!r.using_auto_storage ()) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/vec.h"
, 1599, __FUNCTION__), 0 : 0))
;
1600 this->m_vec = r.m_vec;
1601 r.m_vec = NULLnullptr;
1602 }
1603
1604 auto_vec (auto_vec<T> &&r)
1605 {
1606 gcc_assert (!r.using_auto_storage ())((void)(!(!r.using_auto_storage ()) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/vec.h"
, 1606, __FUNCTION__), 0 : 0))
;
1607 this->m_vec = r.m_vec;
1608 r.m_vec = NULLnullptr;
1609 }
1610
1611 auto_vec& operator= (vec<T, va_heap>&& r)
1612 {
1613 if (this == &r)
1614 return *this;
1615
1616 gcc_assert (!r.using_auto_storage ())((void)(!(!r.using_auto_storage ()) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/vec.h"
, 1616, __FUNCTION__), 0 : 0))
;
1617 this->release ();
1618 this->m_vec = r.m_vec;
1619 r.m_vec = NULLnullptr;
1620 return *this;
1621 }
1622
1623 auto_vec& operator= (auto_vec<T> &&r)
1624 {
1625 if (this == &r)
1626 return *this;
1627
1628 gcc_assert (!r.using_auto_storage ())((void)(!(!r.using_auto_storage ()) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/vec.h"
, 1628, __FUNCTION__), 0 : 0))
;
1629 this->release ();
1630 this->m_vec = r.m_vec;
1631 r.m_vec = NULLnullptr;
1632 return *this;
1633 }
1634
1635 /* Explicitly convert to the base class. There is no conversion
1636 from a const auto_vec because a copy of the returned vec can
1637 be used to modify *THIS.
1638 This is a legacy function not to be used in new code. */
1639 vec<T, va_heap> to_vec_legacy () {
1640 return *static_cast<vec<T, va_heap> *>(this);
1641 }
1642
1643 // You probably don't want to copy a vector, so these are deleted to prevent
1644 // unintentional use. If you really need a copy of the vectors contents you
1645 // can use copy ().
1646 auto_vec(const auto_vec &) = delete;
1647 auto_vec &operator= (const auto_vec &) = delete;
1648};
1649
1650
1651/* Allocate heap memory for pointer V and create the internal vector
1652 with space for NELEMS elements. If NELEMS is 0, the internal
1653 vector is initialized to empty. */
1654
1655template<typename T>
1656inline void
1657vec_alloc (vec<T> *&v, unsigned nelems CXX_MEM_STAT_INFO)
1658{
1659 v = new vec<T>;
1660 v->create (nelems PASS_MEM_STAT);
1661}
1662
1663
1664/* A subclass of auto_vec <char *> that frees all of its elements on
1665 deletion. */
1666
1667class auto_string_vec : public auto_vec <char *>
1668{
1669 public:
1670 ~auto_string_vec ();
1671};
1672
1673/* A subclass of auto_vec <T *> that deletes all of its elements on
1674 destruction.
1675
1676 This is a crude way for a vec to "own" the objects it points to
1677 and clean up automatically.
1678
1679 For example, no attempt is made to delete elements when an item
1680 within the vec is overwritten.
1681
1682 We can't rely on gnu::unique_ptr within a container,
1683 since we can't rely on move semantics in C++98. */
1684
1685template <typename T>
1686class auto_delete_vec : public auto_vec <T *>
1687{
1688 public:
1689 auto_delete_vec () {}
1690 auto_delete_vec (size_t s) : auto_vec <T *> (s) {}
1691
1692 ~auto_delete_vec ();
1693
1694private:
1695 DISABLE_COPY_AND_ASSIGN(auto_delete_vec)auto_delete_vec (const auto_delete_vec&) = delete; void operator
= (const auto_delete_vec &) = delete
;
1696};
1697
1698/* Conditionally allocate heap memory for VEC and its internal vector. */
1699
1700template<typename T>
1701inline void
1702vec_check_alloc (vec<T, va_heap> *&vec, unsigned nelems CXX_MEM_STAT_INFO)
1703{
1704 if (!vec)
1705 vec_alloc (vec, nelems PASS_MEM_STAT);
1706}
1707
1708
1709/* Free the heap memory allocated by vector V and set it to NULL. */
1710
1711template<typename T>
1712inline void
1713vec_free (vec<T> *&v)
1714{
1715 if (v == NULLnullptr)
1716 return;
1717
1718 v->release ();
1719 delete v;
1720 v = NULLnullptr;
1721}
1722
1723
1724/* Return iteration condition and update PTR to point to the IX'th
1725 element of this vector. Use this to iterate over the elements of a
1726 vector as follows,
1727
1728 for (ix = 0; v.iterate (ix, &ptr); ix++)
1729 continue; */
1730
1731template<typename T>
1732inline bool
1733vec<T, va_heap, vl_ptr>::iterate (unsigned ix, T *ptr) const
1734{
1735 if (m_vec)
1736 return m_vec->iterate (ix, ptr);
1737 else
1738 {
1739 *ptr = 0;
1740 return false;
1741 }
1742}
1743
1744
1745/* Return iteration condition and update *PTR to point to the
1746 IX'th element of this vector. Use this to iterate over the
1747 elements of a vector as follows,
1748
1749 for (ix = 0; v->iterate (ix, &ptr); ix++)
1750 continue;
1751
1752 This variant is for vectors of objects. */
1753
1754template<typename T>
1755inline bool
1756vec<T, va_heap, vl_ptr>::iterate (unsigned ix, T **ptr) const
1757{
1758 if (m_vec)
1759 return m_vec->iterate (ix, ptr);
1760 else
1761 {
1762 *ptr = 0;
1763 return false;
1764 }
1765}
1766
1767
1768/* Convenience macro for forward iteration. */
1769#define FOR_EACH_VEC_ELT(V, I, P)for (I = 0; (V).iterate ((I), &(P)); ++(I)) \
1770 for (I = 0; (V).iterate ((I), &(P)); ++(I))
1771
1772#define FOR_EACH_VEC_SAFE_ELT(V, I, P)for (I = 0; vec_safe_iterate ((V), (I), &(P)); ++(I)) \
1773 for (I = 0; vec_safe_iterate ((V), (I), &(P)); ++(I))
1774
1775/* Likewise, but start from FROM rather than 0. */
1776#define FOR_EACH_VEC_ELT_FROM(V, I, P, FROM)for (I = (FROM); (V).iterate ((I), &(P)); ++(I)) \
1777 for (I = (FROM); (V).iterate ((I), &(P)); ++(I))
1778
1779/* Convenience macro for reverse iteration. */
1780#define FOR_EACH_VEC_ELT_REVERSE(V, I, P)for (I = (V).length () - 1; (V).iterate ((I), &(P)); (I)--
)
\
1781 for (I = (V).length () - 1; \
1782 (V).iterate ((I), &(P)); \
1783 (I)--)
1784
1785#define FOR_EACH_VEC_SAFE_ELT_REVERSE(V, I, P)for (I = vec_safe_length (V) - 1; vec_safe_iterate ((V), (I),
&(P)); (I)--)
\
1786 for (I = vec_safe_length (V) - 1; \
1787 vec_safe_iterate ((V), (I), &(P)); \
1788 (I)--)
1789
1790/* auto_string_vec's dtor, freeing all contained strings, automatically
1791 chaining up to ~auto_vec <char *>, which frees the internal buffer. */
1792
1793inline
1794auto_string_vec::~auto_string_vec ()
1795{
1796 int i;
1797 char *str;
1798 FOR_EACH_VEC_ELT (*this, i, str)for (i = 0; (*this).iterate ((i), &(str)); ++(i))
1799 free (str);
1800}
1801
1802/* auto_delete_vec's dtor, deleting all contained items, automatically
1803 chaining up to ~auto_vec <T*>, which frees the internal buffer. */
1804
1805template <typename T>
1806inline
1807auto_delete_vec<T>::~auto_delete_vec ()
1808{
1809 int i;
1810 T *item;
1811 FOR_EACH_VEC_ELT (*this, i, item)for (i = 0; (*this).iterate ((i), &(item)); ++(i))
1812 delete item;
1813}
1814
1815
1816/* Return a copy of this vector. */
1817
1818template<typename T>
1819inline vec<T, va_heap, vl_ptr>
1820vec<T, va_heap, vl_ptr>::copy (ALONE_MEM_STAT_DECLvoid) const
1821{
1822 vec<T, va_heap, vl_ptr> new_vec{ };
1823 if (length ())
1824 new_vec.m_vec = m_vec->copy (ALONE_PASS_MEM_STAT);
1825 return new_vec;
1826}
1827
1828
1829/* Ensure that the vector has at least RESERVE slots available (if
1830 EXACT is false), or exactly RESERVE slots available (if EXACT is
1831 true).
1832
1833 This may create additional headroom if EXACT is false.
1834
1835 Note that this can cause the embedded vector to be reallocated.
1836 Returns true iff reallocation actually occurred. */
1837
1838template<typename T>
1839inline bool
1840vec<T, va_heap, vl_ptr>::reserve (unsigned nelems, bool exact MEM_STAT_DECL)
1841{
1842 if (space (nelems))
1843 return false;
1844
1845 /* For now play a game with va_heap::reserve to hide our auto storage if any,
1846 this is necessary because it doesn't have enough information to know the
1847 embedded vector is in auto storage, and so should not be freed. */
1848 vec<T, va_heap, vl_embed> *oldvec = m_vec;
1849 unsigned int oldsize = 0;
1850 bool handle_auto_vec = m_vec && using_auto_storage ();
1851 if (handle_auto_vec)
1852 {
1853 m_vec = NULLnullptr;
1854 oldsize = oldvec->length ();
1855 nelems += oldsize;
1856 }
1857
1858 va_heap::reserve (m_vec, nelems, exact PASS_MEM_STAT);
1859 if (handle_auto_vec)
1860 {
1861 vec_copy_construct (m_vec->address (), oldvec->address (), oldsize);
1862 m_vec->m_vecpfx.m_num = oldsize;
1863 }
1864
1865 return true;
1866}
1867
1868
1869/* Ensure that this vector has exactly NELEMS slots available. This
1870 will not create additional headroom. Note this can cause the
1871 embedded vector to be reallocated. Returns true iff reallocation
1872 actually occurred. */
1873
1874template<typename T>
1875inline bool
1876vec<T, va_heap, vl_ptr>::reserve_exact (unsigned nelems MEM_STAT_DECL)
1877{
1878 return reserve (nelems, true PASS_MEM_STAT);
1879}
1880
1881
1882/* Create the internal vector and reserve NELEMS for it. This is
1883 exactly like vec::reserve, but the internal vector is
1884 unconditionally allocated from scratch. The old one, if it
1885 existed, is lost. */
1886
1887template<typename T>
1888inline void
1889vec<T, va_heap, vl_ptr>::create (unsigned nelems MEM_STAT_DECL)
1890{
1891 m_vec = NULLnullptr;
1892 if (nelems > 0)
1893 reserve_exact (nelems PASS_MEM_STAT);
1894}
1895
1896
1897/* Free the memory occupied by the embedded vector. */
1898
1899template<typename T>
1900inline void
1901vec<T, va_heap, vl_ptr>::release (void)
1902{
1903 if (!m_vec)
1904 return;
1905
1906 if (using_auto_storage ())
1907 {
1908 m_vec->m_vecpfx.m_num = 0;
1909 return;
1910 }
1911
1912 va_heap::release (m_vec);
1913}
1914
1915/* Copy the elements from SRC to the end of this vector as if by memcpy.
1916 SRC and this vector must be allocated with the same memory
1917 allocation mechanism. This vector is assumed to have sufficient
1918 headroom available. */
1919
1920template<typename T>
1921inline void
1922vec<T, va_heap, vl_ptr>::splice (const vec<T, va_heap, vl_ptr> &src)
1923{
1924 if (src.length ())
1925 m_vec->splice (*(src.m_vec));
1926}
1927
1928
1929/* Copy the elements in SRC to the end of this vector as if by memcpy.
1930 SRC and this vector must be allocated with the same mechanism.
1931 If there is not enough headroom in this vector, it will be reallocated
1932 as needed. */
1933
1934template<typename T>
1935inline void
1936vec<T, va_heap, vl_ptr>::safe_splice (const vec<T, va_heap, vl_ptr> &src
1937 MEM_STAT_DECL)
1938{
1939 if (src.length ())
1940 {
1941 reserve_exact (src.length ());
1942 splice (src);
1943 }
1944}
1945
1946
1947/* Push OBJ (a new element) onto the end of the vector. There must be
1948 sufficient space in the vector. Return a pointer to the slot
1949 where OBJ was inserted. */
1950
1951template<typename T>
1952inline T *
1953vec<T, va_heap, vl_ptr>::quick_push (const T &obj)
1954{
1955 return m_vec->quick_push (obj);
1956}
1957
1958
1959/* Push a new element OBJ onto the end of this vector. Reallocates
1960 the embedded vector, if needed. Return a pointer to the slot where
1961 OBJ was inserted. */
1962
1963template<typename T>
1964inline T *
1965vec<T, va_heap, vl_ptr>::safe_push (const T &obj MEM_STAT_DECL)
1966{
1967 reserve (1, false PASS_MEM_STAT);
1968 return quick_push (obj);
1969}
1970
1971
1972/* Pop and return the last element off the end of the vector. */
1973
1974template<typename T>
1975inline T &
1976vec<T, va_heap, vl_ptr>::pop (void)
1977{
1978 return m_vec->pop ();
1979}
1980
1981
1982/* Set the length of the vector to LEN. The new length must be less
1983 than or equal to the current length. This is an O(1) operation. */
1984
1985template<typename T>
1986inline void
1987vec<T, va_heap, vl_ptr>::truncate (unsigned size)
1988{
1989 if (m_vec)
1990 m_vec->truncate (size);
1991 else
1992 gcc_checking_assert (size == 0)((void)(!(size == 0) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/vec.h"
, 1992, __FUNCTION__), 0 : 0))
;
1993}
1994
1995
1996/* Grow the vector to a specific length. LEN must be as long or
1997 longer than the current length. The new elements are
1998 uninitialized. Reallocate the internal vector, if needed. */
1999
2000template<typename T>
2001inline void
2002vec<T, va_heap, vl_ptr>::safe_grow (unsigned len, bool exact MEM_STAT_DECL)
2003{
2004 unsigned oldlen = length ();
2005 gcc_checking_assert (oldlen <= len)((void)(!(oldlen <= len) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/vec.h"
, 2005, __FUNCTION__), 0 : 0))
;
2006 reserve (len - oldlen, exact PASS_MEM_STAT);
2007 if (m_vec)
2008 m_vec->quick_grow (len);
2009 else
2010 gcc_checking_assert (len == 0)((void)(!(len == 0) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/vec.h"
, 2010, __FUNCTION__), 0 : 0))
;
2011}
2012
2013
2014/* Grow the embedded vector to a specific length. LEN must be as
2015 long or longer than the current length. The new elements are
2016 initialized to zero. Reallocate the internal vector, if needed. */
2017
2018template<typename T>
2019inline void
2020vec<T, va_heap, vl_ptr>::safe_grow_cleared (unsigned len, bool exact
2021 MEM_STAT_DECL)
2022{
2023 unsigned oldlen = length ();
2024 size_t growby = len - oldlen;
2025 safe_grow (len, exact PASS_MEM_STAT);
2026 if (growby != 0)
2027 vec_default_construct (address () + oldlen, growby);
2028}
2029
2030
2031/* Same as vec::safe_grow but without reallocation of the internal vector.
2032 If the vector cannot be extended, a runtime assertion will be triggered. */
2033
2034template<typename T>
2035inline void
2036vec<T, va_heap, vl_ptr>::quick_grow (unsigned len)
2037{
2038 gcc_checking_assert (m_vec)((void)(!(m_vec) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/vec.h"
, 2038, __FUNCTION__), 0 : 0))
;
2039 m_vec->quick_grow (len);
2040}
2041
2042
2043/* Same as vec::quick_grow_cleared but without reallocation of the
2044 internal vector. If the vector cannot be extended, a runtime
2045 assertion will be triggered. */
2046
2047template<typename T>
2048inline void
2049vec<T, va_heap, vl_ptr>::quick_grow_cleared (unsigned len)
2050{
2051 gcc_checking_assert (m_vec)((void)(!(m_vec) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/vec.h"
, 2051, __FUNCTION__), 0 : 0))
;
2052 m_vec->quick_grow_cleared (len);
2053}
2054
2055
2056/* Insert an element, OBJ, at the IXth position of this vector. There
2057 must be sufficient space. */
2058
2059template<typename T>
2060inline void
2061vec<T, va_heap, vl_ptr>::quick_insert (unsigned ix, const T &obj)
2062{
2063 m_vec->quick_insert (ix, obj);
2064}
2065
2066
2067/* Insert an element, OBJ, at the IXth position of the vector.
2068 Reallocate the embedded vector, if necessary. */
2069
2070template<typename T>
2071inline void
2072vec<T, va_heap, vl_ptr>::safe_insert (unsigned ix, const T &obj MEM_STAT_DECL)
2073{
2074 reserve (1, false PASS_MEM_STAT);
2075 quick_insert (ix, obj);
2076}
2077
2078
2079/* Remove an element from the IXth position of this vector. Ordering of
2080 remaining elements is preserved. This is an O(N) operation due to
2081 a memmove. */
2082
2083template<typename T>
2084inline void
2085vec<T, va_heap, vl_ptr>::ordered_remove (unsigned ix)
2086{
2087 m_vec->ordered_remove (ix);
2088}
2089
2090
2091/* Remove an element from the IXth position of this vector. Ordering
2092 of remaining elements is destroyed. This is an O(1) operation. */
2093
2094template<typename T>
2095inline void
2096vec<T, va_heap, vl_ptr>::unordered_remove (unsigned ix)
2097{
2098 m_vec->unordered_remove (ix);
2099}
2100
2101
2102/* Remove LEN elements starting at the IXth. Ordering is retained.
2103 This is an O(N) operation due to memmove. */
2104
2105template<typename T>
2106inline void
2107vec<T, va_heap, vl_ptr>::block_remove (unsigned ix, unsigned len)
2108{
2109 m_vec->block_remove (ix, len);
2110}
2111
2112
2113/* Sort the contents of this vector with qsort. CMP is the comparison
2114 function to pass to qsort. */
2115
2116template<typename T>
2117inline void
2118vec<T, va_heap, vl_ptr>::qsort (int (*cmp) (const void *, const void *))qsort (int (*cmp) (const void *, const void *))
2119{
2120 if (m_vec)
2121 m_vec->qsort (cmp)qsort (cmp);
2122}
2123
2124/* Sort the contents of this vector with qsort. CMP is the comparison
2125 function to pass to qsort. */
2126
2127template<typename T>
2128inline void
2129vec<T, va_heap, vl_ptr>::sort (int (*cmp) (const void *, const void *,
2130 void *), void *data)
2131{
2132 if (m_vec)
2133 m_vec->sort (cmp, data);
2134}
2135
2136/* Sort the contents of this vector with gcc_stablesort_r. CMP is the
2137 comparison function to pass to qsort. */
2138
2139template<typename T>
2140inline void
2141vec<T, va_heap, vl_ptr>::stablesort (int (*cmp) (const void *, const void *,
2142 void *), void *data)
2143{
2144 if (m_vec)
2145 m_vec->stablesort (cmp, data);
2146}
2147
2148/* Search the contents of the sorted vector with a binary search.
2149 CMP is the comparison function to pass to bsearch. */
2150
2151template<typename T>
2152inline T *
2153vec<T, va_heap, vl_ptr>::bsearch (const void *key,
2154 int (*cmp) (const void *, const void *))
2155{
2156 if (m_vec)
2157 return m_vec->bsearch (key, cmp);
2158 return NULLnullptr;
2159}
2160
2161/* Search the contents of the sorted vector with a binary search.
2162 CMP is the comparison function to pass to bsearch. */
2163
2164template<typename T>
2165inline T *
2166vec<T, va_heap, vl_ptr>::bsearch (const void *key,
2167 int (*cmp) (const void *, const void *,
2168 void *), void *data)
2169{
2170 if (m_vec)
2171 return m_vec->bsearch (key, cmp, data);
2172 return NULLnullptr;
2173}
2174
2175
2176/* Find and return the first position in which OBJ could be inserted
2177 without changing the ordering of this vector. LESSTHAN is a
2178 function that returns true if the first argument is strictly less
2179 than the second. */
2180
2181template<typename T>
2182inline unsigned
2183vec<T, va_heap, vl_ptr>::lower_bound (T obj,
2184 bool (*lessthan)(const T &, const T &))
2185 const
2186{
2187 return m_vec ? m_vec->lower_bound (obj, lessthan) : 0;
2188}
2189
2190/* Return true if SEARCH is an element of V. Note that this is O(N) in the
2191 size of the vector and so should be used with care. */
2192
2193template<typename T>
2194inline bool
2195vec<T, va_heap, vl_ptr>::contains (const T &search) const
2196{
2197 return m_vec ? m_vec->contains (search) : false;
2198}
2199
2200/* Reverse content of the vector. */
2201
2202template<typename T>
2203inline void
2204vec<T, va_heap, vl_ptr>::reverse (void)
2205{
2206 unsigned l = length ();
2207 T *ptr = address ();
2208
2209 for (unsigned i = 0; i < l / 2; i++)
2210 std::swap (ptr[i], ptr[l - i - 1]);
2211}
2212
2213template<typename T>
2214inline bool
2215vec<T, va_heap, vl_ptr>::using_auto_storage () const
2216{
2217 return m_vec ? m_vec->m_vecpfx.m_using_auto_storage : false;
2218}
2219
2220/* Release VEC and call release of all element vectors. */
2221
2222template<typename T>
2223inline void
2224release_vec_vec (vec<vec<T> > &vec)
2225{
2226 for (unsigned i = 0; i < vec.length (); i++)
2227 vec[i].release ();
2228
2229 vec.release ();
2230}
2231
2232// Provide a subset of the std::span functionality. (We can't use std::span
2233// itself because it's a C++20 feature.)
2234//
2235// In addition, provide an invalid value that is distinct from all valid
2236// sequences (including the empty sequence). This can be used to return
2237// failure without having to use std::optional.
2238//
2239// There is no operator bool because it would be ambiguous whether it is
2240// testing for a valid value or an empty sequence.
2241template<typename T>
2242class array_slice
2243{
2244 template<typename OtherT> friend class array_slice;
2245
2246public:
2247 using value_type = T;
2248 using iterator = T *;
2249 using const_iterator = const T *;
2250
2251 array_slice () : m_base (nullptr), m_size (0) {}
2252
2253 template<typename OtherT>
2254 array_slice (array_slice<OtherT> other)
2255 : m_base (other.m_base), m_size (other.m_size) {}
2256
2257 array_slice (iterator base, unsigned int size)
2258 : m_base (base), m_size (size) {}
2259
2260 template<size_t N>
2261 array_slice (T (&array)[N]) : m_base (array), m_size (N) {}
2262
2263 template<typename OtherT>
2264 array_slice (const vec<OtherT> &v)
2265 : m_base (v.address ()), m_size (v.length ()) {}
2266
2267 iterator begin () { return m_base; }
2268 iterator end () { return m_base + m_size; }
2269
2270 const_iterator begin () const { return m_base; }
2271 const_iterator end () const { return m_base + m_size; }
2272
2273 value_type &front ();
2274 value_type &back ();
2275 value_type &operator[] (unsigned int i);
2276
2277 const value_type &front () const;
2278 const value_type &back () const;
2279 const value_type &operator[] (unsigned int i) const;
2280
2281 size_t size () const { return m_size; }
2282 size_t size_bytes () const { return m_size * sizeof (T); }
2283 bool empty () const { return m_size == 0; }
2284
2285 // An invalid array_slice that represents a failed operation. This is
2286 // distinct from an empty slice, which is a valid result in some contexts.
2287 static array_slice invalid () { return { nullptr, ~0U }; }
2288
2289 // True if the array is valid, false if it is an array like INVALID.
2290 bool is_valid () const { return m_base || m_size == 0; }
2291
2292private:
2293 iterator m_base;
2294 unsigned int m_size;
2295};
2296
2297template<typename T>
2298inline typename array_slice<T>::value_type &
2299array_slice<T>::front ()
2300{
2301 gcc_checking_assert (m_size)((void)(!(m_size) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/vec.h"
, 2301, __FUNCTION__), 0 : 0))
;
2302 return m_base[0];
2303}
2304
2305template<typename T>
2306inline const typename array_slice<T>::value_type &
2307array_slice<T>::front () const
2308{
2309 gcc_checking_assert (m_size)((void)(!(m_size) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/vec.h"
, 2309, __FUNCTION__), 0 : 0))
;
2310 return m_base[0];
2311}
2312
2313template<typename T>
2314inline typename array_slice<T>::value_type &
2315array_slice<T>::back ()
2316{
2317 gcc_checking_assert (m_size)((void)(!(m_size) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/vec.h"
, 2317, __FUNCTION__), 0 : 0))
;
2318 return m_base[m_size - 1];
2319}
2320
2321template<typename T>
2322inline const typename array_slice<T>::value_type &
2323array_slice<T>::back () const
2324{
2325 gcc_checking_assert (m_size)((void)(!(m_size) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/vec.h"
, 2325, __FUNCTION__), 0 : 0))
;
2326 return m_base[m_size - 1];
2327}
2328
2329template<typename T>
2330inline typename array_slice<T>::value_type &
2331array_slice<T>::operator[] (unsigned int i)
2332{
2333 gcc_checking_assert (i < m_size)((void)(!(i < m_size) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/vec.h"
, 2333, __FUNCTION__), 0 : 0))
;
2334 return m_base[i];
2335}
2336
2337template<typename T>
2338inline const typename array_slice<T>::value_type &
2339array_slice<T>::operator[] (unsigned int i) const
2340{
2341 gcc_checking_assert (i < m_size)((void)(!(i < m_size) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/vec.h"
, 2341, __FUNCTION__), 0 : 0))
;
2342 return m_base[i];
2343}
2344
2345template<typename T>
2346array_slice<T>
2347make_array_slice (T *base, unsigned int size)
2348{
2349 return array_slice<T> (base, size);
2350}
2351
2352#if (GCC_VERSION(4 * 1000 + 2) >= 3000)
2353# pragma GCC poison m_vec m_vecpfx m_vecdata
2354#endif
2355
2356#endif // GCC_VEC_H