Bug Summary

File: build/gcc/tree-vect-generic.cc
Warning: line 1159, column 7
Called C++ object pointer is null
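
The reduced example below is not GCC code; it is a minimal sketch of the shape of the diagnosed path, assuming the flagged call at line 1159 (not shown in full in this excerpt) is a member call reached while 'assign' is still null. In expand_vector_condition, 'assign' starts as NULL (line 1041) and is only assigned when TREE_CODE (a) == SSA_NAME, the branch the analyzer assumes is not taken at step 12.

/* Hypothetical reduction of the reported pattern; the names below are
   illustrative, not the actual GCC declarations.  */
struct gassign_stub
{
  int rhs_code () const { return 0; }
};

static int
sketch (bool code_is_ssa_name, gassign_stub *def_stmt)
{
  gassign_stub *assign = nullptr;   /* mirrors 'gassign *assign = NULL;' */
  if (code_is_ssa_name)             /* analyzer assumes this test fails  */
    assign = def_stmt;
  /* On the assumed path 'assign' is still null when a member function is
     invoked on it, which is what "Called C++ object pointer is null"
     reports.  */
  return assign->rhs_code ();
}

int main () { gassign_stub g; return sketch (true, &g); }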

Annotated Source Code

clang -cc1 -cc1 -triple x86_64-suse-linux -analyze -disable-free -clear-ast-before-backend -disable-llvm-verifier -discard-value-names -main-file-name tree-vect-generic.cc -analyzer-checker=core -analyzer-checker=apiModeling -analyzer-checker=unix -analyzer-checker=deadcode -analyzer-checker=cplusplus -analyzer-checker=security.insecureAPI.UncheckedReturn -analyzer-checker=security.insecureAPI.getpw -analyzer-checker=security.insecureAPI.gets -analyzer-checker=security.insecureAPI.mktemp -analyzer-checker=security.insecureAPI.mkstemp -analyzer-checker=security.insecureAPI.vfork -analyzer-checker=nullability.NullPassedToNonnull -analyzer-checker=nullability.NullReturnedFromNonnull -analyzer-output plist -w -setup-static-analyzer -analyzer-config-compatibility-mode=true -mrelocation-model static -mframe-pointer=none -fmath-errno -ffp-contract=on -fno-rounding-math -mconstructor-aliases -funwind-tables=2 -target-cpu x86-64 -tune-cpu generic -debugger-tuning=gdb -fcoverage-compilation-dir=/buildworker/marxinbox-gcc-clang-static-analyzer/objdir/gcc -resource-dir /usr/lib64/clang/15.0.7 -D IN_GCC -D HAVE_CONFIG_H -I . -I . -I /buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc -I /buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/. -I /buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../include -I /buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libcpp/include -I /buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libcody -I /buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libdecnumber -I /buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libdecnumber/bid -I ../libdecnumber -I /buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libbacktrace -internal-isystem /usr/bin/../lib64/gcc/x86_64-suse-linux/13/../../../../include/c++/13 -internal-isystem /usr/bin/../lib64/gcc/x86_64-suse-linux/13/../../../../include/c++/13/x86_64-suse-linux -internal-isystem /usr/bin/../lib64/gcc/x86_64-suse-linux/13/../../../../include/c++/13/backward -internal-isystem /usr/lib64/clang/15.0.7/include -internal-isystem /usr/local/include -internal-isystem /usr/bin/../lib64/gcc/x86_64-suse-linux/13/../../../../x86_64-suse-linux/include -internal-externc-isystem /include -internal-externc-isystem /usr/include -O2 -Wno-narrowing -Wwrite-strings -Wno-long-long -Wno-variadic-macros -Wno-overlength-strings -fdeprecated-macro -fdebug-compilation-dir=/buildworker/marxinbox-gcc-clang-static-analyzer/objdir/gcc -ferror-limit 19 -fno-rtti -fgnuc-version=4.2.1 -vectorize-loops -vectorize-slp -analyzer-output=plist-html -analyzer-config silence-checkers=core.NullDereference -faddrsig -D__GCC_HAVE_DWARF2_CFI_ASM=1 -o /buildworker/marxinbox-gcc-clang-static-analyzer/objdir/clang-static-analyzer/2023-03-27-141847-20772-1/report-8M2m5Q.plist -x c++ /buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-generic.cc

/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-generic.cc

1/* Lower vector operations to scalar operations.
2 Copyright (C) 2004-2023 Free Software Foundation, Inc.
3
4This file is part of GCC.
5
6GCC is free software; you can redistribute it and/or modify it
7under the terms of the GNU General Public License as published by the
8Free Software Foundation; either version 3, or (at your option) any
9later version.
10
11GCC is distributed in the hope that it will be useful, but WITHOUT
12ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
13FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14for more details.
15
16You should have received a copy of the GNU General Public License
17along with GCC; see the file COPYING3. If not see
18<http://www.gnu.org/licenses/>. */
19
20#include "config.h"
21#include "system.h"
22#include "coretypes.h"
23#include "backend.h"
24#include "rtl.h"
25#include "tree.h"
26#include "gimple.h"
27#include "tree-pass.h"
28#include "ssa.h"
29#include "expmed.h"
30#include "optabs-tree.h"
31#include "diagnostic.h"
32#include "fold-const.h"
33#include "stor-layout.h"
34#include "langhooks.h"
35#include "tree-eh.h"
36#include "gimple-iterator.h"
37#include "gimplify-me.h"
38#include "gimplify.h"
39#include "tree-cfg.h"
40#include "tree-vector-builder.h"
41#include "vec-perm-indices.h"
42#include "insn-config.h"
43#include "tree-ssa-dce.h"
44#include "gimple-fold.h"
45#include "gimple-match.h"
46#include "recog.h" /* FIXME: for insn_data */
47
48
49/* Build a ternary operation and gimplify it. Emit code before GSI.
50 Return the gimple_val holding the result. */
51
52static tree
53gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
54 tree type, tree a, tree b, tree c)
55{
56 location_t loc = gimple_location (gsi_stmt (*gsi));
57 return gimple_build (gsi, true, GSI_SAME_STMT, loc, code, type, a, b, c);
58}
59
60/* Build a binary operation and gimplify it. Emit code before GSI.
61 Return the gimple_val holding the result. */
62
63static tree
64gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
65 tree type, tree a, tree b)
66{
67 location_t loc = gimple_location (gsi_stmt (*gsi));
68 return gimple_build (gsi, true, GSI_SAME_STMT, loc, code, type, a, b);
69}
70
71/* Build a unary operation and gimplify it. Emit code before GSI.
72 Return the gimple_val holding the result. */
73
74static tree
75gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
76 tree a)
77{
78 location_t loc = gimple_location (gsi_stmt (*gsi));
79 return gimple_build (gsi, true, GSI_SAME_STMT, loc, code, type, a);
80}
81
82
83static void expand_vector_operations_1 (gimple_stmt_iterator *, bitmap);
84
85/* Return the number of elements in a vector type TYPE that we have
86 already decided needs to be expanded piecewise. We don't support
87 this kind of expansion for variable-length vectors, since we should
88 always check for target support before introducing uses of those. */
89static unsigned int
90nunits_for_known_piecewise_op (const_tree type)
91{
92 return TYPE_VECTOR_SUBPARTS (type).to_constant ();
93}
94
95/* Return true if TYPE1 has more elements than TYPE2, where either
96 type may be a vector or a scalar. */
97
98static inline bool
99subparts_gt (tree type1, tree type2)
100{
101 poly_uint64 n1 = VECTOR_TYPE_P (type1) ? TYPE_VECTOR_SUBPARTS (type1) : 1;
102 poly_uint64 n2 = VECTOR_TYPE_P (type2) ? TYPE_VECTOR_SUBPARTS (type2) : 1;
103 return known_gt (n1, n2);
104}
105
106/* Build a constant of type TYPE, made of VALUE's bits replicated
107 every WIDTH bits to fit TYPE's precision. */
108static tree
109build_replicated_const (tree type, unsigned int width, HOST_WIDE_INT value)
110{
111 int n = (TYPE_PRECISION (type) + HOST_BITS_PER_WIDE_INT - 1)
112 / HOST_BITS_PER_WIDE_INT;
113 unsigned HOST_WIDE_INT low, mask;
114 HOST_WIDE_INT a[WIDE_INT_MAX_ELTS];
115 int i;
116
117 gcc_assert (n && n <= WIDE_INT_MAX_ELTS);
118
119 if (width == HOST_BITS_PER_WIDE_INT)
120 low = value;
121 else
122 {
123 mask = ((HOST_WIDE_INT)1 << width) - 1;
124 low = (unsigned HOST_WIDE_INT) ~0 / mask * (value & mask);
125 }
126
127 for (i = 0; i < n; i++)
128 a[i] = low;
129
130 gcc_assert (TYPE_PRECISION (type) <= MAX_BITSIZE_MODE_ANY_INT);
131 return wide_int_to_tree
132 (type, wide_int::from_array (a, n, TYPE_PRECISION (type)));
133}
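
As a quick standalone illustration of the replication trick in build_replicated_const above (a sketch, not GCC code): with WIDTH 8 the mask is 0xff, ~0 / mask gives 0x0101...01, and multiplying by the masked value stamps it into every byte.

/* Standalone check of the '~0 / mask * value' replication used above.  */
#include <cstdio>

int main ()
{
  unsigned int width = 8;
  unsigned long long value = 0x7f;
  unsigned long long mask = (1ULL << width) - 1;        /* 0xff             */
  unsigned long long low = ~0ULL / mask * (value & mask);
  printf ("%016llx\n", low);                            /* 7f7f7f7f7f7f7f7f */
  return 0;
}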
134
135static GTY(()) tree vector_inner_type;
136static GTY(()) tree vector_last_type;
137static GTY(()) int vector_last_nunits;
138
139/* Return a suitable vector types made of SUBPARTS units each of mode
140 "word_mode" (the global variable). */
141static tree
142build_word_mode_vector_type (int nunits)
143{
144 if (!vector_inner_type)
145 vector_inner_type = lang_hooks.types.type_for_mode (word_mode, 1);
146 else if (vector_last_nunits == nunits)
147 {
148 gcc_assert (TREE_CODE (vector_last_type) == VECTOR_TYPE);
149 return vector_last_type;
150 }
151
152 vector_last_nunits = nunits;
153 vector_last_type = build_vector_type (vector_inner_type, nunits);
154 return vector_last_type;
155}
156
157typedef tree (*elem_op_func) (gimple_stmt_iterator *,
158 tree, tree, tree, tree, tree, enum tree_code,
159 tree);
160
161/* Extract the vector element of type TYPE at BITPOS with BITSIZE from T
162 and return it. */
163
164tree
165tree_vec_extract (gimple_stmt_iterator *gsi, tree type,
166 tree t, tree bitsize, tree bitpos)
167{
168 /* We're using the resimplify API and maybe_push_res_to_seq to
169 simplify the BIT_FIELD_REF but restrict the simplification to
170 a single stmt while at the same time following SSA edges for
171 simplification with already emitted CTORs. */
172 gimple_match_op opr;
173 opr.set_op (BIT_FIELD_REF, type, t, bitsize, bitpos);
174 opr.resimplify (NULL, follow_all_ssa_edges);
175 gimple_seq stmts = NULL;
176 tree res = maybe_push_res_to_seq (&opr, &stmts);
177 gcc_assert (res);
178 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
179 return res;
180}
181
182static tree
183do_unop (gimple_stmt_iterator *gsi, tree inner_type, tree a,
184 tree b ATTRIBUTE_UNUSED, tree bitpos, tree bitsize,
185 enum tree_code code, tree type ATTRIBUTE_UNUSED)
186{
187 a = tree_vec_extract (gsi, inner_type, a, bitsize, bitpos);
188 return gimplify_build1 (gsi, code, inner_type, a);
189}
190
191static tree
192do_binop (gimple_stmt_iterator *gsi, tree inner_type, tree a, tree b,
193 tree bitpos, tree bitsize, enum tree_code code,
194 tree type ATTRIBUTE_UNUSED)
195{
196 if (TREE_CODE (TREE_TYPE (a)) == VECTOR_TYPE)
197 a = tree_vec_extract (gsi, inner_type, a, bitsize, bitpos);
198 if (TREE_CODE (TREE_TYPE (b)) == VECTOR_TYPE)
199 b = tree_vec_extract (gsi, inner_type, b, bitsize, bitpos);
200 return gimplify_build2 (gsi, code, inner_type, a, b);
201}
202
203/* Construct expression (A[BITPOS] code B[BITPOS]) ? -1 : 0
204
205 INNER_TYPE is the type of A and B elements
206
207 returned expression is of signed integer type with the
208 size equal to the size of INNER_TYPE. */
209static tree
210do_compare (gimple_stmt_iterator *gsi, tree inner_type, tree a, tree b,
211 tree bitpos, tree bitsize, enum tree_code code, tree type)
212{
213 tree stype = TREE_TYPE (type);
214 tree cst_false = build_zero_cst (stype);
215 tree cst_true = build_all_ones_cst (stype);
216 tree cmp;
217
218 a = tree_vec_extract (gsi, inner_type, a, bitsize, bitpos);
219 b = tree_vec_extract (gsi, inner_type, b, bitsize, bitpos);
220
221 cmp = build2 (code, boolean_type_node, a, b);
222 return gimplify_build3 (gsi, COND_EXPR, stype, cmp, cst_true, cst_false);
223}
224
225/* Expand vector addition to scalars. This does bit twiddling
226 in order to increase parallelism:
227
228 a + b = (((int) a & 0x7f7f7f7f) + ((int) b & 0x7f7f7f7f)) ^
229 (a ^ b) & 0x80808080
230
231 a - b = (((int) a | 0x80808080) - ((int) b & 0x7f7f7f7f)) ^
232 (a ^ ~b) & 0x80808080
233
234 -b = (0x80808080 - ((int) b & 0x7f7f7f7f)) ^ (~b & 0x80808080)
235
236 This optimization should be done only if 4 vector items or more
237 fit into a word. */
238static tree
239do_plus_minus (gimple_stmt_iterator *gsi, tree word_type, tree a, tree b,
240 tree bitpos ATTRIBUTE_UNUSED, tree bitsize ATTRIBUTE_UNUSED,
241 enum tree_code code, tree type ATTRIBUTE_UNUSED)
242{
243 unsigned int width = vector_element_bits (TREE_TYPE (a));
244 tree inner_type = TREE_TYPE (TREE_TYPE (a));
245 unsigned HOST_WIDE_INT max;
246 tree low_bits, high_bits, a_low, b_low, result_low, signs;
247
248 max = GET_MODE_MASK (TYPE_MODE (inner_type));
249 low_bits = build_replicated_const (word_type, width, max >> 1);
250 high_bits = build_replicated_const (word_type, width, max & ~(max >> 1));
251
252 a = tree_vec_extract (gsi, word_type, a, bitsize, bitpos);
253 b = tree_vec_extract (gsi, word_type, b, bitsize, bitpos);
254
255 signs = gimplify_build2 (gsi, BIT_XOR_EXPR, word_type, a, b);
256 b_low = gimplify_build2 (gsi, BIT_AND_EXPR, word_type, b, low_bits);
257 if (code == PLUS_EXPR)
258 a_low = gimplify_build2 (gsi, BIT_AND_EXPR, word_type, a, low_bits);
259 else
260 {
261 a_low = gimplify_build2 (gsi, BIT_IOR_EXPR, word_type, a, high_bits);
262 signs = gimplify_build1 (gsi, BIT_NOT_EXPR, word_type, signs);
263 }
264
265 signs = gimplify_build2 (gsi, BIT_AND_EXPR, word_type, signs, high_bits);
266 result_low = gimplify_build2 (gsi, code, word_type, a_low, b_low);
267 return gimplify_build2 (gsi, BIT_XOR_EXPR, word_type, result_low, signs);
268}
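
The identity in the comment before do_plus_minus can be checked in isolation. This is a standalone sketch in plain C++ (not GCC internals) for four 8-bit lanes packed into a 32-bit word: clearing each lane's sign bit keeps carries from crossing lane boundaries, and the sign bits are patched back in with the final XOR.

/* Standalone check of the word-at-a-time addition identity.  */
#include <cstdint>
#include <cstdio>

static uint32_t
swar_add8 (uint32_t a, uint32_t b)
{
  return (((a & 0x7f7f7f7f) + (b & 0x7f7f7f7f)) ^ ((a ^ b) & 0x80808080));
}

int main ()
{
  uint32_t a = 0x90ff017f, b = 0x11017f80, expect = 0;
  for (int i = 0; i < 4; i++)   /* reference: add each byte independently */
    expect |= (uint32_t) (uint8_t) ((a >> (8 * i)) + (b >> (8 * i))) << (8 * i);
  printf ("%08x %08x\n", (unsigned) swar_add8 (a, b), (unsigned) expect);
  return 0;                     /* both columns print a10080ff */
}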
269
270static tree
271do_negate (gimple_stmt_iterator *gsi, tree word_type, tree b,
272 tree unused ATTRIBUTE_UNUSED, tree bitpos ATTRIBUTE_UNUSED,
273 tree bitsize ATTRIBUTE_UNUSED,
274 enum tree_code code ATTRIBUTE_UNUSED,
275 tree type ATTRIBUTE_UNUSED)
276{
277 unsigned int width = vector_element_bits (TREE_TYPE (b));
278 tree inner_type = TREE_TYPE (TREE_TYPE (b));
279 HOST_WIDE_INT max;
280 tree low_bits, high_bits, b_low, result_low, signs;
281
282 max = GET_MODE_MASK (TYPE_MODE (inner_type));
283 low_bits = build_replicated_const (word_type, width, max >> 1);
284 high_bits = build_replicated_const (word_type, width, max & ~(max >> 1));
285
286 b = tree_vec_extract (gsi, word_type, b, bitsize, bitpos);
287
288 b_low = gimplify_build2 (gsi, BIT_AND_EXPR, word_type, b, low_bits);
289 signs = gimplify_build1 (gsi, BIT_NOT_EXPR, word_type, b);
290 signs = gimplify_build2 (gsi, BIT_AND_EXPR, word_type, signs, high_bits);
291 result_low = gimplify_build2 (gsi, MINUS_EXPR, word_type, high_bits, b_low);
292 return gimplify_build2 (gsi, BIT_XOR_EXPR, word_type, result_low, signs);
293}
294
295/* Expand a vector operation to scalars, by using many operations
296 whose type is the vector type's inner type. */
297static tree
298expand_vector_piecewise (gimple_stmt_iterator *gsi, elem_op_func f,
299 tree type, tree inner_type,
300 tree a, tree b, enum tree_code code,
301 bool parallel_p, tree ret_type = NULL_TREE)
302{
303 vec<constructor_elt, va_gc> *v;
304 tree part_width = TYPE_SIZE (inner_type);
305 tree index = bitsize_int (0);
306 int nunits = nunits_for_known_piecewise_op (type);
307 int delta = tree_to_uhwi (part_width) / vector_element_bits (type);
308 int i;
309 location_t loc = gimple_location (gsi_stmt (*gsi));
310
311 if (nunits == 1
312 || warning_suppressed_p (gsi_stmt (*gsi),
313 OPT_Wvector_operation_performance))
314 /* Do not diagnose decomposing single element vectors or when
315 decomposing vectorizer produced operations. */
316 ;
317 else if (ret_type || !parallel_p)
318 warning_at (loc, OPT_Wvector_operation_performance,
319 "vector operation will be expanded piecewise");
320 else
321 warning_at (loc, OPT_Wvector_operation_performance,
322 "vector operation will be expanded in parallel");
323
324 if (!ret_type)
325 ret_type = type;
326 vec_alloc (v, (nunits + delta - 1) / delta);
327 bool constant_p = true;
328 for (i = 0; i < nunits;
329 i += delta, index = int_const_binop (PLUS_EXPR, index, part_width))
330 {
331 tree result = f (gsi, inner_type, a, b, index, part_width, code,
332 ret_type);
333 if (!CONSTANT_CLASS_P (result))
334 constant_p = false;
335 constructor_elt ce = {NULL_TREE, result};
336 v->quick_push (ce);
337 }
338
339 if (constant_p)
340 return build_vector_from_ctor (ret_type, v);
341 else
342 return build_constructor (ret_type, v);
343}
344
345/* Expand a vector operation to scalars with the freedom to use
346 a scalar integer type, or to use a different size for the items
347 in the vector type. */
348static tree
349expand_vector_parallel (gimple_stmt_iterator *gsi, elem_op_func f, tree type,
350 tree a, tree b, enum tree_code code)
351{
352 tree result, compute_type;
353 int n_words = tree_to_uhwi (TYPE_SIZE_UNIT (type)) / UNITS_PER_WORD;
354 location_t loc = gimple_location (gsi_stmt (*gsi));
355
356 /* We have three strategies. If the type is already correct, just do
357 the operation an element at a time. Else, if the vector is wider than
358 one word, do it a word at a time; finally, if the vector is smaller
359 than one word, do it as a scalar. */
360 if (TYPE_MODE (TREE_TYPE (type)) == word_mode)
361 return expand_vector_piecewise (gsi, f,
362 type, TREE_TYPE (type),
363 a, b, code, true);
364 else if (n_words > 1)
365 {
366 tree word_type = build_word_mode_vector_type (n_words);
367 result = expand_vector_piecewise (gsi, f,
368 word_type, TREE_TYPE (word_type),
369 a, b, code, true);
370 result = force_gimple_operand_gsi (gsi, result, true, NULL, true,
371 GSI_SAME_STMT);
372 }
373 else
374 {
375 /* Use a single scalar operation with a mode no wider than word_mode. */
376 if (!warning_suppressed_p (gsi_stmt (*gsi),
377 OPT_Wvector_operation_performance))
378 warning_at (loc, OPT_Wvector_operation_performance,
379 "vector operation will be expanded with a "
380 "single scalar operation");
381 scalar_int_mode mode
382 = int_mode_for_size (tree_to_uhwi (TYPE_SIZE (type)), 0).require ();
383 compute_type = lang_hooks.types.type_for_mode (mode, 1);
384 result = f (gsi, compute_type, a, b, bitsize_zero_node,
385 TYPE_SIZE (compute_type), code, type);
386 }
387
388 return result;
389}
390
391/* Expand a vector operation to scalars; for integer types we can use
392 special bit twiddling tricks to do the sums a word at a time, using
393 function F_PARALLEL instead of F. These tricks are done only if
394 they can process at least four items, that is, only if the vector
395 holds at least four items and if a word can hold four items. */
396static tree
397expand_vector_addition (gimple_stmt_iterator *gsi,
398 elem_op_func f, elem_op_func f_parallel,
399 tree type, tree a, tree b, enum tree_code code)
400{
401 int parts_per_word = BITS_PER_WORD / vector_element_bits (type);
402
403 if (INTEGRAL_TYPE_P (TREE_TYPE (type))
404 && parts_per_word >= 4
405 && nunits_for_known_piecewise_op (type) >= 4)
406 return expand_vector_parallel (gsi, f_parallel,
407 type, a, b, code);
408 else
409 return expand_vector_piecewise (gsi, f,
410 type, TREE_TYPE (type),
411 a, b, code, false);
412}
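
To make the two >= 4 thresholds in expand_vector_addition concrete, here is a small sketch with assumed numbers only (a 64-bit word and two hypothetical vector shapes); it repeats the same arithmetic and is not GCC code.

/* Hypothetical shapes: 16 x 8-bit elements vs. 2 x 64-bit elements,
   assuming a 64-bit BITS_PER_WORD.  */
constexpr int bits_per_word = 64;
constexpr int qi_bits = 8,  qi_nunits = 16;   /* V16QI-like vector */
constexpr int di_bits = 64, di_nunits = 2;    /* V2DI-like vector  */

static_assert (bits_per_word / qi_bits >= 4 && qi_nunits >= 4,
               "byte elements: word-parallel bit-twiddling path is taken");
static_assert (!(bits_per_word / di_bits >= 4 && di_nunits >= 4),
               "64-bit elements: falls back to piecewise expansion");

int main () { return 0; }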
413
414static bool
415expand_vector_condition (gimple_stmt_iterator *gsi, bitmap dce_ssa_names);
416
417/* Try to expand vector comparison expression OP0 CODE OP1 by
418 querying optab if the following expression:
419 VEC_COND_EXPR< OP0 CODE OP1, {-1,...}, {0,...}>
420 can be expanded. */
421static tree
422expand_vector_comparison (gimple_stmt_iterator *gsi, tree type, tree op0,
423 tree op1, enum tree_code code,
424 bitmap dce_ssa_names)
425{
426 tree lhs = gimple_assign_lhs (gsi_stmt (*gsi));
427 use_operand_p use_p;
428 imm_use_iterator iterator;
429 bool vec_cond_expr_only = true;
430
431 /* As seen in PR95830, we should not expand comparisons that are only
432 feeding a VEC_COND_EXPR statement. */
433 auto_vec<gimple *> uses;
434 FOR_EACH_IMM_USE_FAST (use_p, iterator, lhs)
8
Loop condition is false. Execution continues on line 449
435 {
436 gimple *use = USE_STMT (use_p);
437 if (is_gimple_debug (use))
438 continue;
439 if (is_gimple_assign (use)
440 && gimple_assign_rhs_code (use) == VEC_COND_EXPR
441 && gimple_assign_rhs1 (use) == lhs
442 && gimple_assign_rhs2 (use) != lhs
443 && gimple_assign_rhs3 (use) != lhs)
444 uses.safe_push (use);
445 else
446 vec_cond_expr_only = false;
447 }
448
449 if (vec_cond_expr_only)
8.1
'vec_cond_expr_only' is true
9
Taking true branch
450 for (gimple *use : uses)
10
Assuming '__begin2' is not equal to '__end2'
451 {
452 gimple_stmt_iterator it = gsi_for_stmt (use);
453 if (!expand_vector_condition (&it, dce_ssa_names))
11
Calling 'expand_vector_condition'
454 {
455 vec_cond_expr_only = false;
456 break;
457 }
458 }
459
460 if (!uses.is_empty () && vec_cond_expr_only)
461 return NULL_TREE;
462
463 tree t;
464 if (!expand_vec_cmp_expr_p (TREE_TYPE (op0), type, code))
465 {
466 if (VECTOR_BOOLEAN_TYPE_P (type)
467 && SCALAR_INT_MODE_P (TYPE_MODE (type))
468 && known_lt (GET_MODE_BITSIZE (TYPE_MODE (type)),
469 TYPE_VECTOR_SUBPARTS (type)
470 * GET_MODE_BITSIZE (SCALAR_TYPE_MODE
471 (TREE_TYPE (type)))))
472 {
473 tree inner_type = TREE_TYPE (TREE_TYPE (op0));
474 tree part_width = vector_element_bits_tree (TREE_TYPE (op0));
475 tree index = bitsize_int (0);
476 int nunits = nunits_for_known_piecewise_op (TREE_TYPE (op0));
477 int prec = GET_MODE_PRECISION (SCALAR_TYPE_MODE (type));
478 tree ret_type = build_nonstandard_integer_type (prec, 1);
479 tree ret_inner_type = boolean_type_node;
480 int i;
481 location_t loc = gimple_location (gsi_stmt (*gsi));
482 t = build_zero_cst (ret_type);
483
484 if (TYPE_PRECISION (ret_inner_type) != 1)
485 ret_inner_type = build_nonstandard_integer_type (1, 1);
486 if (!warning_suppressed_p (gsi_stmt (*gsi),
487 OPT_Wvector_operation_performance))
488 warning_at (loc, OPT_Wvector_operation_performance,
489 "vector operation will be expanded piecewise");
490 for (i = 0; i < nunits;
491 i++, index = int_const_binop (PLUS_EXPR, index, part_width))
492 {
493 tree a = tree_vec_extract (gsi, inner_type, op0, part_width,
494 index);
495 tree b = tree_vec_extract (gsi, inner_type, op1, part_width,
496 index);
497 tree result = gimplify_build2 (gsi, code, ret_inner_type, a, b);
498 t = gimplify_build3 (gsi, BIT_INSERT_EXPR, ret_type, t, result,
499 bitsize_int (i));
500 }
501 t = gimplify_build1 (gsi, VIEW_CONVERT_EXPR, type, t);
502 }
503 else
504 t = expand_vector_piecewise (gsi, do_compare, type,
505 TREE_TYPE (TREE_TYPE (op0)), op0, op1,
506 code, false);
507 }
508 else
509 t = NULL_TREE;
510
511 return t;
512}
513
514/* Helper function of expand_vector_divmod. Gimplify a RSHIFT_EXPR in type
515 of OP0 with shift counts in SHIFTCNTS array and return the temporary holding
516 the result if successful, otherwise return NULL_TREE. */
517static tree
518add_rshift (gimple_stmt_iterator *gsi, tree type, tree op0, int *shiftcnts)
519{
520 optab op;
521 unsigned int i, nunits = nunits_for_known_piecewise_op (type);
522 bool scalar_shift = true;
523
524 for (i = 1; i < nunits; i++)
525 {
526 if (shiftcnts[i] != shiftcnts[0])
527 scalar_shift = false;
528 }
529
530 if (scalar_shift && shiftcnts[0] == 0)
531 return op0;
532
533 if (scalar_shift)
534 {
535 op = optab_for_tree_code (RSHIFT_EXPR, type, optab_scalar);
536 if (op != unknown_optab
537 && optab_handler (op, TYPE_MODE (type)) != CODE_FOR_nothing)
538 return gimplify_build2 (gsi, RSHIFT_EXPR, type, op0,
539 build_int_cst (NULL_TREE, shiftcnts[0]));
540 }
541
542 op = optab_for_tree_code (RSHIFT_EXPR, type, optab_vector);
543 if (op != unknown_optab
544 && optab_handler (op, TYPE_MODE (type)) != CODE_FOR_nothing)
545 {
546 tree_vector_builder vec (type, nunits, 1);
547 for (i = 0; i < nunits; i++)
548 vec.quick_push (build_int_cst (TREE_TYPE (type), shiftcnts[i]));
549 return gimplify_build2 (gsi, RSHIFT_EXPR, type, op0, vec.build ());
550 }
551
552 return NULL_TREE;
553}
554
555/* Try to expand integer vector division by constant using
556 widening multiply, shifts and additions. */
557static tree
558expand_vector_divmod (gimple_stmt_iterator *gsi, tree type, tree op0,
559 tree op1, enum tree_code code)
560{
561 bool use_pow2 = true;
562 bool has_vector_shift = true;
563 bool use_abs_op1 = false;
564 int mode = -1, this_mode;
565 int pre_shift = -1, post_shift;
566 unsigned int nunits = nunits_for_known_piecewise_op (type);
567 int *shifts = XALLOCAVEC (int, nunits * 4);
568 int *pre_shifts = shifts + nunits;
569 int *post_shifts = pre_shifts + nunits;
570 int *shift_temps = post_shifts + nunits;
571 unsigned HOST_WIDE_INT *mulc = XALLOCAVEC (unsigned HOST_WIDE_INT, nunits);
572 int prec = TYPE_PRECISION (TREE_TYPE (type));
573 int dummy_int;
574 unsigned int i;
575 signop sign_p = TYPE_SIGN (TREE_TYPE (type));
576 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (TYPE_MODE (TREE_TYPE (type)));
577 tree cur_op, mulcst, tem;
578 optab op;
579
580 if (prec > HOST_BITS_PER_WIDE_INT)
581 return NULL_TREE;
582
583 op = optab_for_tree_code (RSHIFT_EXPR, type, optab_vector);
584 if (op == unknown_optab
585 || optab_handler (op, TYPE_MODE (type)) == CODE_FOR_nothing)
586 has_vector_shift = false;
587
588 /* Analysis phase. Determine if all op1 elements are either power
589 of two and it is possible to expand it using shifts (or for remainder
590 using masking). Additionally compute the multiplicative constants
591 and pre and post shifts if the division is to be expanded using
592 widening or high part multiplication plus shifts. */
593 for (i = 0; i < nunits; i++)
594 {
595 tree cst = VECTOR_CST_ELT (op1, i);
596 unsigned HOST_WIDE_INT ml;
597
598 if (TREE_CODE (cst) != INTEGER_CST || integer_zerop (cst))
599 return NULL_TREE;
600 pre_shifts[i] = 0;
601 post_shifts[i] = 0;
602 mulc[i] = 0;
603 if (use_pow2
604 && (!integer_pow2p (cst) || tree_int_cst_sgn (cst) != 1))
605 use_pow2 = false;
606 if (use_pow2)
607 {
608 shifts[i] = tree_log2 (cst);
609 if (shifts[i] != shifts[0]
610 && code == TRUNC_DIV_EXPR
611 && !has_vector_shift)
612 use_pow2 = false;
613 }
614 if (mode == -2)
615 continue;
616 if (sign_p == UNSIGNED)
617 {
618 unsigned HOST_WIDE_INT mh;
619 unsigned HOST_WIDE_INT d = TREE_INT_CST_LOW (cst) & mask;
620
621 if (d >= (HOST_WIDE_INT_1U << (prec - 1)))
622 /* FIXME: Can transform this into op0 >= op1 ? 1 : 0. */
623 return NULL_TREE;
624
625 if (d <= 1)
626 {
627 mode = -2;
628 continue;
629 }
630
631 /* Find a suitable multiplier and right shift count
632 instead of multiplying with D. */
633 mh = choose_multiplier (d, prec, prec, &ml, &post_shift, &dummy_int);
634
635 /* If the suggested multiplier is more than SIZE bits, we can
636 do better for even divisors, using an initial right shift. */
637 if ((mh != 0 && (d & 1) == 0)
638 || (!has_vector_shift && pre_shift != -1))
639 {
640 if (has_vector_shift)
641 pre_shift = ctz_or_zero (d);
642 else if (pre_shift == -1)
643 {
644 unsigned int j;
645 for (j = 0; j < nunits; j++)
646 {
647 tree cst2 = VECTOR_CST_ELT (op1, j);
648 unsigned HOST_WIDE_INT d2;
649 int this_pre_shift;
650
651 if (!tree_fits_uhwi_p (cst2))
652 return NULL_TREE;
653 d2 = tree_to_uhwi (cst2) & mask;
654 if (d2 == 0)
655 return NULL_TREE;
656 this_pre_shift = floor_log2 (d2 & -d2);
657 if (pre_shift == -1 || this_pre_shift < pre_shift)
658 pre_shift = this_pre_shift;
659 }
660 if (i != 0 && pre_shift != 0)
661 {
662 /* Restart. */
663 i = -1U;
664 mode = -1;
665 continue;
666 }
667 }
668 if (pre_shift != 0)
669 {
670 if ((d >> pre_shift) <= 1)
671 {
672 mode = -2;
673 continue;
674 }
675 mh = choose_multiplier (d >> pre_shift, prec,
676 prec - pre_shift,
677 &ml, &post_shift, &dummy_int);
678 gcc_assert (!mh);
679 pre_shifts[i] = pre_shift;
680 }
681 }
682 if (!mh)
683 this_mode = 0;
684 else
685 this_mode = 1;
686 }
687 else
688 {
689 HOST_WIDE_INT d = TREE_INT_CST_LOW (cst);
690 unsigned HOST_WIDE_INT abs_d;
691
692 if (d == -1)
693 return NULL_TREE;
694
695 /* Since d might be INT_MIN, we have to cast to
696 unsigned HOST_WIDE_INT before negating to avoid
697 undefined signed overflow. */
698 abs_d = (d >= 0
699 ? (unsigned HOST_WIDE_INT) d
700 : - (unsigned HOST_WIDE_INT) d);
701
702 /* n rem d = n rem -d */
703 if (code == TRUNC_MOD_EXPR && d < 0)
704 {
705 d = abs_d;
706 use_abs_op1 = true;
707 }
708 if (abs_d == HOST_WIDE_INT_1U << (prec - 1))
709 {
710 /* This case is not handled correctly below. */
711 mode = -2;
712 continue;
713 }
714 if (abs_d <= 1)
715 {
716 mode = -2;
717 continue;
718 }
719
720 choose_multiplier (abs_d, prec, prec - 1, &ml,
721 &post_shift, &dummy_int);
722 if (ml >= HOST_WIDE_INT_1U << (prec - 1))
723 {
724 this_mode = 4 + (d < 0);
725 ml |= HOST_WIDE_INT_M1U << (prec - 1);
726 }
727 else
728 this_mode = 2 + (d < 0);
729 }
730 mulc[i] = ml;
731 post_shifts[i] = post_shift;
732 if ((i && !has_vector_shift && post_shifts[0] != post_shift)
733 || post_shift >= prec
734 || pre_shifts[i] >= prec)
735 this_mode = -2;
736
737 if (i == 0)
738 mode = this_mode;
739 else if (mode != this_mode)
740 mode = -2;
741 }
742
743 if (use_pow2)
744 {
745 tree addend = NULL_TREE;
746 if (sign_p == SIGNED)
747 {
748 tree uns_type;
749
750 /* Both division and remainder sequences need
751 op0 < 0 ? mask : 0 computed. It can be either computed as
752 (type) (((uns_type) (op0 >> (prec - 1))) >> (prec - shifts[i]))
753 if none of the shifts is 0, or as the conditional. */
754 for (i = 0; i < nunits; i++)
755 if (shifts[i] == 0)
756 break;
757 uns_type
758 = build_vector_type (build_nonstandard_integer_type (prec, 1),
759 nunits);
760 if (i == nunits && TYPE_MODE (uns_type) == TYPE_MODE (type))
761 {
762 for (i = 0; i < nunits; i++)
763 shift_temps[i] = prec - 1;
764 cur_op = add_rshift (gsi, type, op0, shift_temps);
765 if (cur_op != NULL_TREE)
766 {
767 cur_op = gimplify_build1 (gsi, VIEW_CONVERT_EXPR,
768 uns_type, cur_op);
769 for (i = 0; i < nunits; i++)
770 shift_temps[i] = prec - shifts[i];
771 cur_op = add_rshift (gsi, uns_type, cur_op, shift_temps);
772 if (cur_op != NULL_TREE)
773 addend = gimplify_build1 (gsi, VIEW_CONVERT_EXPR,
774 type, cur_op);
775 }
776 }
777 if (addend == NULL_TREE
778 && expand_vec_cond_expr_p (type, type, LT_EXPR))
779 {
780 tree zero, cst, mask_type, mask;
781 gimple *stmt, *cond;
782
783 mask_type = truth_type_for (type);
784 zero = build_zero_cst (type);
785 mask = make_ssa_name (mask_type);
786 cond = gimple_build_assign (mask, LT_EXPR, op0, zero);
787 gsi_insert_before (gsi, cond, GSI_SAME_STMT);
788 tree_vector_builder vec (type, nunits, 1);
789 for (i = 0; i < nunits; i++)
790 vec.quick_push (build_int_cst (TREE_TYPE (type),
791 (HOST_WIDE_INT_1U
792 << shifts[i]) - 1));
793 cst = vec.build ();
794 addend = make_ssa_name (type);
795 stmt
796 = gimple_build_assign (addend, VEC_COND_EXPR, mask, cst, zero);
797 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
798 }
799 }
800 if (code == TRUNC_DIV_EXPR)
801 {
802 if (sign_p == UNSIGNED)
803 {
804 /* q = op0 >> shift; */
805 cur_op = add_rshift (gsi, type, op0, shifts);
806 if (cur_op != NULL_TREE)
807 return cur_op;
808 }
809 else if (addend != NULL_TREE)
810 {
811 /* t1 = op0 + addend;
812 q = t1 >> shift; */
813 op = optab_for_tree_code (PLUS_EXPR, type, optab_default);
814 if (op != unknown_optab
815 && optab_handler (op, TYPE_MODE (type)) != CODE_FOR_nothing)
816 {
817 cur_op = gimplify_build2 (gsi, PLUS_EXPR, type, op0, addend);
818 cur_op = add_rshift (gsi, type, cur_op, shifts);
819 if (cur_op != NULL_TREE)
820 return cur_op;
821 }
822 }
823 }
824 else
825 {
826 tree mask;
827 tree_vector_builder vec (type, nunits, 1);
828 for (i = 0; i < nunits; i++)
829 vec.quick_push (build_int_cst (TREE_TYPE (type),
830 (HOST_WIDE_INT_1U
831 << shifts[i]) - 1));
832 mask = vec.build ();
833 op = optab_for_tree_code (BIT_AND_EXPR, type, optab_default);
834 if (op != unknown_optab
835 && optab_handler (op, TYPE_MODE (type)) != CODE_FOR_nothing)
836 {
837 if (sign_p == UNSIGNED)
838 /* r = op0 & mask; */
839 return gimplify_build2 (gsi, BIT_AND_EXPR, type, op0, mask);
840 else if (addend != NULL_TREE)
841 {
842 /* t1 = op0 + addend;
843 t2 = t1 & mask;
844 r = t2 - addend; */
845 op = optab_for_tree_code (PLUS_EXPR, type, optab_default);
846 if (op != unknown_optab
847 && optab_handler (op, TYPE_MODE (type))
848 != CODE_FOR_nothing)
849 {
850 cur_op = gimplify_build2 (gsi, PLUS_EXPR, type, op0,
851 addend);
852 cur_op = gimplify_build2 (gsi, BIT_AND_EXPR, type,
853 cur_op, mask);
854 op = optab_for_tree_code (MINUS_EXPR, type,
855 optab_default);
856 if (op != unknown_optab
857 && optab_handler (op, TYPE_MODE (type))
858 != CODE_FOR_nothing)
859 return gimplify_build2 (gsi, MINUS_EXPR, type,
860 cur_op, addend);
861 }
862 }
863 }
864 }
865 }
866
867 if (mode == -2 || BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
868 return NULL_TREE;
869
870 if (!can_mult_highpart_p (TYPE_MODE (type), TYPE_UNSIGNED (type)))
871 return NULL_TREE;
872
873 cur_op = op0;
874
875 switch (mode)
876 {
877 case 0:
878 gcc_assert (sign_p == UNSIGNED);
879 /* t1 = oprnd0 >> pre_shift;
880 t2 = t1 h* ml;
881 q = t2 >> post_shift; */
882 cur_op = add_rshift (gsi, type, cur_op, pre_shifts);
883 if (cur_op == NULL_TREE)
884 return NULL_TREE;
885 break;
886 case 1:
887 gcc_assert (sign_p == UNSIGNED);
888 for (i = 0; i < nunits; i++)
889 {
890 shift_temps[i] = 1;
891 post_shifts[i]--;
892 }
893 break;
894 case 2:
895 case 3:
896 case 4:
897 case 5:
898 gcc_assert (sign_p == SIGNED);
899 for (i = 0; i < nunits; i++)
900 shift_temps[i] = prec - 1;
901 break;
902 default:
903 return NULL_TREE;
904 }
905
906 tree_vector_builder vec (type, nunits, 1);
907 for (i = 0; i < nunits; i++)
908 vec.quick_push (build_int_cst (TREE_TYPE (type), mulc[i]));
909 mulcst = vec.build ();
910
911 cur_op = gimplify_build2 (gsi, MULT_HIGHPART_EXPR, type, cur_op, mulcst);
912
913 switch (mode)
914 {
915 case 0:
916 /* t1 = oprnd0 >> pre_shift;
917 t2 = t1 h* ml;
918 q = t2 >> post_shift; */
919 cur_op = add_rshift (gsi, type, cur_op, post_shifts);
920 break;
921 case 1:
922 /* t1 = oprnd0 h* ml;
923 t2 = oprnd0 - t1;
924 t3 = t2 >> 1;
925 t4 = t1 + t3;
926 q = t4 >> (post_shift - 1); */
927 op = optab_for_tree_code (MINUS_EXPR, type, optab_default);
928 if (op == unknown_optab
929 || optab_handler (op, TYPE_MODE (type)) == CODE_FOR_nothing)
930 return NULL_TREE;
931 tem = gimplify_build2 (gsi, MINUS_EXPR, type, op0, cur_op);
932 tem = add_rshift (gsi, type, tem, shift_temps);
933 op = optab_for_tree_code (PLUS_EXPR, type, optab_default);
934 if (op == unknown_optab
935 || optab_handler (op, TYPE_MODE (type)) == CODE_FOR_nothing)
936 return NULL_TREE;
937 tem = gimplify_build2 (gsi, PLUS_EXPR, type, cur_op, tem);
938 cur_op = add_rshift (gsi, type, tem, post_shifts);
939 if (cur_op == NULL_TREE)
940 return NULL_TREE;
941 break;
942 case 2:
943 case 3:
944 case 4:
945 case 5:
946 /* t1 = oprnd0 h* ml;
947 t2 = t1; [ iff (mode & 2) != 0 ]
948 t2 = t1 + oprnd0; [ iff (mode & 2) == 0 ]
949 t3 = t2 >> post_shift;
950 t4 = oprnd0 >> (prec - 1);
951 q = t3 - t4; [ iff (mode & 1) == 0 ]
952 q = t4 - t3; [ iff (mode & 1) != 0 ] */
953 if ((mode & 2) == 0)
954 {
955 op = optab_for_tree_code (PLUS_EXPR, type, optab_default);
956 if (op == unknown_optab
957 || optab_handler (op, TYPE_MODE (type)) == CODE_FOR_nothing)
958 return NULL_TREE;
959 cur_op = gimplify_build2 (gsi, PLUS_EXPR, type, cur_op, op0);
960 }
961 cur_op = add_rshift (gsi, type, cur_op, post_shifts);
962 if (cur_op == NULL_TREE)
963 return NULL_TREE;
964 tem = add_rshift (gsi, type, op0, shift_temps);
965 if (tem == NULL_TREE)
966 return NULL_TREE;
967 op = optab_for_tree_code (MINUS_EXPR, type, optab_default);
968 if (op == unknown_optab
969 || optab_handler (op, TYPE_MODE (type)) == CODE_FOR_nothing)
970 return NULL_TREE;
971 if ((mode & 1) == 0)
972 cur_op = gimplify_build2 (gsi, MINUS_EXPR, type, cur_op, tem);
973 else
974 cur_op = gimplify_build2 (gsi, MINUS_EXPR, type, tem, cur_op);
975 break;
976 default:
977 gcc_unreachable ();
978 }
979
980 if (code == TRUNC_DIV_EXPR)
981 return cur_op;
982
983 /* We divided. Now finish by:
984 t1 = q * oprnd1;
985 r = oprnd0 - t1; */
986 op = optab_for_tree_code (MULT_EXPR, type, optab_default);
987 if (op == unknown_optab
988 || optab_handler (op, TYPE_MODE (type)) == CODE_FOR_nothing)
989 return NULL_TREE;
990 if (use_abs_op1)
991 {
992 tree_vector_builder elts;
993 if (!elts.new_unary_operation (type, op1, false))
994 return NULL_TREE;
995 unsigned int count = elts.encoded_nelts ();
996 for (unsigned int i = 0; i < count; ++i)
997 {
998 tree elem1 = VECTOR_CST_ELT (op1, i);
999
1000 tree elt = const_unop (ABS_EXPR, TREE_TYPE (elem1), elem1);
1001 if (elt == NULL_TREE)
1002 return NULL_TREE;
1003 elts.quick_push (elt);
1004 }
1005 op1 = elts.build ();
1006 }
1007 tem = gimplify_build2 (gsi, MULT_EXPR, type, cur_op, op1);
1008 op = optab_for_tree_code (MINUS_EXPR, type, optab_default);
1009 if (op == unknown_optab
1010 || optab_handler (op, TYPE_MODE (type)) == CODE_FOR_nothing)
1011 return NULL_TREE;
1012 return gimplify_build2 (gsi, MINUS_EXPR, type, op0, tem);
1013}
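
For intuition about the multiplicative strategy in expand_vector_divmod, here is a hedged standalone sketch (scalar and unsigned only, not the GCC helpers): division by 3 can be done as a high-part multiply by the well-known magic constant 0xAAAAAAAB followed by a right shift, roughly the mode-0 sequence in the comments above with a zero pre-shift.

/* Standalone scalar illustration of division by a constant via a
   high-part multiply plus shift.  */
#include <cstdint>
#include <cstdio>

static uint32_t
udiv3 (uint32_t x)
{
  uint32_t hi = (uint32_t) (((uint64_t) x * 0xAAAAAAABu) >> 32);  /* t2 = t1 h* ml        */
  return hi >> 1;                                                 /* q = t2 >> post_shift */
}

int main ()
{
  const uint32_t tests[] = { 0, 1, 2, 3, 100, 0xFFFFFFFFu };
  for (uint32_t x : tests)
    printf ("%u / 3 = %u (expect %u)\n", (unsigned) x, (unsigned) udiv3 (x),
            (unsigned) (x / 3));
  return 0;
}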
1014
1015/* Expand a vector condition to scalars, by using many conditions
1016 on the vector's elements. */
1017
1018static bool
1019expand_vector_condition (gimple_stmt_iterator *gsi, bitmap dce_ssa_names)
1020{
1021 gassign *stmt = as_a <gassign *> (gsi_stmt (*gsi));
1022 tree type = TREE_TYPE (gimple_assign_lhs (stmt));
1023 tree a = gimple_assign_rhs1 (stmt);
1024 tree a1 = a;
1025 tree a2 = NULL_TREE;
1026 bool a_is_comparison = false;
1027 bool a_is_scalar_bitmask = false;
1028 tree b = gimple_assign_rhs2 (stmt);
1029 tree c = gimple_assign_rhs3 (stmt);
1030 vec<constructor_elt, va_gc> *v;
1031 tree constr;
1032 tree inner_type = TREE_TYPE (type);
1033 tree width = vector_element_bits_tree (type);
1034 tree cond_type = TREE_TYPE (TREE_TYPE (a));
1035 tree comp_inner_type = cond_type;
1036 tree index = bitsize_int (0);
1037 tree comp_width = width;
1038 tree comp_index = index;
1039 location_t loc = gimple_location (gsi_stmt (*gsi));
1040 tree_code code = TREE_CODE (a);
1041 gassign *assign = NULL;
1042
1043 if (code == SSA_NAME)
12
Assuming 'code' is not equal to SSA_NAME
1044 {
1045 assign = dyn_cast<gassign *> (SSA_NAME_DEF_STMT (a)(tree_check ((a), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-generic.cc"
, 1045, __FUNCTION__, (SSA_NAME)))->ssa_name.def_stmt
);
1046 if (assign != NULLnullptr
1047 && TREE_CODE_CLASS (gimple_assign_rhs_code (assign))tree_code_type_tmpl <0>::tree_code_type[(int) (gimple_assign_rhs_code
(assign))]
== tcc_comparison)
1048 {
1049 a_is_comparison = true;
1050 a1 = gimple_assign_rhs1 (assign);
1051 a2 = gimple_assign_rhs2 (assign);
1052 code = gimple_assign_rhs_code (assign);
1053 comp_inner_type = TREE_TYPE (TREE_TYPE (a1))((contains_struct_check ((((contains_struct_check ((a1), (TS_TYPED
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-generic.cc"
, 1053, __FUNCTION__))->typed.type)), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-generic.cc"
, 1053, __FUNCTION__))->typed.type)
;
1054 comp_width = vector_element_bits_tree (TREE_TYPE (a1)((contains_struct_check ((a1), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-generic.cc"
, 1054, __FUNCTION__))->typed.type)
);
1055 }
1056 }
1057	
1058	  if (expand_vec_cond_expr_p (type, TREE_TYPE (a1), code)
            [13]  Assuming the condition is false
1059	      || (integer_all_onesp (b) && integer_zerop (c)
            [14]  Assuming the condition is false
1060	          && expand_vec_cmp_expr_p (type, TREE_TYPE (a1), code)))
1061	    {
1062	      gcc_assert (TREE_CODE (a) == SSA_NAME || TREE_CODE (a) == VECTOR_CST);
1063	      return true;
1064	    }
1065	
1066	  /* If a has vector boolean type and is a comparison, above
1067	     expand_vec_cond_expr_p might fail, even if both the comparison and
1068	     VEC_COND_EXPR could be supported individually.  See PR109176.  */
1069	  if (a_is_comparison
            [14.1]  'a_is_comparison' is false
1070	      && VECTOR_BOOLEAN_TYPE_P (TREE_TYPE (a))
1071	      && expand_vec_cond_expr_p (type, TREE_TYPE (a), SSA_NAME)
1072	      && expand_vec_cmp_expr_p (TREE_TYPE (a1), TREE_TYPE (a), code))
1073	    return true;
1074	
1075	  /* Handle vector boolean types with bitmasks.  If there is a comparison
1076	     and we can expand the comparison into the vector boolean bitmask,
1077	     or otherwise if it is compatible with type, we can transform
1078	      vbfld_1 = x_2 < y_3 ? vbfld_4 : vbfld_5;
1079	     into
1080	      tmp_6 = x_2 < y_3;
1081	      tmp_7 = tmp_6 & vbfld_4;
1082	      tmp_8 = ~tmp_6;
1083	      tmp_9 = tmp_8 & vbfld_5;
1084	      vbfld_1 = tmp_7 | tmp_9;
1085	     Similarly for vbfld_10 instead of x_2 < y_3.  */
1086	  if (VECTOR_BOOLEAN_TYPE_P (type)
            [15]  Assuming field 'code' is equal to VECTOR_TYPE
            [16]  Assuming field 'code' is not equal to BOOLEAN_TYPE
            [17]  Taking false branch
1087	      && SCALAR_INT_MODE_P (TYPE_MODE (type))
1088	      && known_lt (GET_MODE_BITSIZE (TYPE_MODE (type)),
1089	                   TYPE_VECTOR_SUBPARTS (type)
1090	                   * GET_MODE_BITSIZE (SCALAR_TYPE_MODE (TREE_TYPE (type))))
1091	      && (a_is_comparison
1092	          ? useless_type_conversion_p (type, TREE_TYPE (a))
1093	          : expand_vec_cmp_expr_p (TREE_TYPE (a1), type, TREE_CODE (a))))
1094	    {
1095	      if (a_is_comparison)
1096	        a = gimplify_build2 (gsi, code, type, a1, a2);
1097	      a1 = gimplify_build2 (gsi, BIT_AND_EXPR, type, a, b);
1098	      a2 = gimplify_build1 (gsi, BIT_NOT_EXPR, type, a);
1099	      a2 = gimplify_build2 (gsi, BIT_AND_EXPR, type, a2, c);
1100	      a = gimplify_build2 (gsi, BIT_IOR_EXPR, type, a1, a2);
1101	      gimple_assign_set_rhs_from_tree (gsi, a);
1102	      update_stmt (gsi_stmt (*gsi));
1103	      return true;
1104	    }
1105	
1106	  /* TODO: try and find a smaller vector type.  */
1107	
1108	  if (!warning_suppressed_p (stmt, OPT_Wvector_operation_performance))
            [18]  Assuming the condition is false
1109	    warning_at (loc, OPT_Wvector_operation_performance,
1110	                "vector condition will be expanded piecewise");
1111	
1112	  if (!a_is_comparison
            [18.1]  'a_is_comparison' is false
1113	      && VECTOR_BOOLEAN_TYPE_P (TREE_TYPE (a))
            [19]  Assuming field 'code' is not equal to VECTOR_TYPE
1114	      && SCALAR_INT_MODE_P (TYPE_MODE (TREE_TYPE (a)))
1115	      && known_lt (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (a))),
1116	                   TYPE_VECTOR_SUBPARTS (TREE_TYPE (a))
1117	                   * GET_MODE_BITSIZE (SCALAR_TYPE_MODE
1118	                                       (TREE_TYPE (TREE_TYPE (a)))))
1119	    {
1120	      a_is_scalar_bitmask = true;
1121	      int prec = GET_MODE_PRECISION (SCALAR_TYPE_MODE (TREE_TYPE (a)));
1122	      tree atype = build_nonstandard_integer_type (prec, 1);
1123	      a = gimplify_build1 (gsi, VIEW_CONVERT_EXPR, atype, a);
1124	    }
1125	  else if (!a_is_comparison
            [19.1]  'a_is_comparison' is false
1126	           && VECTOR_BOOLEAN_TYPE_P (TREE_TYPE (a)))
1127	    comp_width = vector_element_bits_tree (TREE_TYPE (a));
1128	
1129	  int nunits = nunits_for_known_piecewise_op (type);
1130	  vec_alloc (v, nunits);
            [20]  Calling 'vec_alloc<constructor_elt, va_gc>'
            [32]  Returning from 'vec_alloc<constructor_elt, va_gc>'
1131	  bool constant_p = true;
1132	  for (int i = 0; i < nunits; i++)
            [32.1]  'i' is < 'nunits'
            [33]  Loop condition is true.  Entering loop body
1133	    {
1134	      tree aa, result;
1135	      tree bb = tree_vec_extract (gsi, inner_type, b, width, index);
1136	      tree cc = tree_vec_extract (gsi, inner_type, c, width, index);
1137	      if (a_is_comparison)
            [33.1]  'a_is_comparison' is false
            [34]  Taking false branch
1138	        {
1139	          tree aa1 = tree_vec_extract (gsi, comp_inner_type, a1,
1140	                                       comp_width, comp_index);
1141	          tree aa2 = tree_vec_extract (gsi, comp_inner_type, a2,
1142	                                       comp_width, comp_index);
1143	          aa = gimplify_build2 (gsi, code, cond_type, aa1, aa2);
1144	        }
1145	      else if (a_is_scalar_bitmask)
            [34.1]  'a_is_scalar_bitmask' is false
            [35]  Taking false branch
1146	        {
1147	          wide_int w = wi::set_bit_in_zero (i, TYPE_PRECISION (TREE_TYPE (a)));
1148	          result = gimplify_build2 (gsi, BIT_AND_EXPR, TREE_TYPE (a),
1149	                                    a, wide_int_to_tree (TREE_TYPE (a), w));
1150	          aa = gimplify_build2 (gsi, NE_EXPR, boolean_type_node, result,
1151	                                build_zero_cst (TREE_TYPE (a)));
1152	        }
1153	      else
1154	        aa = tree_vec_extract (gsi, cond_type, a, comp_width, comp_index);
1155	      result = gimplify_build3 (gsi, COND_EXPR, inner_type, aa, bb, cc);
1156	      if (!CONSTANT_CLASS_P (result))
            [36]  Assuming the condition is false
            [37]  Taking false branch
1157	        constant_p = false;
1158	      constructor_elt ce = {NULL_TREE, result};
1159	      v->quick_push (ce);
            [38]  Called C++ object pointer is null
1160	      index = int_const_binop (PLUS_EXPR, index, width);
1161	      if (width == comp_width)
1162	        comp_index = index;
1163	      else
1164	        comp_index = int_const_binop (PLUS_EXPR, comp_index, comp_width);
1165	    }
1166	
1167	  if (constant_p)
1168	    constr = build_vector_from_ctor (type, v);
1169	  else
1170	    constr = build_constructor (type, v);
1171	  gimple_assign_set_rhs_from_tree (gsi, constr);
1172	  update_stmt (gsi_stmt (*gsi));
1173	
1174	  if (a_is_comparison)
1175	    bitmap_set_bit (dce_ssa_names,
1176	                    SSA_NAME_VERSION (gimple_assign_lhs (assign)));
1177	
1178	  return false;
1179	}
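For reference, the loop at lines 1132-1165 above (which contains the flagged call at line 1159) builds the piecewise form of the vector condition: each lane of the condition selects the matching lane of b or c, and the per-lane results are collected into a constructor. A minimal standalone illustration of that lowering (plain C++ on arrays, with arbitrary example values; this is not the GIMPLE representation itself) is:

// Sketch: lane-by-lane COND_EXPR, results gathered into a new "vector".
#include <array>
#include <cstdio>

int main ()
{
  std::array<bool, 4> a = { true, false, true, false };  /* condition lanes */
  std::array<int, 4>  b = { 10, 11, 12, 13 };
  std::array<int, 4>  c = { 20, 21, 22, 23 };
  std::array<int, 4>  v;                                  /* "constructor"  */
  for (unsigned i = 0; i < v.size (); ++i)
    v[i] = a[i] ? b[i] : c[i];          /* COND_EXPR on the extracted lanes */
  for (int x : v)
    std::printf ("%d ", x);             /* prints: 10 21 12 23 */
  std::printf ("\n");
  return 0;
}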
1180	
1181	static tree
1182	expand_vector_operation (gimple_stmt_iterator *gsi, tree type, tree compute_type,
1183	                         gassign *assign, enum tree_code code,
1184	                         bitmap dce_ssa_names)
1185	{
1186	  machine_mode compute_mode = TYPE_MODE (compute_type);
            [1]  Assuming field 'code' is not equal to VECTOR_TYPE
            [2]  '?' condition is false
1187	
1188	  /* If the compute mode is not a vector mode (hence we are not decomposing
1189	     a BLKmode vector to smaller, hardware-supported vectors), we may want
1190	     to expand the operations in parallel.  */
1191	  if (!VECTOR_MODE_P (compute_mode))
            [3]  Assuming the condition is true
            [4]  Assuming the condition is true
            [5]  Taking true branch
1192	    switch (code)
            [6]  Control jumps to 'case EQ_EXPR:' at line 1220
1193	      {
1194	      case PLUS_EXPR:
1195	      case MINUS_EXPR:
1196	        if (ANY_INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_TRAPS (type))
1197	          return expand_vector_addition (gsi, do_binop, do_plus_minus, type,
1198	                                         gimple_assign_rhs1 (assign),
1199	                                         gimple_assign_rhs2 (assign), code);
1200	        break;
1201	
1202	      case NEGATE_EXPR:
1203	        if (ANY_INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_TRAPS (type))
1204	          return expand_vector_addition (gsi, do_unop, do_negate, type,
1205	                                         gimple_assign_rhs1 (assign),
1206	                                         NULL_TREE, code);
1207	        break;
1208	
1209	      case BIT_AND_EXPR:
1210	      case BIT_IOR_EXPR:
1211	      case BIT_XOR_EXPR:
1212	        return expand_vector_parallel (gsi, do_binop, type,
1213	                                       gimple_assign_rhs1 (assign),
1214	                                       gimple_assign_rhs2 (assign), code);
1215	
1216	      case BIT_NOT_EXPR:
1217	        return expand_vector_parallel (gsi, do_unop, type,
1218	                                       gimple_assign_rhs1 (assign),
1219	                                       NULL_TREE, code);
1220	      case EQ_EXPR:
1221	      case NE_EXPR:
1222	      case GT_EXPR:
1223	      case LT_EXPR:
1224	      case GE_EXPR:
1225	      case LE_EXPR:
1226	      case UNEQ_EXPR:
1227	      case UNGT_EXPR:
1228	      case UNLT_EXPR:
1229	      case UNGE_EXPR:
1230	      case UNLE_EXPR:
1231	      case LTGT_EXPR:
1232	      case ORDERED_EXPR:
1233	      case UNORDERED_EXPR:
1234	        {
1235	          tree rhs1 = gimple_assign_rhs1 (assign);
1236	          tree rhs2 = gimple_assign_rhs2 (assign);
1237	
1238	          return expand_vector_comparison (gsi, type, rhs1, rhs2, code,
            [7]  Calling 'expand_vector_comparison'
1239	                                           dce_ssa_names);
1240	        }
1241	
1242	      case TRUNC_DIV_EXPR:
1243	      case TRUNC_MOD_EXPR:
1244	        {
1245	          tree rhs1 = gimple_assign_rhs1 (assign);
1246	          tree rhs2 = gimple_assign_rhs2 (assign);
1247	          tree ret;
1248	
1249	          if (!optimize
1250	              || !VECTOR_INTEGER_TYPE_P (type)
1251	              || TREE_CODE (rhs2) != VECTOR_CST
1252	              || !VECTOR_MODE_P (TYPE_MODE (type)))
1253	            break;
1254	
1255	          ret = expand_vector_divmod (gsi, type, rhs1, rhs2, code);
1256	          if (ret != NULL_TREE)
1257	            return ret;
1258	          break;
1259	        }
1260	
1261	      default:
1262	        break;
1263	      }
1264	
1265	  if (TREE_CODE_CLASS (code) == tcc_unary)
1266	    return expand_vector_piecewise (gsi, do_unop, type, compute_type,
1267	                                    gimple_assign_rhs1 (assign),
1268	                                    NULL_TREE, code, false);
1269	  else
1270	    return expand_vector_piecewise (gsi, do_binop, type, compute_type,
1271	                                    gimple_assign_rhs1 (assign),
1272	                                    gimple_assign_rhs2 (assign), code, false);
1273	}
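The comment at lines 1188-1191 above is about expanding operations "in parallel" when the vector type has no hardware vector mode: for the bitwise cases that are routed to expand_vector_parallel, the whole vector can be treated as one or more plain integer words rather than as individual elements. A rough standalone illustration of that idea (plain C++; the eight-byte width and the values are arbitrary choices for the example, not how the pass itself represents the operation) is:

// Sketch: a bytewise OR of two 8-element "vectors" done as a single
// 64-bit word operation, then checked against the element-wise result.
#include <array>
#include <cassert>
#include <cstdint>
#include <cstring>

int main ()
{
  std::array<std::uint8_t, 8> x = { 1, 2, 3, 4, 5, 6, 7, 8 };
  std::array<std::uint8_t, 8> y = { 8, 7, 6, 5, 4, 3, 2, 1 };

  std::uint64_t xw, yw;
  std::memcpy (&xw, x.data (), sizeof xw);   /* view the vector as a word */
  std::memcpy (&yw, y.data (), sizeof yw);
  std::uint64_t zw = xw | yw;                /* one word-wide BIT_IOR     */

  std::array<std::uint8_t, 8> z;
  std::memcpy (z.data (), &zw, sizeof zw);
  for (unsigned i = 0; i < z.size (); ++i)   /* matches element-wise OR   */
    assert (z[i] == (x[i] | y[i]));
  return 0;
}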
1274	
1275	/* Try to optimize
1276	   a_5 = { b_7, b_7 + 3, b_7 + 6, b_7 + 9 };
1277	   style stmts into:
1278	   _9 = { b_7, b_7, b_7, b_7 };
1279	   a_5 = _9 + { 0, 3, 6, 9 };
1280	   because vector splat operation is usually more efficient
1281	   than piecewise initialization of the vector.  */
1282	
1283	static void
1284	optimize_vector_constructor (gimple_stmt_iterator *gsi)
1285	{
1286	  gassign *stmt = as_a <gassign *> (gsi_stmt (*gsi));
1287	  tree lhs = gimple_assign_lhs (stmt);
1288	  tree rhs = gimple_assign_rhs1 (stmt);
1289	  tree type = TREE_TYPE (rhs);
1290	  unsigned int i, j;
1291	  unsigned HOST_WIDE_INT nelts;
1292	  bool all_same = true;
1293	  constructor_elt *elt;
1294	  gimple *g;
1295	  tree base = NULL_TREE;
1296	  optab op;
1297	
1298	  if (!TYPE_VECTOR_SUBPARTS (type).is_constant (&nelts)
1299	      || nelts <= 2
1300	      || CONSTRUCTOR_NELTS (rhs) != nelts)
1301	    return;
1302	  op = optab_for_tree_code (PLUS_EXPR, type, optab_default);
1303	  if (op == unknown_optab
1304	      || optab_handler (op, TYPE_MODE (type)) == CODE_FOR_nothing)
1305	    return;
1306	  FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (rhs), i, elt)
1307	    if (TREE_CODE (elt->value) != SSA_NAME
1308	        || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
1309	      return;
1310	    else
1311	      {
1312	        tree this_base = elt->value;
1313	        if (this_base != CONSTRUCTOR_ELT (rhs, 0)->value)
1314	          all_same = false;
1315	        for (j = 0; j < nelts + 1; j++)
1316	          {
1317	            g = SSA_NAME_DEF_STMT (this_base);
1318	            if (is_gimple_assign (g)
1319	                && gimple_assign_rhs_code (g) == PLUS_EXPR
1320	                && TREE_CODE (gimple_assign_rhs2 (g)) == INTEGER_CST
1321	                && TREE_CODE (gimple_assign_rhs1 (g)) == SSA_NAME
1322	                && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs1 (g)))
1323	              this_base = gimple_assign_rhs1 (g);
1324	            else
1325	              break;
1326	          }
1327	        if (i == 0)
1328	          base = this_base;
1329	        else if (this_base != base)
1330	          return;
1331	      }
1332	  if (all_same)
1333	    return;
1334	  tree_vector_builder cst (type, nelts, 1);
1335	  for (i = 0; i < nelts; i++)
1336	    {
1337	      tree this_base = CONSTRUCTOR_ELT (rhs, i)->value;
1338	      tree elt = build_zero_cst (TREE_TYPE (base));
1339	      while (this_base != base)
1340	        {
1341	          g = SSA_NAME_DEF_STMT (this_base);
1342	          elt = fold_binary (PLUS_EXPR, TREE_TYPE (base),
1343	                             elt, gimple_assign_rhs2 (g));
1344	          if (elt == NULL_TREE
1345	              || TREE_CODE (elt) != INTEGER_CST
1346	              || TREE_OVERFLOW (elt))
1347	            return;
1348	          this_base = gimple_assign_rhs1 (g);
1349	        }
1350	      cst.quick_push (elt);
1351	    }
1352	  for (i = 0; i < nelts; i++)
1353	    CONSTRUCTOR_ELT (rhs, i)->value = base;
1354	  g = gimple_build_assign (make_ssa_name (type), rhs);
1355	  gsi_insert_before (gsi, g, GSI_SAME_STMT);
1356	  g = gimple_build_assign (lhs, PLUS_EXPR, gimple_assign_lhs (g),
1357	                           cst.build ());
1358	  gsi_replace (gsi, g, false);
1359	}
1360	
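The comment above optimize_vector_constructor describes rewriting a constructor whose elements are a common base plus constant offsets into a splat of the base plus a constant vector. A standalone check of that equivalence (plain C++, with an arbitrary example value for b; the real pass of course works on GIMPLE trees, not arrays) is:

// Sketch: { b, b + 3, b + 6, b + 9 }  ==  { b, b, b, b } + { 0, 3, 6, 9 }.
#include <array>
#include <cassert>

int main ()
{
  int b = 5;

  /* Piecewise initialization, as in the original constructor.  */
  std::array<int, 4> piecewise = { b, b + 3, b + 6, b + 9 };

  /* Splat + constant-vector addition, as in the optimized form.  */
  std::array<int, 4> splat = { b, b, b, b };
  std::array<int, 4> cst   = { 0, 3, 6, 9 };
  std::array<int, 4> optimized;
  for (unsigned i = 0; i < 4; ++i)
    optimized[i] = splat[i] + cst[i];

  assert (optimized == piecewise);
  return 0;
}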
1361	/* Return a type for the widest vector mode whose components are of type
1362	   TYPE, or NULL_TREE if none is found.  */
1363	
1364	static tree
1365	type_for_widest_vector_mode (tree type, optab op)
1366	{
1367	  machine_mode inner_mode = TYPE_MODE (type);
1368	  machine_mode best_mode = VOIDmode, mode;
1369	  poly_int64 best_nunits = 0;
1370	
1371	  if (SCALAR_FLOAT_MODE_P (inner_mode))
1372	    mode = MIN_MODE_VECTOR_FLOAT;
1373	  else if (SCALAR_FRACT_MODE_P (inner_mode))
1374	    mode = MIN_MODE_VECTOR_FRACT;
1375	  else if (SCALAR_UFRACT_MODE_P (inner_mode))
1376	    mode = MIN_MODE_VECTOR_UFRACT;
1377	  else if (SCALAR_ACCUM_MODE_P (inner_mode))
1378	    mode = MIN_MODE_VECTOR_ACCUM;
1379	  else if (SCALAR_UACCUM_MODE_P (inner_mode))
1380	    mode = MIN_MODE_VECTOR_UACCUM;
1381	  else if (inner_mode == BImode)
1382	    mode = MIN_MODE_VECTOR_BOOL;
1383	  else
1384	    mode = MIN_MODE_VECTOR_INT;
1385	
1386	  FOR_EACH_MODE_FROM (mode, mode)
1387	    if (GET_MODE_INNER (mode) == inner_mode
1388	        && maybe_gt (GET_MODE_NUNITS (mode), best_nunits)
1389	        && optab_handler (op, mode) != CODE_FOR_nothing)
1390	      best_mode = mode, best_nunits = GET_MODE_NUNITS (mode);
1391	
1392	  if (best_mode == VOIDmode)
1393	    return NULL_TREE;
1394	  else
1395	    return build_vector_type_for_mode (type, best_mode);
1396	}
1397	
1398	
1399	/* Build a reference to the element of the vector VECT.  Function
1400	   returns either the element itself, either BIT_FIELD_REF, or an
1401	   ARRAY_REF expression.
1402	
1403	   GSI is required to insert temporary variables while building a
1404	   refernece to the element of the vector VECT.
1405	
1406	   PTMPVEC is a pointer to the temporary variable for caching
1407	   purposes.  In case when PTMPVEC is NULL new temporary variable
1408	   will be created.  */
1409	static tree
1410	vector_element (gimple_stmt_iterator *gsi, tree vect, tree idx, tree *ptmpvec)
1411	{
1412	  tree vect_type, vect_elt_type;
1413	  gimple *asgn;
1414	  tree tmpvec;
1415	  tree arraytype;
1416	  bool need_asgn = true;
1417	  unsigned int elements;
1418	
1419	  vect_type = TREE_TYPE (vect);
1420	  vect_elt_type = TREE_TYPE (vect_type);
1421	  elements = nunits_for_known_piecewise_op (vect_type);
1422	
1423	  if (TREE_CODE (idx) == INTEGER_CST)
1424	    {
1425	      unsigned HOST_WIDE_INT index;
1426	
1427	      /* Given that we're about to compute a binary modulus,
1428	         we don't care about the high bits of the value.  */
1429	      index = TREE_INT_CST_LOW (idx);
1430	      if (!tree_fits_uhwi_p (idx) || index >= elements)
1431	        {
1432	          index &= elements - 1;
1433	          idx = build_int_cst (TREE_TYPE (idx), index);
1434	        }
1435	
1436	      /* When lowering a vector statement sequence do some easy
1437	         simplification by looking through intermediate vector results.  */
1438	      if (TREE_CODE (vect) == SSA_NAME)
1439	        {
1440	          gimple *def_stmt = SSA_NAME_DEF_STMT (vect);
1441	          if (is_gimple_assign (def_stmt)
1442	              && (gimple_assign_rhs_code (def_stmt) == VECTOR_CST
1443	                  || gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR))
1444	            vect = gimple_assign_rhs1 (def_stmt);
1445	        }
1446	
1447	      if (TREE_CODE (vect) == VECTOR_CST)
1448	        return VECTOR_CST_ELT (vect, index);
1449	      else if (TREE_CODE (vect) == CONSTRUCTOR
1450	               && (CONSTRUCTOR_NELTS (vect) == 0
1451	                   || TREE_CODE (TREE_TYPE (CONSTRUCTOR_ELT (vect, 0)->value))
1452	                      != VECTOR_TYPE))
1453	        {
1454	          if (index < CONSTRUCTOR_NELTS (vect))
1455	            return CONSTRUCTOR_ELT (vect, index)->value;
1456	          return build_zero_cst (vect_elt_type);
1457	        }
1458	      else
1459	        {
1460	          tree size = vector_element_bits_tree (vect_type);
1461	          tree pos = fold_build2 (MULT_EXPR, bitsizetype, bitsize_int (index),
1462	                                  size);
1463	          return fold_build3 (BIT_FIELD_REF, vect_elt_type, vect, size, pos);
1464	        }
1465	    }
1466	
1467	  if (!ptmpvec)
1468	    tmpvec = create_tmp_var (vect_type, "vectmp");
1469	  else if (!*ptmpvec)
1470	    tmpvec = *ptmpvec = create_tmp_var (vect_type, "vectmp");
1471	  else
1472	    {
1473	      tmpvec = *ptmpvec;
1474	      need_asgn = false;
1475	    }
1476	
1477	  if (need_asgn)
1478	    {
1479	      TREE_ADDRESSABLE (tmpvec) = 1;
1480	      asgn = gimple_build_assign (tmpvec, vect);
1481	      gsi_insert_before (gsi, asgn, GSI_SAME_STMT);
1482	    }
1483	
1484	  arraytype = build_array_type_nelts (vect_elt_type, elements);
1485	  return build4 (ARRAY_REF, vect_elt_type,
1486	                 build1 (VIEW_CONVERT_EXPR, arraytype, tmpvec),
1487	                 idx, NULL_TREE, NULL_TREE);
1488	}
1489	
1490	/* Check if VEC_PERM_EXPR within the given setting is supported
1491	   by hardware, or lower it piecewise.
1492	
1493	   When VEC_PERM_EXPR has the same first and second operands:
1494	   VEC_PERM_EXPR <v0, v0, mask> the lowered version would be
1495	   {v0[mask[0]], v0[mask[1]], ...}
1496	   MASK and V0 must have the same number of elements.
1497	
1498	   Otherwise VEC_PERM_EXPR <v0, v1, mask> is lowered to
1499	   {mask[0] < len(v0) ? v0[mask[0]] : v1[mask[0]], ...}
1500	   V0 and V1 must have the same type.  MASK, V0, V1 must have the
1501	   same number of arguments.  */
1502	
1503	static void
1504	lower_vec_perm (gimple_stmt_iterator *gsi)
1505	{
1506	  gassign *stmt = as_a <gassign *> (gsi_stmt (*gsi));
1507	  tree mask = gimple_assign_rhs3 (stmt);
1508	  tree vec0 = gimple_assign_rhs1 (stmt);
1509	  tree vec1 = gimple_assign_rhs2 (stmt);
1510	  tree vect_type = TREE_TYPE (vec0);
1511	  tree mask_type = TREE_TYPE (mask);
1512	  tree vect_elt_type = TREE_TYPE (vect_type);
1513	  tree mask_elt_type = TREE_TYPE (mask_type);
1514	  unsigned HOST_WIDE_INT elements;
1515	  vec<constructor_elt, va_gc> *v;
1516	  tree constr, t, si, i_val;
1517	  tree vec0tmp = NULL_TREE, vec1tmp = NULL_TREE, masktmp = NULL_TREE;
1518	  bool two_operand_p = !operand_equal_p (vec0, vec1, 0);
1519	  location_t loc = gimple_location (gsi_stmt (*gsi));
1520	  unsigned i;
1521	
1522	  if (!TYPE_VECTOR_SUBPARTS (vect_type).is_constant (&elements))
1523	    return;
1524	
1525	  if (TREE_CODE (mask) == SSA_NAME)
1526	    {
1527	      gimple *def_stmt = SSA_NAME_DEF_STMT (mask);
1528	      if (is_gimple_assign (def_stmt)
1529	          && gimple_assign_rhs_code (def_stmt) == VECTOR_CST)
1530	        mask = gimple_assign_rhs1 (def_stmt);
1531	    }
1532	
1533	  vec_perm_builder sel_int;
1534	
1535	  if (TREE_CODE (mask) == VECTOR_CST
1536	      && tree_to_vec_perm_builder (&sel_int, mask))
1537	    {
1538	      vec_perm_indices indices (sel_int, 2, elements);
1539	      machine_mode vmode = TYPE_MODE (vect_type);
1540	      tree lhs_type = TREE_TYPE (gimple_assign_lhs (stmt));
1541	      machine_mode lhs_mode = TYPE_MODE (lhs_type);
1542	      if (can_vec_perm_const_p (lhs_mode, vmode, indices))
1543	        {
1544	          gimple_assign_set_rhs3 (stmt, mask);
1545	          update_stmt (stmt);
1546	          return;
1547	        }
1548	      /* Also detect vec_shr pattern - VEC_PERM_EXPR with zero
1549	         vector as VEC1 and a right element shift MASK.  */
1550	      if (optab_handler (vec_shr_optab, TYPE_MODE (vect_type))
1551	          != CODE_FOR_nothing
1552	          && TREE_CODE (vec1) == VECTOR_CST
1553	          && initializer_zerop (vec1)
1554	          && maybe_ne (indices[0], 0)
1555	          && known_lt (poly_uint64 (indices[0]), elements))
1556	        {
1557	          bool ok_p = indices.series_p (0, 1, indices[0], 1);
1558	          if (!ok_p)
1559	            {
1560	              for (i = 1; i < elements; ++i)
1561	                {
1562	                  poly_uint64 actual = indices[i];
1563	                  poly_uint64 expected = i + indices[0];
1564	                  /* Indices into the second vector are all equivalent.  */
1565	                  if (maybe_lt (actual, elements)
1566	                      ? maybe_ne (actual, expected)
1567	                      : maybe_lt (expected, elements))
1568	                    break;
1569	                }
1570	              ok_p = i == elements;
1571	            }
1572	          if (ok_p)
1573	            {
1574	              gimple_assign_set_rhs3 (stmt, mask);
1575	              update_stmt (stmt);
1576	              return;
1577	            }
1578	        }
1579	      /* And similarly vec_shl pattern.  */
1580	      if (optab_handler (vec_shl_optab, TYPE_MODE (vect_type))
1581	          != CODE_FOR_nothing
1582	          && TREE_CODE (vec0) == VECTOR_CST
1583	          && initializer_zerop (vec0))
1584	        {
1585	          unsigned int first = 0;
1586	          for (i = 0; i < elements; ++i)
1587	            if (known_eq (poly_uint64 (indices[i]), elements))
1588	              {
1589	                if (i == 0 || first)
1590	                  break;
1591	                first = i;
1592	              }
1593	            else if (first
1594	                     ? maybe_ne (poly_uint64 (indices[i]),
1595	                                 elements + i - first)
1596	                     : maybe_ge (poly_uint64 (indices[i]), elements))
1597	              break;
1598	          if (first && i == elements)
1599	            {
1600	              gimple_assign_set_rhs3 (stmt, mask);
1601	              update_stmt (stmt);
1602	              return;
1603	            }
1604	        }
1605	    }
1606	  else if (can_vec_perm_var_p (TYPE_MODE (vect_type)))
1607	    return;
1608	
1609	  if (!warning_suppressed_p (stmt, OPT_Wvector_operation_performance))
1610	    warning_at (loc, OPT_Wvector_operation_performance,
1611	                "vector shuffling operation will be expanded piecewise");
1612	
1613	  vec_alloc (v, elements);
1614	  bool constant_p = true;
1615	  for (i = 0; i < elements; i++)
1616	    {
1617	      si = size_int (i);
1618	      i_val = vector_element (gsi, mask, si, &masktmp);
1619	
1620	      if (TREE_CODE (i_val) == INTEGER_CST)
1621	        {
1622	          unsigned HOST_WIDE_INT index;
1623	
1624	          index = TREE_INT_CST_LOW (i_val);
1625	          if (!tree_fits_uhwi_p (i_val) || index >= elements)
1626	            i_val = build_int_cst (mask_elt_type, index & (elements - 1));
1627	
1628	          if (two_operand_p && (index & elements) != 0)
1629	            t = vector_element (gsi, vec1, i_val, &vec1tmp);
1630	          else
1631	            t = vector_element (gsi, vec0, i_val, &vec0tmp);
1632	
1633	          t = force_gimple_operand_gsi (gsi, t, true, NULL_TREE,
1634	                                        true, GSI_SAME_STMT);
1635	        }
1636	      else
1637	        {
1638	          tree cond = NULL_TREE, v0_val;
1639	
1640	          if (two_operand_p)
1641	            {
1642	              cond = fold_build2 (BIT_AND_EXPR, mask_elt_type, i_val,
1643	                                  build_int_cst (mask_elt_type, elements));
1644	              cond = force_gimple_operand_gsi (gsi, cond, true, NULL_TREE,
1645	                                               true, GSI_SAME_STMT);
1646	            }
1647	
1648	          i_val = fold_build2 (BIT_AND_EXPR, mask_elt_type, i_val,
1649	                               build_int_cst (mask_elt_type, elements - 1));
1650	          i_val = force_gimple_operand_gsi (gsi, i_val, true, NULL_TREE,
1651	                                            true, GSI_SAME_STMT);
1652	
1653	          v0_val = vector_element (gsi, vec0, i_val, &vec0tmp);
1654	          v0_val = force_gimple_operand_gsi (gsi, v0_val, true, NULL_TREE,
1655	                                             true, GSI_SAME_STMT);
1656	
1657	          if (two_operand_p)
1658	            {
1659	              tree v1_val;
1660	
1661	              v1_val = vector_element (gsi, vec1, i_val, &vec1tmp);
1662	              v1_val = force_gimple_operand_gsi (gsi, v1_val, true, NULL_TREE,
1663	                                                 true, GSI_SAME_STMT);
1664	
1665	              cond = fold_build2 (EQ_EXPR, boolean_type_node,
1666	                                  cond, build_zero_cst (mask_elt_type));
1667	              cond = fold_build3 (COND_EXPR, vect_elt_type,
1668	                                  cond, v0_val, v1_val);
1669	              t = force_gimple_operand_gsi (gsi, cond, true, NULL_TREE,
1670	                                            true, GSI_SAME_STMT);
1671	            }
1672	          else
1673	            t = v0_val;
1674	        }
1675	
1676	      if (!CONSTANT_CLASS_P (t))
1677	        constant_p = false;
1678	      CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, t);
1679	    }
1680	
1681	  if (constant_p)
1682	    constr = build_vector_from_ctor (vect_type, v);
1683	  else
1684	    constr = build_constructor (vect_type, v);
1685	  gimple_assign_set_rhs_from_tree (gsi, constr);
1686	  update_stmt (gsi_stmt (*gsi));
1687	}
1688	
1689	/* If OP is a uniform vector return the element it is a splat from.  */
1690	
1691	static tree
1692	ssa_uniform_vector_p (tree op)
1693	{
1694	  if (TREE_CODE (op) == VECTOR_CST
1695	      || TREE_CODE (op) == VEC_DUPLICATE_EXPR
1696	      || TREE_CODE (op) == CONSTRUCTOR)
1697	    return uniform_vector_p (op);
1698	  if (TREE_CODE (op) == SSA_NAME)
1699	    {
1700	      gimple *def_stmt = SSA_NAME_DEF_STMT (op);
1701	      if (gimple_assign_single_p (def_stmt))
1702	        return uniform_vector_p (gimple_assign_rhs1 (def_stmt));
1703	    }
1704	  return NULL_TREE;
1705	}
1706	
1707	/* Return type in which CODE operation with optab OP can be
1708	   computed.  */
1709	
1710	static tree
1711	get_compute_type (enum tree_code code, optab op, tree type)
1712	{
1713	  /* For very wide vectors, try using a smaller vector mode.  */
1714	  tree compute_type = type;
1715	  if (op
1716	      && (!VECTOR_MODE_P (TYPE_MODE (type))
1717	          || optab_handler (op, TYPE_MODE (type)) == CODE_FOR_nothing))
1718	    {
1719	      tree vector_compute_type
1720	        = type_for_widest_vector_mode (TREE_TYPE (type), op);
1721	      if (vector_compute_type != NULL_TREE
1722	          && subparts_gt (compute_type, vector_compute_type)
1723	          && maybe_ne (TYPE_VECTOR_SUBPARTS (vector_compute_type), 1U)
1724	          && (optab_handler (op, TYPE_MODE (vector_compute_type))
1725	              != CODE_FOR_nothing))
1726	        compute_type = vector_compute_type;
1727	    }
1728	
1729	  /* If we are breaking a BLKmode vector into smaller pieces,
1730	     type_for_widest_vector_mode has already looked into the optab,
1731	     so skip these checks.  */
1732	  if (compute_type == type)
1733	    {
1734	      machine_mode compute_mode = TYPE_MODE (compute_type);
1735	      if (VECTOR_MODE_P (compute_mode
) 1736 { 1737 if (op && optab_handler (op, compute_mode) != CODE_FOR_nothing) 1738 return compute_type; 1739 if (code == MULT_HIGHPART_EXPR 1740 && can_mult_highpart_p (compute_mode, 1741 TYPE_UNSIGNED (compute_type)((tree_class_check ((compute_type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-generic.cc"
, 1741, __FUNCTION__))->base.u.bits.unsigned_flag)
)) 1742 return compute_type; 1743 } 1744 /* There is no operation in hardware, so fall back to scalars. */ 1745 compute_type = TREE_TYPE (type)((contains_struct_check ((type), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-generic.cc"
, 1745, __FUNCTION__))->typed.type)
; 1746 } 1747 1748 return compute_type; 1749} 1750 1751static tree 1752do_cond (gimple_stmt_iterator *gsi, tree inner_type, tree a, tree b, 1753 tree bitpos, tree bitsize, enum tree_code code, 1754 tree type ATTRIBUTE_UNUSED__attribute__ ((__unused__))) 1755{ 1756 if (TREE_CODE (TREE_TYPE (a))((enum tree_code) (((contains_struct_check ((a), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-generic.cc"
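Aside (not part of the file): get_compute_type above implements a three-step fallback: keep the full vector type when the target has a handler for its mode, otherwise try the widest smaller supported vector mode, and finally fall back to the scalar element type. A hedged standalone sketch of that ladder, where supports_width() style querying is a made-up stand-in for the optab_handler/CODE_FOR_nothing check in the real code:

/* Illustrative sketch only; "supported" is a hypothetical stand-in for
   the target's optab query, not a GCC interface.  */
#include <vector>

// Pick the number of lanes an operation should be computed in: the full
// width if supported, else the widest supported narrower width, else 1.
unsigned choose_compute_lanes (unsigned lanes,
                               const std::vector<unsigned> &supported)
{
  auto supports = [&] (unsigned n) {
    for (unsigned s : supported)
      if (s == n)
        return true;
    return false;
  };

  if (supports (lanes))
    return lanes;                    // hardware handles the whole vector
  for (unsigned n = lanes / 2; n > 1; n /= 2)
    if (supports (n))
      return n;                      // break into smaller vector pieces
  return 1;                          // no vector support: scalarize
}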
1750
1751static tree
1752do_cond (gimple_stmt_iterator *gsi, tree inner_type, tree a, tree b,
1753	 tree bitpos, tree bitsize, enum tree_code code,
1754	 tree type ATTRIBUTE_UNUSED)
1755{
1756  if (TREE_CODE (TREE_TYPE (a)) == VECTOR_TYPE)
1757    a = tree_vec_extract (gsi, inner_type, a, bitsize, bitpos);
1758  if (TREE_CODE (TREE_TYPE (b)) == VECTOR_TYPE)
1759    b = tree_vec_extract (gsi, inner_type, b, bitsize, bitpos);
1760  tree cond = gimple_assign_rhs1 (gsi_stmt (*gsi));
1761  return gimplify_build3 (gsi, code, inner_type, unshare_expr (cond), a, b);
1762}
1763
1764/* Expand a vector COND_EXPR to scalars, piecewise.  */
1765static void
1766expand_vector_scalar_condition (gimple_stmt_iterator *gsi)
1767{
1768  gassign *stmt = as_a <gassign *> (gsi_stmt (*gsi));
1769  tree lhs = gimple_assign_lhs (stmt);
1770  tree type = TREE_TYPE (lhs);
1771  tree compute_type = get_compute_type (COND_EXPR, mov_optab, type);
1772  machine_mode compute_mode = TYPE_MODE (compute_type);
1773  gcc_assert (compute_mode != BLKmode);
1774  tree rhs2 = gimple_assign_rhs2 (stmt);
1775  tree rhs3 = gimple_assign_rhs3 (stmt);
1776  tree new_rhs;
1777
1778  /* If the compute mode is not a vector mode (hence we are not decomposing
1779     a BLKmode vector to smaller, hardware-supported vectors), we may want
1780     to expand the operations in parallel.  */
1781  if (!VECTOR_MODE_P (compute_mode))
1782    new_rhs = expand_vector_parallel (gsi, do_cond, type, rhs2, rhs3,
1783				      COND_EXPR);
1784  else
1785    new_rhs = expand_vector_piecewise (gsi, do_cond, type, compute_type,
1786				       rhs2, rhs3, COND_EXPR, false);
1787  if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (new_rhs)))
1788    new_rhs = gimplify_build1 (gsi, VIEW_CONVERT_EXPR, TREE_TYPE (lhs),
1789			       new_rhs);
1790
1791  /* NOTE:  We should avoid using gimple_assign_set_rhs_from_tree. One
1792     way to do it is change expand_vector_operation and its callees to
1793     return a tree_code, RHS1 and RHS2 instead of a tree. */
1794  gimple_assign_set_rhs_from_tree (gsi, new_rhs);
1795  update_stmt (gsi_stmt (*gsi));
1796}
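Aside (not part of the file): the piecewise path used above conceptually rewrites a vector COND_EXPR as one scalar ?: per lane. A standalone sketch of that lowering on plain arrays; the lane count and element type are illustrative assumptions only:

/* Illustrative sketch only: a vector select lowered lane by lane, the
   way the piecewise expansion produces one scalar COND_EXPR per lane.  */
#include <array>

constexpr unsigned kLanes = 4;   // assumed lane count for the example

std::array<int, kLanes>
select_piecewise (const std::array<bool, kLanes> &cond,
                  const std::array<int, kLanes> &a,
                  const std::array<int, kLanes> &b)
{
  std::array<int, kLanes> out{};
  for (unsigned i = 0; i < kLanes; ++i)
    out[i] = cond[i] ? a[i] : b[i];   // one scalar selection per lane
  return out;
}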
1797
1798/* Callback for expand_vector_piecewise to do VEC_CONVERT ifn call
1799   lowering.  If INNER_TYPE is not a vector type, this is a scalar
1800   fallback.  */
1801
1802static tree
1803do_vec_conversion (gimple_stmt_iterator *gsi, tree inner_type, tree a,
1804		   tree decl, tree bitpos, tree bitsize,
1805		   enum tree_code code, tree type)
1806{
1807  a = tree_vec_extract (gsi, inner_type, a, bitsize, bitpos);
1808  if (!VECTOR_TYPE_P (inner_type))
1809    return gimplify_build1 (gsi, code, TREE_TYPE (type), a);
1810  if (code == CALL_EXPR)
1811    {
1812      gimple *g = gimple_build_call (decl, 1, a);
1813      tree lhs = make_ssa_name (TREE_TYPE (TREE_TYPE (decl)));
1814      gimple_call_set_lhs (g, lhs);
1815      gsi_insert_before (gsi, g, GSI_SAME_STMT);
1816      return lhs;
1817    }
1818  else
1819    {
1820      tree outer_type = build_vector_type (TREE_TYPE (type),
1821					   TYPE_VECTOR_SUBPARTS (inner_type));
1822      return gimplify_build1 (gsi, code, outer_type, a);
1823    }
1824}
1825
1826/* Similarly, but for narrowing conversion.  */
1827
1828static tree
1829do_vec_narrow_conversion (gimple_stmt_iterator *gsi, tree inner_type, tree a,
1830			  tree, tree bitpos, tree, enum tree_code code,
1831			  tree type)
1832{
1833  tree itype = build_vector_type (TREE_TYPE (inner_type),
1834				  exact_div (TYPE_VECTOR_SUBPARTS (inner_type),
1835					     2));
1836  tree b = tree_vec_extract (gsi, itype, a, TYPE_SIZE (itype), bitpos);
1837  tree c = tree_vec_extract (gsi, itype, a, TYPE_SIZE (itype),
1838			     int_const_binop (PLUS_EXPR, bitpos,
1839					      TYPE_SIZE (itype)));
1840  tree outer_type = build_vector_type (TREE_TYPE (type),
1841				       TYPE_VECTOR_SUBPARTS (inner_type));
1842  return gimplify_build2 (gsi, code, outer_type, b, c);
1843}
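Aside (not part of the file): do_vec_narrow_conversion above splits its input slice into two half-width pieces and combines them with a pack-style operation, so each output vector covers twice as many source elements. A standalone sketch of the narrowing idea, in the spirit of VEC_PACK_TRUNC_EXPR but ignoring endianness-dependent lane order; the concrete element types are illustrative assumptions:

/* Illustrative sketch only: packing two halves of a wider-element
   vector into one narrower-element vector by truncation.  */
#include <array>
#include <cstdint>

std::array<int16_t, 8>
pack_trunc (const std::array<int32_t, 4> &lo,
            const std::array<int32_t, 4> &hi)
{
  std::array<int16_t, 8> out{};
  for (unsigned i = 0; i < 4; ++i)
    {
      out[i] = static_cast<int16_t> (lo[i]);      // truncate low half
      out[i + 4] = static_cast<int16_t> (hi[i]);  // truncate high half
    }
  return out;
}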
1844
1845/* Expand VEC_CONVERT ifn call.  */
1846
1847static void
1848expand_vector_conversion (gimple_stmt_iterator *gsi)
1849{
1850  gimple *stmt = gsi_stmt (*gsi);
1851  gimple *g;
1852  tree lhs = gimple_call_lhs (stmt);
1853  if (lhs == NULL_TREE)
1854    {
1855      g = gimple_build_nop ();
1856      gsi_replace (gsi, g, false);
1857      return;
1858    }
1859  tree arg = gimple_call_arg (stmt, 0);
1860  tree ret_type = TREE_TYPE (lhs);
1861  tree arg_type = TREE_TYPE (arg);
1862  tree new_rhs, compute_type = TREE_TYPE (arg_type);
1863  enum tree_code code = NOP_EXPR;
1864  enum tree_code code1 = ERROR_MARK;
1865  enum { NARROW, NONE, WIDEN } modifier = NONE;
1866  optab optab1 = unknown_optab;
1867
1868  gcc_checking_assert (VECTOR_TYPE_P (ret_type) && VECTOR_TYPE_P (arg_type));
1869  if (INTEGRAL_TYPE_P (TREE_TYPE (ret_type))
1870      && SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg_type)))
1871    code = FIX_TRUNC_EXPR;
1872  else if (INTEGRAL_TYPE_P (TREE_TYPE (arg_type))
1873	   && SCALAR_FLOAT_TYPE_P (TREE_TYPE (ret_type)))
1874    code = FLOAT_EXPR;
1875  unsigned int ret_elt_bits = vector_element_bits (ret_type);
1876  unsigned int arg_elt_bits = vector_element_bits (arg_type);
1877  if (ret_elt_bits < arg_elt_bits)
1878    modifier = NARROW;
1879  else if (ret_elt_bits > arg_elt_bits)
1880    modifier = WIDEN;
1881
1882  if (modifier == NONE && (code == FIX_TRUNC_EXPR || code == FLOAT_EXPR))
1883    {
1884      if (supportable_convert_operation (code, ret_type, arg_type, &code1))
1885	{
1886	  g = gimple_build_assign (lhs, code1, arg);
1887	  gsi_replace (gsi, g, false);
1888	  return;
1889	}
1890      /* Can't use get_compute_type here, as supportable_convert_operation
1891	 doesn't necessarily use an optab and needs two arguments.  */
1892      tree vec_compute_type
1893	= type_for_widest_vector_mode (TREE_TYPE (arg_type), mov_optab);
1894      if (vec_compute_type
1895	  && VECTOR_MODE_P (TYPE_MODE (vec_compute_type))
1896	  && subparts_gt (arg_type, vec_compute_type))
1897	{
1898	  unsigned HOST_WIDE_INT nelts
1899	    = constant_lower_bound (TYPE_VECTOR_SUBPARTS (vec_compute_type));
1900	  while (nelts > 1)
1901	    {
1902	      tree ret1_type = build_vector_type (TREE_TYPE (ret_type), nelts);
1903	      tree arg1_type = build_vector_type (TREE_TYPE (arg_type), nelts);
1904	      if (supportable_convert_operation (code, ret1_type, arg1_type,
1905						 &code1))
1906		{
1907		  new_rhs = expand_vector_piecewise (gsi, do_vec_conversion,
1908						     ret_type, arg1_type, arg,
1909						     NULL_TREE, code1, false);
1910		  g = gimple_build_assign (lhs, new_rhs);
1911		  gsi_replace (gsi, g, false);
1912		  return;
1913		}
1914	      nelts = nelts / 2;
1915	    }
1916	}
1917    }
1918  else if (modifier == NARROW)
1919    {
1920      switch (code)
1921	{
1922	CASE_CONVERT:
1923	  code1 = VEC_PACK_TRUNC_EXPR;
1924	  optab1 = optab_for_tree_code (code1, arg_type, optab_default);
1925	  break;
1926	case FIX_TRUNC_EXPR:
1927	  code1 = VEC_PACK_FIX_TRUNC_EXPR;
1928	  /* The signedness is determined from output operand.  */
1929	  optab1 = optab_for_tree_code (code1, ret_type, optab_default);
1930	  break;
1931	case FLOAT_EXPR:
1932	  code1 = VEC_PACK_FLOAT_EXPR;
1933	  optab1 = optab_for_tree_code (code1, arg_type, optab_default);
1934	  break;
1935	default:
1936	  gcc_unreachable ();
1937	}
1938
1939      if (optab1)
1940	compute_type = get_compute_type (code1, optab1, arg_type);
1941      enum insn_code icode1;
1942      if (VECTOR_TYPE_P (compute_type)
1943	  && ((icode1 = optab_handler (optab1, TYPE_MODE (compute_type)))
1944	      != CODE_FOR_nothing)
1945	  && VECTOR_MODE_P (insn_data[icode1].operand[0].mode))
1946	{
1947	  tree cretd_type
1948	    = build_vector_type (TREE_TYPE (ret_type),
1949				 TYPE_VECTOR_SUBPARTS (compute_type) * 2);
1950	  if (insn_data[icode1].operand[0].mode == TYPE_MODE (cretd_type))
1951	    {
1952	      if (compute_type == arg_type)
1953		{
1954		  new_rhs = gimplify_build2 (gsi, code1, cretd_type,
1955					     arg, build_zero_cst (arg_type));
1956		  new_rhs = tree_vec_extract (gsi, ret_type, new_rhs,
1957					      TYPE_SIZE (ret_type),
1958					      bitsize_int (0));
1959		  g = gimple_build_assign (lhs, new_rhs);
1960		  gsi_replace (gsi, g, false);
1961		  return;
1962		}
1963	      tree dcompute_type
1964		= build_vector_type (TREE_TYPE (compute_type),
1965				     TYPE_VECTOR_SUBPARTS (compute_type) * 2);
1966	      if (TYPE_MAIN_VARIANT (dcompute_type)
1967		  == TYPE_MAIN_VARIANT (arg_type))
1968		new_rhs = do_vec_narrow_conversion (gsi, dcompute_type, arg,
1969						    NULL_TREE, bitsize_int (0),
1970						    NULL_TREE, code1,
1971						    ret_type);
1972	      else
1973		new_rhs = expand_vector_piecewise (gsi,
1974						   do_vec_narrow_conversion,
1975						   arg_type, dcompute_type,
1976						   arg, NULL_TREE, code1,
1977						   false, ret_type);
1978	      g = gimple_build_assign (lhs, new_rhs);
1979	      gsi_replace (gsi, g, false);
1980	      return;
1981	    }
1982	}
1983    }
1984  else if (modifier == WIDEN)
1985    {
1986      enum tree_code code2 = ERROR_MARK;
1987      optab optab2 = unknown_optab;
1988      switch (code)
1989	{
1990	CASE_CONVERT:
1991	  code1 = VEC_UNPACK_LO_EXPR;
1992	  code2 = VEC_UNPACK_HI_EXPR;
1993	  break;
1994	case FIX_TRUNC_EXPR:
1995	  code1 = VEC_UNPACK_FIX_TRUNC_LO_EXPR;
1996	  code2 = VEC_UNPACK_FIX_TRUNC_HI_EXPR;
1997	  break;
1998	case FLOAT_EXPR:
1999	  code1 = VEC_UNPACK_FLOAT_LO_EXPR;
2000	  code2 = VEC_UNPACK_FLOAT_HI_EXPR;
2001	  break;
2002	default:
2003	  gcc_unreachable ();
2004	}
2005      if (BYTES_BIG_ENDIAN)
2006	std::swap (code1, code2);
2007
2008      if (code == FIX_TRUNC_EXPR)
2009	{
2010	  /* The signedness is determined from output operand.  */
2011	  optab1 = optab_for_tree_code (code1, ret_type, optab_default);
2012	  optab2 = optab_for_tree_code (code2, ret_type, optab_default);
2013	}
2014      else
2015	{
2016	  optab1 = optab_for_tree_code (code1, arg_type, optab_default);
2017	  optab2 = optab_for_tree_code (code2, arg_type, optab_default);
2018	}
2019
2020      if (optab1 && optab2)
2021	compute_type = get_compute_type (code1, optab1, arg_type);
2022
2023      enum insn_code icode1, icode2;
2024      if (VECTOR_TYPE_P (compute_type)
2025	  && ((icode1 = optab_handler (optab1, TYPE_MODE (compute_type)))
2026	      != CODE_FOR_nothing)
2027	  && ((icode2 = optab_handler (optab2, TYPE_MODE (compute_type)))
2028	      != CODE_FOR_nothing)
2029	  && VECTOR_MODE_P (insn_data[icode1].operand[0].mode)
2030	  && (insn_data[icode1].operand[0].mode
2031	      == insn_data[icode2].operand[0].mode))
2032	{
2033	  poly_uint64 nunits
2034	    = exact_div (TYPE_VECTOR_SUBPARTS (compute_type), 2);
2035	  tree cretd_type = build_vector_type (TREE_TYPE (ret_type), nunits);
2036	  if (insn_data[icode1].operand[0].mode == TYPE_MODE (cretd_type))
2037	    {
2038	      vec<constructor_elt, va_gc> *v;
2039	      tree part_width = TYPE_SIZE (compute_type);
2040	      tree index = bitsize_int (0);
2041	      int nunits = nunits_for_known_piecewise_op (arg_type);
2042	      int delta = tree_to_uhwi (part_width) / arg_elt_bits;
2043	      int i;
2044	      location_t loc = gimple_location (gsi_stmt (*gsi));
2045
2046	      if (compute_type != arg_type)
2047		{
2048		  if (!warning_suppressed_p (gsi_stmt (*gsi),
2049					     OPT_Wvector_operation_performance))
2050		    warning_at (loc, OPT_Wvector_operation_performance,
2051				"vector operation will be expanded piecewise");
2052		}
2053	      else
2054		{
2055		  nunits = 1;
2056		  delta = 1;
2057		}
2058
2059	      vec_alloc (v, (nunits + delta - 1) / delta * 2);
2060	      bool constant_p = true;
2061	      for (i = 0; i < nunits;
2062		   i += delta, index = int_const_binop (PLUS_EXPR, index,
2063							part_width))
2064		{
2065		  tree a = arg;
2066		  if (compute_type != arg_type)
2067		    a = tree_vec_extract (gsi, compute_type, a, part_width,
2068					  index);
2069		  tree result = gimplify_build1 (gsi, code1, cretd_type, a);
2070		  constructor_elt ce = { NULL_TREE, result };
2071		  if (!CONSTANT_CLASS_P (ce.value))
2072		    constant_p = false;
2073		  v->quick_push (ce);
2074		  ce.value = gimplify_build1 (gsi, code2, cretd_type, a);
2075		  if (!CONSTANT_CLASS_P (ce.value))
2076		    constant_p = false;
2077		  v->quick_push (ce);
2078		}
2079
2080	      if (constant_p)
2081		new_rhs = build_vector_from_ctor (ret_type, v);
2082	      else
2083		new_rhs = build_constructor (ret_type, v);
2084	      g = gimple_build_assign (lhs, new_rhs);
2085	      gsi_replace (gsi, g, false);
2086	      return;
2087	    }
2088	}
2089    }
2090
2091  new_rhs = expand_vector_piecewise (gsi, do_vec_conversion, arg_type,
2092				     TREE_TYPE (arg_type), arg,
2093				     NULL_TREE, code, false, ret_type);
2094  g = gimple_build_assign (lhs, new_rhs);
2095  gsi_replace (gsi, g, false);
2096}
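Aside (not part of the file): expand_vector_conversion above classifies a conversion as NARROW or WIDEN purely from the element bit widths of the result and argument vectors, then picks pack or unpack_lo/hi codes accordingly. A tiny standalone sketch of just that classification step; the names are illustrative only:

/* Illustrative sketch only of the NARROW/NONE/WIDEN classification.  */
enum class Modifier { Narrow, None, Widen };

Modifier classify_conversion (unsigned ret_elt_bits, unsigned arg_elt_bits)
{
  if (ret_elt_bits < arg_elt_bits)
    return Modifier::Narrow;   // result elements smaller: pack
  if (ret_elt_bits > arg_elt_bits)
    return Modifier::Widen;    // result elements larger: unpack lo/hi
  return Modifier::None;       // same width: plain element conversion
}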
2097
2098/* Process one statement.  If we identify a vector operation, expand it.  */
2099
2100static void
2101expand_vector_operations_1 (gimple_stmt_iterator *gsi,
2102			    bitmap dce_ssa_names)
2103{
2104  tree lhs, rhs1, rhs2 = NULL, type, compute_type = NULL_TREE;
2105  enum tree_code code;
2106  optab op = unknown_optab;
2107  enum gimple_rhs_class rhs_class;
2108  tree new_rhs;
2109
2110  /* Only consider code == GIMPLE_ASSIGN. */
2111  gassign *stmt = dyn_cast <gassign *> (gsi_stmt (*gsi));
2112  if (!stmt)
2113    {
2114      if (gimple_call_internal_p (gsi_stmt (*gsi), IFN_VEC_CONVERT))
2115	expand_vector_conversion (gsi);
2116      return;
2117    }
2118
2119  code = gimple_assign_rhs_code (stmt);
2120  rhs_class = get_gimple_rhs_class (code);
2121  lhs = gimple_assign_lhs (stmt);
2122
2123  if (code == VEC_PERM_EXPR)
2124    {
2125      lower_vec_perm (gsi);
2126      return;
2127    }
2128
2129  if (code == VEC_COND_EXPR)
2130    {
2131      expand_vector_condition (gsi, dce_ssa_names);
2132      return;
2133    }
2134
2135  if (code == COND_EXPR
2136      && TREE_CODE (TREE_TYPE (gimple_assign_lhs (stmt))) == VECTOR_TYPE
2137      && TYPE_MODE (TREE_TYPE (gimple_assign_lhs (stmt))) == BLKmode)
2138    {
2139      expand_vector_scalar_condition (gsi);
2140      return;
2141    }
2142
2143  if (code == CONSTRUCTOR
2144      && TREE_CODE (lhs) == SSA_NAME
2145      && VECTOR_MODE_P (TYPE_MODE (TREE_TYPE (lhs)))
2146      && !gimple_clobber_p (stmt)
2147      && optimize)
2148    {
2149      optimize_vector_constructor (gsi);
2150      return;
2151    }
2152
2153  if (rhs_class != GIMPLE_UNARY_RHS && rhs_class != GIMPLE_BINARY_RHS)
2154    return;
2155
2156  rhs1 = gimple_assign_rhs1 (stmt);
2157  if (rhs_class == GIMPLE_BINARY_RHS)
2158    rhs2 = gimple_assign_rhs2 (stmt);
2159
2160  type = TREE_TYPE (lhs);
2161  if (!VECTOR_TYPE_P (type)
2162      || !VECTOR_TYPE_P (TREE_TYPE (rhs1)))
2163    return;
2164
2165  /* A scalar operation pretending to be a vector one.  */
2166  if (VECTOR_BOOLEAN_TYPE_P (type)
2167      && !VECTOR_MODE_P (TYPE_MODE (type))
2168      && TYPE_MODE (type) != BLKmode
2169      && (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)) != tcc_comparison
2170	  || (VECTOR_BOOLEAN_TYPE_P (TREE_TYPE (rhs1))
2171	      && !VECTOR_MODE_P (TYPE_MODE (TREE_TYPE (rhs1)))
2172	      && TYPE_MODE (TREE_TYPE (rhs1)) != BLKmode)))
2173    return;
2174
2175  /* If the vector operation is operating on all same vector elements
2176     implement it with a scalar operation and a splat if the target
2177     supports the scalar operation.  */
2178  tree srhs1, srhs2 = NULL_TREE;
2179  if ((srhs1 = ssa_uniform_vector_p (rhs1)) != NULL_TREE
2180      && (rhs2 == NULL_TREE
2181	  || (! VECTOR_TYPE_P (TREE_TYPE (rhs2))
2182	      && (srhs2 = rhs2))
2183	  || (srhs2 = ssa_uniform_vector_p (rhs2)) != NULL_TREE)
2184      /* As we query direct optabs restrict to non-convert operations.  */
2185      && TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (TREE_TYPE (srhs1)))
2186    {
2187      op = optab_for_tree_code (code, TREE_TYPE (type), optab_scalar);
2188      if (op >= FIRST_NORM_OPTAB && op <= LAST_NORM_OPTAB
2189	  && optab_handler (op, TYPE_MODE (TREE_TYPE (type))) != CODE_FOR_nothing)
2190	{
2191	  tree stype = TREE_TYPE (TREE_TYPE (lhs));
2192	  tree slhs = (rhs2 != NULL_TREE)
2193		      ? gimplify_build2 (gsi, code, stype, srhs1, srhs2)
2194		      : gimplify_build1 (gsi, code, stype, srhs1);
2195	  gimple_assign_set_rhs_from_tree (gsi,
2196					   build_vector_from_val (type, slhs));
2197	  update_stmt (stmt);
2198	  return;
2199	}
2200    }
2201
2202  if (CONVERT_EXPR_CODE_P (code)
2203      || code == FLOAT_EXPR
2204      || code == FIX_TRUNC_EXPR
2205      || code == VIEW_CONVERT_EXPR)
2206    return;
2207
2208  /* The signedness is determined from input argument.  */
2209  if (code == VEC_UNPACK_FLOAT_HI_EXPR
2210      || code == VEC_UNPACK_FLOAT_LO_EXPR
2211      || code == VEC_PACK_FLOAT_EXPR)
2212    {
2213      /* We do not know how to scalarize those.  */
2214      return;
2215    }
2216
2217  /* For widening/narrowing vector operations, the relevant type is of the
2218     arguments, not the widened result.  VEC_UNPACK_FLOAT_*_EXPR is
2219     calculated in the same way above.  */
2220  if (code == WIDEN_SUM_EXPR
2221      || code == VEC_WIDEN_PLUS_HI_EXPR
2222      || code == VEC_WIDEN_PLUS_LO_EXPR
2223      || code == VEC_WIDEN_MINUS_HI_EXPR
2224      || code == VEC_WIDEN_MINUS_LO_EXPR
2225      || code == VEC_WIDEN_MULT_HI_EXPR
2226      || code == VEC_WIDEN_MULT_LO_EXPR
2227      || code == VEC_WIDEN_MULT_EVEN_EXPR
2228      || code == VEC_WIDEN_MULT_ODD_EXPR
2229      || code == VEC_UNPACK_HI_EXPR
2230      || code == VEC_UNPACK_LO_EXPR
2231      || code == VEC_UNPACK_FIX_TRUNC_HI_EXPR
2232      || code == VEC_UNPACK_FIX_TRUNC_LO_EXPR
2233      || code == VEC_PACK_TRUNC_EXPR
2234      || code == VEC_PACK_SAT_EXPR
2235      || code == VEC_PACK_FIX_TRUNC_EXPR
2236      || code == VEC_WIDEN_LSHIFT_HI_EXPR
2237      || code == VEC_WIDEN_LSHIFT_LO_EXPR)
2238    {
2239      /* We do not know how to scalarize those.  */
2240      return;
2241    }
2242
2243  /* Choose between vector shift/rotate by vector and vector shift/rotate by
2244     scalar */
2245  if (code == LSHIFT_EXPR
2246      || code == RSHIFT_EXPR
2247      || code == LROTATE_EXPR
2248      || code == RROTATE_EXPR)
2249    {
2250      optab opv;
2251
2252      /* Check whether we have vector <op> {x,x,x,x} where x
2253	 could be a scalar variable or a constant.  Transform
2254	 vector <op> {x,x,x,x} ==> vector <op> scalar.  */
2255      if (VECTOR_INTEGER_TYPE_P (TREE_TYPE (rhs2)))
2256	{
2257	  tree first;
2258
2259	  if ((first = ssa_uniform_vector_p (rhs2)) != NULL_TREE)
2260	    {
2261	      gimple_assign_set_rhs2 (stmt, first);
2262	      update_stmt (stmt);
2263	      rhs2 = first;
2264	    }
2265	}
2266
2267      opv = optab_for_tree_code (code, type, optab_vector);
2268      if (VECTOR_INTEGER_TYPE_P (TREE_TYPE (rhs2)))
2269	op = opv;
2270      else
2271	{
2272	  op = optab_for_tree_code (code, type, optab_scalar);
2273
2274	  compute_type = get_compute_type (code, op, type);
2275	  if (compute_type == type)
2276	    return;
2277	  /* The rtl expander will expand vector/scalar as vector/vector
2278	     if necessary.  Pick one with wider vector type.  */
2279	  tree compute_vtype = get_compute_type (code, opv, type);
2280	  if (subparts_gt (compute_vtype, compute_type))
2281	    {
2282	      compute_type = compute_vtype;
2283	      op = opv;
2284	    }
2285	}
2286
2287      if (code == LROTATE_EXPR || code == RROTATE_EXPR)
2288	{
2289	  if (compute_type == NULL_TREE)
2290	    compute_type = get_compute_type (code, op, type);
2291	  if (compute_type == type)
2292	    return;
2293	  /* Before splitting vector rotates into scalar rotates,
2294	     see if we can't use vector shifts and BIT_IOR_EXPR
2295	     instead.  For vector by vector rotates we'd also
2296	     need to check BIT_AND_EXPR and NEGATE_EXPR, punt there
2297	     for now, fold doesn't seem to create such rotates anyway.  */
2298	  if (compute_type == TREE_TYPE (type)
2299	      && !VECTOR_INTEGER_TYPE_P (TREE_TYPE (rhs2)))
2300	    {
2301	      optab oplv = vashl_optab, opl = ashl_optab;
2302	      optab oprv = vlshr_optab, opr = lshr_optab, opo = ior_optab;
2303	      tree compute_lvtype = get_compute_type (LSHIFT_EXPR, oplv, type);
2304	      tree compute_rvtype = get_compute_type (RSHIFT_EXPR, oprv, type);
2305	      tree compute_otype = get_compute_type (BIT_IOR_EXPR, opo, type);
2306	      tree compute_ltype = get_compute_type (LSHIFT_EXPR, opl, type);
2307	      tree compute_rtype = get_compute_type (RSHIFT_EXPR, opr, type);
2308	      /* The rtl expander will expand vector/scalar as vector/vector
2309		 if necessary.  Pick one with wider vector type.  */
2310	      if (subparts_gt (compute_lvtype, compute_ltype))
2311		{
2312		  compute_ltype = compute_lvtype;
2313		  opl = oplv;
2314		}
2315	      if (subparts_gt (compute_rvtype, compute_rtype))
2316		{
2317		  compute_rtype = compute_rvtype;
2318		  opr = oprv;
2319		}
2320	      /* Pick the narrowest type from LSHIFT_EXPR, RSHIFT_EXPR and
2321		 BIT_IOR_EXPR.  */
2322	      compute_type = compute_ltype;
2323	      if (subparts_gt (compute_type, compute_rtype))
2324		compute_type = compute_rtype;
2325	      if (subparts_gt (compute_type, compute_otype))
2326		compute_type = compute_otype;
2327	      /* Verify all 3 operations can be performed in that type.  */
2328	      if (compute_type != TREE_TYPE (type))
2329		{
2330		  if (optab_handler (opl, TYPE_MODE (compute_type))
2331		      == CODE_FOR_nothing
2332		      || optab_handler (opr, TYPE_MODE (compute_type))
2333			 == CODE_FOR_nothing
2334		      || optab_handler (opo, TYPE_MODE (compute_type))
2335			 == CODE_FOR_nothing)
2336		    compute_type = TREE_TYPE (type);
2337		}
2338	    }
2339	}
2340    }
2341  else
2342    op = optab_for_tree_code (code, type, optab_default);
2343
2344  /* Optabs will try converting a negation into a subtraction, so
2345     look for it as well.  TODO: negation of floating-point vectors
2346     might be turned into an exclusive OR toggling the sign bit.  */
2347  if (op == unknown_optab
2348      && code == NEGATE_EXPR
2349      && INTEGRAL_TYPE_P (TREE_TYPE (type))
, 2299, __FUNCTION__))->typed.type)), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-generic.cc"
, 2299, __FUNCTION__))->typed.type))->base.code) == INTEGER_TYPE
)
) 2300 { 2301 optab oplv = vashl_optab, opl = ashl_optab; 2302 optab oprv = vlshr_optab, opr = lshr_optab, opo = ior_optab; 2303 tree compute_lvtype = get_compute_type (LSHIFT_EXPR, oplv, type); 2304 tree compute_rvtype = get_compute_type (RSHIFT_EXPR, oprv, type); 2305 tree compute_otype = get_compute_type (BIT_IOR_EXPR, opo, type); 2306 tree compute_ltype = get_compute_type (LSHIFT_EXPR, opl, type); 2307 tree compute_rtype = get_compute_type (RSHIFT_EXPR, opr, type); 2308 /* The rtl expander will expand vector/scalar as vector/vector 2309 if necessary. Pick one with wider vector type. */ 2310 if (subparts_gt (compute_lvtype, compute_ltype)) 2311 { 2312 compute_ltype = compute_lvtype; 2313 opl = oplv; 2314 } 2315 if (subparts_gt (compute_rvtype, compute_rtype)) 2316 { 2317 compute_rtype = compute_rvtype; 2318 opr = oprv; 2319 } 2320 /* Pick the narrowest type from LSHIFT_EXPR, RSHIFT_EXPR and 2321 BIT_IOR_EXPR. */ 2322 compute_type = compute_ltype; 2323 if (subparts_gt (compute_type, compute_rtype)) 2324 compute_type = compute_rtype; 2325 if (subparts_gt (compute_type, compute_otype)) 2326 compute_type = compute_otype; 2327 /* Verify all 3 operations can be performed in that type. */ 2328 if (compute_type != TREE_TYPE (type)((contains_struct_check ((type), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-generic.cc"
, 2328, __FUNCTION__))->typed.type)
) 2329 { 2330 if (optab_handler (opl, TYPE_MODE (compute_type)((((enum tree_code) ((tree_class_check ((compute_type), (tcc_type
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-generic.cc"
, 2330, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(compute_type) : (compute_type)->type_common.mode)
) 2331 == CODE_FOR_nothing 2332 || optab_handler (opr, TYPE_MODE (compute_type)((((enum tree_code) ((tree_class_check ((compute_type), (tcc_type
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-generic.cc"
, 2332, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(compute_type) : (compute_type)->type_common.mode)
) 2333 == CODE_FOR_nothing 2334 || optab_handler (opo, TYPE_MODE (compute_type)((((enum tree_code) ((tree_class_check ((compute_type), (tcc_type
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-generic.cc"
, 2334, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(compute_type) : (compute_type)->type_common.mode)
) 2335 == CODE_FOR_nothing) 2336 compute_type = TREE_TYPE (type)((contains_struct_check ((type), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-generic.cc"
, 2336, __FUNCTION__))->typed.type)
; 2337 } 2338 } 2339 } 2340 } 2341 else 2342 op = optab_for_tree_code (code, type, optab_default); 2343 2344 /* Optabs will try converting a negation into a subtraction, so 2345 look for it as well. TODO: negation of floating-point vectors 2346 might be turned into an exclusive OR toggling the sign bit. */ 2347 if (op == unknown_optab 2348 && code == NEGATE_EXPR 2349 && INTEGRAL_TYPE_P (TREE_TYPE (type))(((enum tree_code) (((contains_struct_check ((type), (TS_TYPED
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-generic.cc"
, 2349, __FUNCTION__))->typed.type))->base.code) == ENUMERAL_TYPE
|| ((enum tree_code) (((contains_struct_check ((type), (TS_TYPED
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-generic.cc"
, 2349, __FUNCTION__))->typed.type))->base.code) == BOOLEAN_TYPE
|| ((enum tree_code) (((contains_struct_check ((type), (TS_TYPED
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-generic.cc"
, 2349, __FUNCTION__))->typed.type))->base.code) == INTEGER_TYPE
)
) 2350 op = optab_for_tree_code (MINUS_EXPR, type, optab_default); 2351 2352 if (compute_type == NULL_TREE(tree) nullptr) 2353 compute_type = get_compute_type (code, op, type); 2354 if (compute_type == type) 2355 return; 2356 2357 new_rhs = expand_vector_operation (gsi, type, compute_type, stmt, code, 2358 dce_ssa_names); 2359 2360 /* Leave expression untouched for later expansion. */ 2361 if (new_rhs == NULL_TREE(tree) nullptr) 2362 return; 2363 2364 if (!useless_type_conversion_p (TREE_TYPE (lhs)((contains_struct_check ((lhs), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-generic.cc"
, 2364, __FUNCTION__))->typed.type)
, TREE_TYPE (new_rhs)((contains_struct_check ((new_rhs), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-generic.cc"
, 2364, __FUNCTION__))->typed.type)
)) 2365 new_rhs = gimplify_build1 (gsi, VIEW_CONVERT_EXPR, TREE_TYPE (lhs)((contains_struct_check ((lhs), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-generic.cc"
, 2365, __FUNCTION__))->typed.type)
, 2366 new_rhs); 2367 2368 /* NOTE: We should avoid using gimple_assign_set_rhs_from_tree. One 2369 way to do it is change expand_vector_operation and its callees to 2370 return a tree_code, RHS1 and RHS2 instead of a tree. */ 2371 gimple_assign_set_rhs_from_tree (gsi, new_rhs); 2372 update_stmt (gsi_stmt (*gsi)); 2373} 2374
2375/* Use this to lower vector operations introduced by the vectorizer, 2376 if it may need the bit-twiddling tricks implemented in this file. */ 2377 2378static unsigned int 2379expand_vector_operations (void) 2380{ 2381 gimple_stmt_iterator gsi; 2382 basic_block bb; 2383 bool cfg_changed = false; 2384 2385 auto_bitmap dce_ssa_names; 2386 2387 FOR_EACH_BB_FN (bb, cfun)for (bb = ((cfun + 0))->cfg->x_entry_block_ptr->next_bb
; bb != ((cfun + 0))->cfg->x_exit_block_ptr; bb = bb->
next_bb)
2388 { 2389 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi)) 2390 { 2391 expand_vector_operations_1 (&gsi, dce_ssa_names); 2392 /* ??? If we do not cleanup EH then we will ICE in 2393 verification. But in reality we have created wrong-code 2394 as we did not properly transition EH info and edges to 2395 the piecewise computations. */ 2396 if (maybe_clean_eh_stmt (gsi_stmt (gsi)) 2397 && gimple_purge_dead_eh_edges (bb)) 2398 cfg_changed = true; 2399 /* If a .LOOP_DIST_ALIAS call prevailed loops got elided 2400 before vectorization got a chance to get at them. Simply 2401 fold as if loop distribution wasn't performed. */ 2402 if (gimple_call_internal_p (gsi_stmt (gsi), IFN_LOOP_DIST_ALIAS)) 2403 { 2404 fold_loop_internal_call (gsi_stmt (gsi), boolean_false_nodeglobal_trees[TI_BOOLEAN_FALSE]); 2405 cfg_changed = true; 2406 } 2407 } 2408 } 2409 2410 simple_dce_from_worklist (dce_ssa_names); 2411 2412 return cfg_changed ? TODO_cleanup_cfg(1 << 5) : 0; 2413} 2414 2415namespace { 2416 2417const pass_data pass_data_lower_vector = 2418{ 2419 GIMPLE_PASS, /* type */ 2420 "veclower", /* name */ 2421 OPTGROUP_VEC, /* optinfo_flags */ 2422 TV_NONE, /* tv_id */ 2423 PROP_cfg(1 << 3), /* properties_required */ 2424 PROP_gimple_lvec(1 << 12), /* properties_provided */ 2425 0, /* properties_destroyed */ 2426 0, /* todo_flags_start */ 2427 TODO_update_ssa(1 << 11), /* todo_flags_finish */ 2428}; 2429 2430class pass_lower_vector : public gimple_opt_pass 2431{ 2432public: 2433 pass_lower_vector (gcc::context *ctxt) 2434 : gimple_opt_pass (pass_data_lower_vector, ctxt) 2435 {} 2436 2437 /* opt_pass methods: */ 2438 bool gate (function *fun) final override 2439 { 2440 return !(fun->curr_properties & PROP_gimple_lvec(1 << 12)); 2441 } 2442 2443 unsigned int execute (function *) final override 2444 { 2445 return expand_vector_operations (); 2446 } 2447 2448}; // class pass_lower_vector 2449 2450} // anon namespace 2451 2452gimple_opt_pass * 2453make_pass_lower_vector (gcc::context *ctxt) 2454{ 2455 return new pass_lower_vector (ctxt); 2456} 2457 2458namespace { 2459 2460const pass_data pass_data_lower_vector_ssa = 2461{ 2462 GIMPLE_PASS, /* type */ 2463 "veclower2", /* name */ 2464 OPTGROUP_VEC, /* optinfo_flags */ 2465 TV_NONE, /* tv_id */ 2466 PROP_cfg(1 << 3), /* properties_required */ 2467 PROP_gimple_lvec(1 << 12), /* properties_provided */ 2468 0, /* properties_destroyed */ 2469 0, /* todo_flags_start */ 2470 ( TODO_update_ssa(1 << 11) 2471 | TODO_cleanup_cfg(1 << 5) ), /* todo_flags_finish */ 2472}; 2473 2474class pass_lower_vector_ssa : public gimple_opt_pass 2475{ 2476public: 2477 pass_lower_vector_ssa (gcc::context *ctxt) 2478 : gimple_opt_pass (pass_data_lower_vector_ssa, ctxt) 2479 {} 2480 2481 /* opt_pass methods: */ 2482 opt_pass * clone () final override 2483 { 2484 return new pass_lower_vector_ssa (m_ctxt); 2485 } 2486 unsigned int execute (function *) final override 2487 { 2488 return expand_vector_operations (); 2489 } 2490 2491}; // class pass_lower_vector_ssa 2492 2493} // anon namespace 2494 2495gimple_opt_pass * 2496make_pass_lower_vector_ssa (gcc::context *ctxt) 2497{ 2498 return new pass_lower_vector_ssa (ctxt); 2499} 2500 2501#include "gt-tree-vect-generic.h"

/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/vec.h

1/* Vector API for GNU compiler.
2 Copyright (C) 2004-2023 Free Software Foundation, Inc.
3 Contributed by Nathan Sidwell <nathan@codesourcery.com>
4 Re-implemented in C++ by Diego Novillo <dnovillo@google.com>
5
6This file is part of GCC.
7
8GCC is free software; you can redistribute it and/or modify it under
9the terms of the GNU General Public License as published by the Free
10Software Foundation; either version 3, or (at your option) any later
11version.
12
13GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14WARRANTY; without even the implied warranty of MERCHANTABILITY or
15FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16for more details.
17
18You should have received a copy of the GNU General Public License
19along with GCC; see the file COPYING3. If not see
20<http://www.gnu.org/licenses/>. */
21
22#ifndef GCC_VEC_H
23#define GCC_VEC_H
24
25/* Some gen* file have no ggc support as the header file gtype-desc.h is
26 missing. Provide these definitions in case ggc.h has not been included.
27 This is not a problem because any code that runs before gengtype is built
28 will never need to use GC vectors.*/
29
30extern void ggc_free (void *);
31extern size_t ggc_round_alloc_size (size_t requested_size);
32extern void *ggc_realloc (void *, size_t MEM_STAT_DECL);
33
34/* Templated vector type and associated interfaces.
35
36 The interface functions are typesafe and use inline functions,
37 sometimes backed by out-of-line generic functions. The vectors are
38 designed to interoperate with the GTY machinery.
39
40 There are both 'index' and 'iterate' accessors. The index accessor
41 is implemented by operator[]. The iterator returns a boolean
42 iteration condition and updates the iteration variable passed by
43 reference. Because the iterator will be inlined, the address-of
44 can be optimized away.
45
46 Each operation that increases the number of active elements is
47 available in 'quick' and 'safe' variants. The former presumes that
48 there is sufficient allocated space for the operation to succeed
49 (it dies if there is not). The latter will reallocate the
50 vector, if needed. Reallocation causes an exponential increase in
51 vector size. If you know you will be adding N elements, it would
52 be more efficient to use the reserve operation before adding the
53 elements with the 'quick' operation. This will ensure there are at
54 least as many elements as you ask for, it will exponentially
55 increase if there are too few spare slots. If you want reserve a
56 specific number of slots, but do not want the exponential increase
57 (for instance, you know this is the last allocation), use the
58 reserve_exact operation. You can also create a vector of a
59 specific size from the get go.
60
61 You should prefer the push and pop operations, as they append and
62 remove from the end of the vector. If you need to remove several
63 items in one go, use the truncate operation. The insert and remove
64 operations allow you to change elements in the middle of the
65 vector. There are two remove operations, one which preserves the
66 element ordering 'ordered_remove', and one which does not
67 'unordered_remove'. The latter function copies the end element
68 into the removed slot, rather than invoke a memmove operation. The
69 'lower_bound' function will determine where to place an item in the
70 array using insert that will maintain sorted order.
71
72 Vectors are template types with three arguments: the type of the
73 elements in the vector, the allocation strategy, and the physical
74 layout to use
75
76 Four allocation strategies are supported:
77
78 - Heap: allocation is done using malloc/free. This is the
79 default allocation strategy.
80
81 - GC: allocation is done using ggc_alloc/ggc_free.
82
83 - GC atomic: same as GC with the exception that the elements
84 themselves are assumed to be of an atomic type that does
85 not need to be garbage collected. This means that marking
86 routines do not need to traverse the array marking the
87 individual elements. This increases the performance of
88 GC activities.
89
90 Two physical layouts are supported:
91
92 - Embedded: The vector is structured using the trailing array
93 idiom. The last member of the structure is an array of size
94 1. When the vector is initially allocated, a single memory
95 block is created to hold the vector's control data and the
96 array of elements. These vectors cannot grow without
97 reallocation (see discussion on embeddable vectors below).
98
99 - Space efficient: The vector is structured as a pointer to an
100 embedded vector. This is the default layout. It means that
101 vectors occupy a single word of storage before initial
102 allocation. Vectors are allowed to grow (the internal
103 pointer is reallocated but the main vector instance does not
104 need to relocate).
105
106 The type, allocation and layout are specified when the vector is
107 declared.
108
109 If you need to directly manipulate a vector, then the 'address'
110 accessor will return the address of the start of the vector. Also
111 the 'space' predicate will tell you whether there is spare capacity
112 in the vector. You will not normally need to use these two functions.
113
114 Notes on the different layout strategies
115
116 * Embeddable vectors (vec<T, A, vl_embed>)
117
118 These vectors are suitable to be embedded in other data
119 structures so that they can be pre-allocated in a contiguous
120 memory block.
121
122 Embeddable vectors are implemented using the trailing array
123 idiom, thus they are not resizeable without changing the address
124 of the vector object itself. This means you cannot have
125 variables or fields of embeddable vector type -- always use a
126 pointer to a vector. The one exception is the final field of a
127 structure, which could be a vector type.
128
129 You will have to use the embedded_size & embedded_init calls to
130 create such objects, and they will not be resizeable (so the
131 'safe' allocation variants are not available).
132
133 Properties of embeddable vectors:
134
135 - The whole vector and control data are allocated in a single
136 contiguous block. It uses the trailing-vector idiom, so
137 allocation must reserve enough space for all the elements
138 in the vector plus its control data.
139 - The vector cannot be re-allocated.
140 - The vector cannot grow nor shrink.
141 - No indirections needed for access/manipulation.
142 - It requires 2 words of storage (prior to vector allocation).
143
144
145 * Space efficient vector (vec<T, A, vl_ptr>)
146
147 These vectors can grow dynamically and are allocated together
148 with their control data. They are suited to be included in data
149 structures. Prior to initial allocation, they only take a single
150 word of storage.
151
152 These vectors are implemented as a pointer to embeddable vectors.
153 The semantics allow for this pointer to be NULL to represent
154 empty vectors. This way, empty vectors occupy minimal space in
155 the structure containing them.
156
157 Properties:
158
159 - The whole vector and control data are allocated in a single
160 contiguous block.
161 - The whole vector may be re-allocated.
162 - Vector data may grow and shrink.
163 - Access and manipulation requires a pointer test and
164 indirection.
165 - It requires 1 word of storage (prior to vector allocation).
166
167 An example of their use would be,
168
169 struct my_struct {
170 // A space-efficient vector of tree pointers in GC memory.
171 vec<tree, va_gc, vl_ptr> v;
172 };
173
174 struct my_struct *s;
175
176 if (s->v.length ()) { we have some contents }
177 s->v.safe_push (decl); // append some decl onto the end
178 for (ix = 0; s->v.iterate (ix, &elt); ix++)
179 { do something with elt }
180*/
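/* Illustrative sketch added alongside the GC example above (not part of
   vec.h): the same pattern with the default heap allocator and vl_ptr
   layout.  It assumes the safe_push / length / operator[] / release members
   of the vl_ptr specialization declared later in this header.  */
static void
example_heap_vec (void)
{
  vec<int> v{ };        /* value-initialized: empty, no allocation yet */
  v.safe_push (1);      /* reallocates as needed */
  v.safe_push (2);
  gcc_checking_assert (v.length () == 2 && v[0] == 1);
  v.release ();         /* heap vectors must be released explicitly */
}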
181
182/* Support function for statistics. */
183extern void dump_vec_loc_statistics (void);
184
185/* Hashtable mapping vec addresses to descriptors. */
186extern htab_t vec_mem_usage_hash;
187
188/* Control data for vectors. This contains the number of allocated
189 and used slots inside a vector. */
190
191struct vec_prefix
192{
193 /* FIXME - These fields should be private, but we need to cater to
194 compilers that have stricter notions of PODness for types. */
195
196 /* Memory allocation support routines in vec.cc. */
197 void register_overhead (void *, size_t, size_t CXX_MEM_STAT_INFO);
198 void release_overhead (void *, size_t, size_t, bool CXX_MEM_STAT_INFO);
199 static unsigned calculate_allocation (vec_prefix *, unsigned, bool);
200 static unsigned calculate_allocation_1 (unsigned, unsigned);
201
202 /* Note that vec_prefix should be a base class for vec, but we use
203 offsetof() on vector fields of tree structures (e.g.,
204 tree_binfo::base_binfos), and offsetof only supports base types.
205
206 To compensate, we make vec_prefix a field inside vec and make
207 vec a friend class of vec_prefix so it can access its fields. */
208 template <typename, typename, typename> friend struct vec;
209
210 /* The allocator types also need access to our internals. */
211 friend struct va_gc;
212 friend struct va_gc_atomic;
213 friend struct va_heap;
214
215 unsigned m_alloc : 31;
216 unsigned m_using_auto_storage : 1;
217 unsigned m_num;
218};
219
220/* Calculate the number of slots to reserve a vector, making sure that
221 RESERVE slots are free. If EXACT grow exactly, otherwise grow
222 exponentially. PFX is the control data for the vector. */
223
224inline unsigned
225vec_prefix::calculate_allocation (vec_prefix *pfx, unsigned reserve,
226 bool exact)
227{
228 if (exact)
229 return (pfx ? pfx->m_num : 0) + reserve;
230 else if (!pfx)
231    return MAX (4, reserve);
232 return calculate_allocation_1 (pfx->m_alloc, pfx->m_num + reserve);
233}
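/* Illustrative sketch (not part of vec.h): how the sizing rule above plays
   out for a vector that has not been allocated yet.  The function name is
   made up for the example; a null prefix models "no allocation yet".  */
static void
example_calculate_allocation (void)
{
  unsigned exact_slots = vec_prefix::calculate_allocation (nullptr, 3, true);
  unsigned grown_slots = vec_prefix::calculate_allocation (nullptr, 3, false);
  /* Exact growth returns just the request; non-exact growth never
     allocates fewer than 4 slots on the first allocation.  */
  gcc_checking_assert (exact_slots == 3 && grown_slots == 4);
}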
234
235template<typename, typename, typename> struct vec;
236
237/* Valid vector layouts
238
239 vl_embed - Embeddable vector that uses the trailing array idiom.
240 vl_ptr - Space efficient vector that uses a pointer to an
241 embeddable vector. */
242struct vl_embed { };
243struct vl_ptr { };
244
245
246/* Types of supported allocations
247
248 va_heap - Allocation uses malloc/free.
249 va_gc - Allocation uses ggc_alloc.
250 va_gc_atomic - Same as GC, but individual elements of the array
251 do not need to be marked during collection. */
252
253/* Allocator type for heap vectors. */
254struct va_heap
255{
256 /* Heap vectors are frequently regular instances, so use the vl_ptr
257 layout for them. */
258 typedef vl_ptr default_layout;
259
260 template<typename T>
261 static void reserve (vec<T, va_heap, vl_embed> *&, unsigned, bool
262 CXX_MEM_STAT_INFO);
263
264 template<typename T>
265 static void release (vec<T, va_heap, vl_embed> *&);
266};
267
268
269/* Allocator for heap memory. Ensure there are at least RESERVE free
270 slots in V. If EXACT is true, grow exactly, else grow
271 exponentially. As a special case, if the vector had not been
272 allocated and RESERVE is 0, no vector will be created. */
273
274template<typename T>
275inline void
276va_heap::reserve (vec<T, va_heap, vl_embed> *&v, unsigned reserve, bool exact
277 MEM_STAT_DECL)
278{
279 size_t elt_size = sizeof (T);
280 unsigned alloc
281 = vec_prefix::calculate_allocation (v ? &v->m_vecpfx : 0, reserve, exact);
282  gcc_checking_assert (alloc);
283
284  if (GATHER_STATISTICS && v)
285 v->m_vecpfx.release_overhead (v, elt_size * v->allocated (),
286 v->allocated (), false);
287
288 size_t size = vec<T, va_heap, vl_embed>::embedded_size (alloc);
289 unsigned nelem = v ? v->length () : 0;
290 v = static_cast <vec<T, va_heap, vl_embed> *> (xrealloc (v, size));
291 v->embedded_init (alloc, nelem);
292
293  if (GATHER_STATISTICS)
294 v->m_vecpfx.register_overhead (v, alloc, elt_size PASS_MEM_STAT);
295}
296
297
298#if GCC_VERSION >= 4007
299#pragma GCC diagnostic push
300#pragma GCC diagnostic ignored "-Wfree-nonheap-object"
301#endif
302
303/* Free the heap space allocated for vector V. */
304
305template<typename T>
306void
307va_heap::release (vec<T, va_heap, vl_embed> *&v)
308{
309 size_t elt_size = sizeof (T);
310  if (v == NULL)
311 return;
312
313  if (GATHER_STATISTICS)
314 v->m_vecpfx.release_overhead (v, elt_size * v->allocated (),
315 v->allocated (), true);
316 ::free (v);
317  v = NULL;
318}
319
320#if GCC_VERSION >= 4007
321#pragma GCC diagnostic pop
322#endif
323
324/* Allocator type for GC vectors. Notice that we need the structure
325 declaration even if GC is not enabled. */
326
327struct va_gc
328{
329 /* Use vl_embed as the default layout for GC vectors. Due to GTY
330 limitations, GC vectors must always be pointers, so it is more
331 efficient to use a pointer to the vl_embed layout, rather than
332 using a pointer to a pointer as would be the case with vl_ptr. */
333 typedef vl_embed default_layout;
334
335 template<typename T, typename A>
336 static void reserve (vec<T, A, vl_embed> *&, unsigned, bool
337 CXX_MEM_STAT_INFO);
338
339 template<typename T, typename A>
340 static void release (vec<T, A, vl_embed> *&v);
341};
342
343
344/* Free GC memory used by V and reset V to NULL. */
345
346template<typename T, typename A>
347inline void
348va_gc::release (vec<T, A, vl_embed> *&v)
349{
350 if (v)
351 ::ggc_free (v);
352  v = NULL;
353}
354
355
356/* Allocator for GC memory. Ensure there are at least RESERVE free
357 slots in V. If EXACT is true, grow exactly, else grow
358 exponentially. As a special case, if the vector had not been
359 allocated and RESERVE is 0, no vector will be created. */
360
361template<typename T, typename A>
362void
363va_gc::reserve (vec<T, A, vl_embed> *&v, unsigned reserve, bool exact
364 MEM_STAT_DECL)
365{
366  unsigned alloc
367    = vec_prefix::calculate_allocation (v ? &v->m_vecpfx : 0, reserve, exact);
         (25.1) 'v' is null
         (26) '?' condition is false
368  if (!alloc)
         (27) Assuming 'alloc' is 0
         (28) Taking true branch
369    {
370      ::ggc_free (v);
371      v = NULL;
         (29) Null pointer value stored to 'v'
372      return;
373    }
374
375 /* Calculate the amount of space we want. */
376 size_t size = vec<T, A, vl_embed>::embedded_size (alloc);
377
378 /* Ask the allocator how much space it will really give us. */
379 size = ::ggc_round_alloc_size (size);
380
381 /* Adjust the number of slots accordingly. */
382 size_t vec_offset = sizeof (vec_prefix);
383 size_t elt_size = sizeof (T);
384 alloc = (size - vec_offset) / elt_size;
385
386 /* And finally, recalculate the amount of space we ask for. */
387 size = vec_offset + alloc * elt_size;
388
389 unsigned nelem = v ? v->length () : 0;
390 v = static_cast <vec<T, A, vl_embed> *> (::ggc_realloc (v, size
391 PASS_MEM_STAT));
392 v->embedded_init (alloc, nelem);
393}
394
395
396/* Allocator type for GC vectors. This is for vectors of types
397 atomics w.r.t. collection, so allocation and deallocation is
398 completely inherited from va_gc. */
399struct va_gc_atomic : va_gc
400{
401};
402
403
404/* Generic vector template. Default values for A and L indicate the
405 most commonly used strategies.
406
407 FIXME - Ideally, they would all be vl_ptr to encourage using regular
408 instances for vectors, but the existing GTY machinery is limited
409 in that it can only deal with GC objects that are pointers
410 themselves.
411
412 This means that vector operations that need to deal with
413 potentially NULL pointers, must be provided as free
414 functions (see the vec_safe_* functions above). */
415template<typename T,
416 typename A = va_heap,
417 typename L = typename A::default_layout>
418struct GTY((user)) vec
419{
420};
421
422/* Allow C++11 range-based 'for' to work directly on vec<T>*. */
423template<typename T, typename A, typename L>
424T* begin (vec<T,A,L> *v) { return v ? v->begin () : nullptr; }
425template<typename T, typename A, typename L>
426T* end (vec<T,A,L> *v) { return v ? v->end () : nullptr; }
427template<typename T, typename A, typename L>
428const T* begin (const vec<T,A,L> *v) { return v ? v->begin () : nullptr; }
429template<typename T, typename A, typename L>
430const T* end (const vec<T,A,L> *v) { return v ? v->end () : nullptr; }
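/* Illustrative sketch (not part of vec.h) of the begin/end adapters above:
   they make a possibly-NULL pointer to an embedded vector directly usable
   in a range-based for loop.  */
static int
example_sum_elements (vec<int, va_heap, vl_embed> *v)
{
  int sum = 0;
  for (int x : v)   /* safe even when v == NULL: begin/end return nullptr */
    sum += x;
  return sum;
}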
431
432/* Generic vec<> debug helpers.
433
434 These need to be instantiated for each vec<TYPE> used throughout
435 the compiler like this:
436
437 DEFINE_DEBUG_VEC (TYPE)
438
439 The reason we have a debug_helper() is because GDB can't
440 disambiguate a plain call to debug(some_vec), and it must be called
441 like debug<TYPE>(some_vec). */
442
443template<typename T>
444void
445debug_helper (vec<T> &ref)
446{
447 unsigned i;
448 for (i = 0; i < ref.length (); ++i)
449 {
450      fprintf (stderr, "[%d] = ", i);
451      debug_slim (ref[i]);
452      fputc ('\n', stderr);
453 }
454}
455
456/* We need a separate va_gc variant here because default template
457 argument for functions cannot be used in c++-98. Once this
458 restriction is removed, those variant should be folded with the
459 above debug_helper. */
460
461template<typename T>
462void
463debug_helper (vec<T, va_gc> &ref)
464{
465 unsigned i;
466 for (i = 0; i < ref.length (); ++i)
467 {
468      fprintf (stderr, "[%d] = ", i);
469      debug_slim (ref[i]);
470      fputc ('\n', stderr);
471 }
472}
473
474/* Macro to define debug(vec<T>) and debug(vec<T, va_gc>) helper
475 functions for a type T. */
476
477#define DEFINE_DEBUG_VEC(T) \
478  template void debug_helper (vec<T> &);		\
479  template void debug_helper (vec<T, va_gc> &);	\
480  /* Define the vec<T> debug functions.  */		\
481  DEBUG_FUNCTION void					\
482  debug (vec<T> &ref)					\
483  {							\
484    debug_helper <T> (ref);				\
485  }							\
486  DEBUG_FUNCTION void					\
487  debug (vec<T> *ptr)					\
488  {							\
489    if (ptr)						\
490      debug (*ptr);					\
491    else						\
492      fprintf (stderr, "<nil>\n");			\
493  }							\
494  /* Define the vec<T, va_gc> debug functions.  */	\
495  DEBUG_FUNCTION void					\
496  debug (vec<T, va_gc> &ref)				\
497  {							\
498    debug_helper <T> (ref);				\
499  }							\
500  DEBUG_FUNCTION void					\
501  debug (vec<T, va_gc> *ptr)				\
502  {							\
503    if (ptr)						\
504      debug (*ptr);					\
505    else						\
506      fprintf (stderr, "<nil>\n");			\
507  }
508
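/* Hedged usage sketch (not part of vec.h): how a translation unit would
   instantiate the debug helpers above for its element type.  `my_node` and
   its debug_slim overload are hypothetical and exist only for this example;
   real users supply debug_slim for their own element type.  */
struct my_node { int id; };
inline void debug_slim (my_node &n) { fprintf (stderr, "node %d", n.id); }
DEFINE_DEBUG_VEC (my_node)
/* After this, `debug (some_vec_of_my_node)` works from GDB for both the
   plain and the va_gc flavors.  */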
509/* Default-construct N elements in DST. */
510
511template <typename T>
512inline void
513vec_default_construct (T *dst, unsigned n)
514{
515#ifdef BROKEN_VALUE_INITIALIZATION
516 /* Versions of GCC before 4.4 sometimes leave certain objects
517 uninitialized when value initialized, though if the type has
518 user defined default ctor, that ctor is invoked. As a workaround
519 perform clearing first and then the value initialization, which
520 fixes the case when value initialization doesn't initialize due to
521 the bugs and should initialize to all zeros, but still allows
522 vectors for types with user defined default ctor that initializes
523 some or all elements to non-zero. If T has no user defined
524 default ctor and some non-static data members have user defined
525 default ctors that initialize to non-zero the workaround will
526 still not work properly; in that case we just need to provide
527 user defined default ctor. */
528 memset (dst, '\0', sizeof (T) * n);
529#endif
530 for ( ; n; ++dst, --n)
531 ::new (static_cast<void*>(dst)) T ();
532}
533
534/* Copy-construct N elements in DST from *SRC. */
535
536template <typename T>
537inline void
538vec_copy_construct (T *dst, const T *src, unsigned n)
539{
540 for ( ; n; ++dst, ++src, --n)
541 ::new (static_cast<void*>(dst)) T (*src);
542}
543
544/* Type to provide zero-initialized values for vec<T, A, L>. This is
545 used to provide nil initializers for vec instances. Since vec must
546 be a trivially copyable type that can be copied by memcpy and zeroed
547 out by memset, it must have defaulted default and copy ctor and copy
548 assignment. To initialize a vec either use value initialization
549 (e.g., vec() or vec v{ };) or assign it the value vNULL. This isn't
550 needed for file-scope and function-local static vectors, which are
551 zero-initialized by default. */
552struct vnull { };
553constexpr vnull vNULL{ };
554
555
556/* Embeddable vector. These vectors are suitable to be embedded
557 in other data structures so that they can be pre-allocated in a
558 contiguous memory block.
559
560 Embeddable vectors are implemented using the trailing array idiom,
561 thus they are not resizeable without changing the address of the
562 vector object itself. This means you cannot have variables or
563 fields of embeddable vector type -- always use a pointer to a
564 vector. The one exception is the final field of a structure, which
565 could be a vector type.
566
567 You will have to use the embedded_size & embedded_init calls to
568 create such objects, and they will not be resizeable (so the 'safe'
569 allocation variants are not available).
570
571 Properties:
572
573 - The whole vector and control data are allocated in a single
574 contiguous block. It uses the trailing-vector idiom, so
575 allocation must reserve enough space for all the elements
576 in the vector plus its control data.
577 - The vector cannot be re-allocated.
578 - The vector cannot grow nor shrink.
579 - No indirections needed for access/manipulation.
580 - It requires 2 words of storage (prior to vector allocation). */
581
582template<typename T, typename A>
583struct GTY((user)) vec<T, A, vl_embed>
584{
585public:
586 unsigned allocated (void) const { return m_vecpfx.m_alloc; }
587 unsigned length (void) const { return m_vecpfx.m_num; }
588 bool is_empty (void) const { return m_vecpfx.m_num == 0; }
589 T *address (void) { return reinterpret_cast <T *> (this + 1); }
590 const T *address (void) const
591 { return reinterpret_cast <const T *> (this + 1); }
592 T *begin () { return address (); }
593 const T *begin () const { return address (); }
594 T *end () { return address () + length (); }
595 const T *end () const { return address () + length (); }
596 const T &operator[] (unsigned) const;
597 T &operator[] (unsigned);
598 T &last (void);
599 bool space (unsigned) const;
600 bool iterate (unsigned, T *) const;
601 bool iterate (unsigned, T **) const;
602 vec *copy (ALONE_CXX_MEM_STAT_INFO) const;
603 void splice (const vec &);
604 void splice (const vec *src);
605 T *quick_push (const T &);
606 T &pop (void);
607 void truncate (unsigned);
608 void quick_insert (unsigned, const T &);
609 void ordered_remove (unsigned);
610 void unordered_remove (unsigned);
611 void block_remove (unsigned, unsigned);
612  void qsort (int (*) (const void *, const void *));
613 void sort (int (*) (const void *, const void *, void *), void *);
614 void stablesort (int (*) (const void *, const void *, void *), void *);
615 T *bsearch (const void *key, int (*compar) (const void *, const void *));
616 T *bsearch (const void *key,
617 int (*compar)(const void *, const void *, void *), void *);
618 unsigned lower_bound (const T &, bool (*) (const T &, const T &)) const;
619 bool contains (const T &search) const;
620 static size_t embedded_size (unsigned);
621 void embedded_init (unsigned, unsigned = 0, unsigned = 0);
622 void quick_grow (unsigned len);
623 void quick_grow_cleared (unsigned len);
624
625 /* vec class can access our internal data and functions. */
626 template <typename, typename, typename> friend struct vec;
627
628 /* The allocator types also need access to our internals. */
629 friend struct va_gc;
630 friend struct va_gc_atomic;
631 friend struct va_heap;
632
633 /* FIXME - This field should be private, but we need to cater to
634 compilers that have stricter notions of PODness for types. */
635 /* Align m_vecpfx to simplify address (). */
636 alignas (T) alignas (vec_prefix) vec_prefix m_vecpfx;
637};
638
639
640/* Convenience wrapper functions to use when dealing with pointers to
641 embedded vectors. Some functionality for these vectors must be
642 provided via free functions for these reasons:
643
644 1- The pointer may be NULL (e.g., before initial allocation).
645
646 2- When the vector needs to grow, it must be reallocated, so
647 the pointer will change its value.
648
649 Because of limitations with the current GC machinery, all vectors
650 in GC memory *must* be pointers. */
651
652
653/* If V contains no room for NELEMS elements, return false. Otherwise,
654 return true. */
655template<typename T, typename A>
656inline bool
657vec_safe_space (const vec<T, A, vl_embed> *v, unsigned nelems)
658{
659 return v ? v->space (nelems) : nelems == 0;
660}
661
662
663/* If V is NULL, return 0. Otherwise, return V->length(). */
664template<typename T, typename A>
665inline unsigned
666vec_safe_length (const vec<T, A, vl_embed> *v)
667{
668 return v ? v->length () : 0;
669}
670
671
672/* If V is NULL, return NULL. Otherwise, return V->address(). */
673template<typename T, typename A>
674inline T *
675vec_safe_address (vec<T, A, vl_embed> *v)
676{
677  return v ? v->address () : NULL;
678}
679
680
681/* If V is NULL, return true. Otherwise, return V->is_empty(). */
682template<typename T, typename A>
683inline bool
684vec_safe_is_empty (vec<T, A, vl_embed> *v)
685{
686 return v ? v->is_empty () : true;
687}
688
689/* If V does not have space for NELEMS elements, call
690 V->reserve(NELEMS, EXACT). */
691template<typename T, typename A>
692inline bool
693vec_safe_reserve (vec<T, A, vl_embed> *&v, unsigned nelems, bool exact = false
694 CXX_MEM_STAT_INFO)
695{
696  bool extend = nelems ? !vec_safe_space (v, nelems) : false;
       (22) Assuming 'nelems' is not equal to 0
       (23) '?' condition is true
697  if (extend)
       (23.1) 'extend' is true
       (24) Taking true branch
698    A::reserve (v, nelems, exact PASS_MEM_STAT);
       (25) Calling 'va_gc::reserve'
       (30) Returning from 'va_gc::reserve'
699  return extend;
700}
701
702template<typename T, typename A>
703inline bool
704vec_safe_reserve_exact (vec<T, A, vl_embed> *&v, unsigned nelems
705 CXX_MEM_STAT_INFO)
706{
707 return vec_safe_reserve (v, nelems, true PASS_MEM_STAT);
708}
709
710
711/* Allocate GC memory for V with space for NELEMS slots. If NELEMS
712 is 0, V is initialized to NULL. */
713
714template<typename T, typename A>
715inline void
716vec_alloc (vec<T, A, vl_embed> *&v, unsigned nelems CXX_MEM_STAT_INFO)
717{
718  v = NULL;
719  vec_safe_reserve (v, nelems, false PASS_MEM_STAT);
       (21) Calling 'vec_safe_reserve<constructor_elt, va_gc>'
       (31) Returning from 'vec_safe_reserve<constructor_elt, va_gc>'
720}
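/* Hedged sketch (not part of vec.h), tied to the analyzer path annotated
   above: vec_alloc leaves V equal to NULL when no slots end up being
   allocated (documented for NELEMS == 0), so a caller that dereferences
   the result unconditionally needs either a nonzero request or a guard.
   The report's path instantiates this with constructor_elt; `int` is used
   here only to keep the example self-contained.  */
static void
example_vec_alloc (unsigned nelems)
{
  vec<int, va_gc> *v;
  vec_alloc (v, nelems);
  /* v may still be NULL here (e.g. nelems == 0); check before using the
     members directly, or stick to the vec_safe_* wrappers.  */
  if (v)
    gcc_checking_assert (v->length () == 0);
}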
721
722
723/* Free the GC memory allocated by vector V and set it to NULL. */
724
725template<typename T, typename A>
726inline void
727vec_free (vec<T, A, vl_embed> *&v)
728{
729 A::release (v);
730}
731
732
733/* Grow V to length LEN. Allocate it, if necessary. */
734template<typename T, typename A>
735inline void
736vec_safe_grow (vec<T, A, vl_embed> *&v, unsigned len,
737 bool exact = false CXX_MEM_STAT_INFO)
738{
739 unsigned oldlen = vec_safe_length (v);
740  gcc_checking_assert (len >= oldlen);
741 vec_safe_reserve (v, len - oldlen, exact PASS_MEM_STAT);
742 v->quick_grow (len);
743}
744
745
746/* If V is NULL, allocate it. Call V->safe_grow_cleared(LEN). */
747template<typename T, typename A>
748inline void
749vec_safe_grow_cleared (vec<T, A, vl_embed> *&v, unsigned len,
750 bool exact = false CXX_MEM_STAT_INFO)
751{
752 unsigned oldlen = vec_safe_length (v);
753 vec_safe_grow (v, len, exact PASS_MEM_STAT);
754 vec_default_construct (v->address () + oldlen, len - oldlen);
755}
756
757
758/* Assume V is not NULL. */
759
760template<typename T>
761inline void
762vec_safe_grow_cleared (vec<T, va_heap, vl_ptr> *&v,
763 unsigned len, bool exact = false CXX_MEM_STAT_INFO)
764{
765 v->safe_grow_cleared (len, exact PASS_MEM_STAT);
766}
767
768/* If V does not have space for NELEMS elements, call
769 V->reserve(NELEMS, EXACT). */
770
771template<typename T>
772inline bool
773vec_safe_reserve (vec<T, va_heap, vl_ptr> *&v, unsigned nelems, bool exact = false
774 CXX_MEM_STAT_INFO)
775{
776 return v->reserve (nelems, exact);
777}
778
779
780/* If V is NULL return false, otherwise return V->iterate(IX, PTR). */
781template<typename T, typename A>
782inline bool
783vec_safe_iterate (const vec<T, A, vl_embed> *v, unsigned ix, T **ptr)
784{
785 if (v)
786 return v->iterate (ix, ptr);
787 else
788 {
789 *ptr = 0;
790 return false;
791 }
792}
793
794template<typename T, typename A>
795inline bool
796vec_safe_iterate (const vec<T, A, vl_embed> *v, unsigned ix, T *ptr)
797{
798 if (v)
799 return v->iterate (ix, ptr);
800 else
801 {
802 *ptr = 0;
803 return false;
804 }
805}
806
807
808/* If V has no room for one more element, reallocate it. Then call
809 V->quick_push(OBJ). */
810template<typename T, typename A>
811inline T *
812vec_safe_push (vec<T, A, vl_embed> *&v, const T &obj CXX_MEM_STAT_INFO)
813{
814 vec_safe_reserve (v, 1, false PASS_MEM_STAT);
815 return v->quick_push (obj);
816}
817
818
819/* if V has no room for one more element, reallocate it. Then call
820 V->quick_insert(IX, OBJ). */
821template<typename T, typename A>
822inline void
823vec_safe_insert (vec<T, A, vl_embed> *&v, unsigned ix, const T &obj
824 CXX_MEM_STAT_INFO)
825{
826 vec_safe_reserve (v, 1, false PASS_MEM_STAT);
827 v->quick_insert (ix, obj);
828}
829
830
831/* If V is NULL, do nothing. Otherwise, call V->truncate(SIZE). */
832template<typename T, typename A>
833inline void
834vec_safe_truncate (vec<T, A, vl_embed> *v, unsigned size)
835{
836 if (v)
837 v->truncate (size);
838}
839
840
841/* If SRC is not NULL, return a pointer to a copy of it. */
842template<typename T, typename A>
843inline vec<T, A, vl_embed> *
844vec_safe_copy (vec<T, A, vl_embed> *src CXX_MEM_STAT_INFO)
845{
846  return src ? src->copy (ALONE_PASS_MEM_STAT) : NULL;
847}
848
849/* Copy the elements from SRC to the end of DST as if by memcpy.
850 Reallocate DST, if necessary. */
851template<typename T, typename A>
852inline void
853vec_safe_splice (vec<T, A, vl_embed> *&dst, const vec<T, A, vl_embed> *src
854 CXX_MEM_STAT_INFO)
855{
856 unsigned src_len = vec_safe_length (src);
857 if (src_len)
858 {
859 vec_safe_reserve_exact (dst, vec_safe_length (dst) + src_len
860 PASS_MEM_STAT);
861 dst->splice (*src);
862 }
863}
864
865/* Return true if SEARCH is an element of V. Note that this is O(N) in the
866 size of the vector and so should be used with care. */
867
868template<typename T, typename A>
869inline bool
870vec_safe_contains (vec<T, A, vl_embed> *v, const T &search)
871{
872 return v ? v->contains (search) : false;
873}
874
875/* Index into vector. Return the IX'th element. IX must be in the
876 domain of the vector. */
877
878template<typename T, typename A>
879inline const T &
880vec<T, A, vl_embed>::operator[] (unsigned ix) const
881{
882  gcc_checking_assert (ix < m_vecpfx.m_num);
883 return address ()[ix];
884}
885
886template<typename T, typename A>
887inline T &
888vec<T, A, vl_embed>::operator[] (unsigned ix)
889{
890  gcc_checking_assert (ix < m_vecpfx.m_num);
891 return address ()[ix];
892}
893
894
895/* Get the final element of the vector, which must not be empty. */
896
897template<typename T, typename A>
898inline T &
899vec<T, A, vl_embed>::last (void)
900{
901  gcc_checking_assert (m_vecpfx.m_num > 0);
902 return (*this)[m_vecpfx.m_num - 1];
903}
904
905
906/* If this vector has space for NELEMS additional entries, return
907 true. You usually only need to use this if you are doing your
908 own vector reallocation, for instance on an embedded vector. This
909 returns true in exactly the same circumstances that vec::reserve
910 will. */
911
912template<typename T, typename A>
913inline bool
914vec<T, A, vl_embed>::space (unsigned nelems) const
915{
916 return m_vecpfx.m_alloc - m_vecpfx.m_num >= nelems;
917}
918
919
920/* Return iteration condition and update *PTR to (a copy of) the IX'th
921 element of this vector. Use this to iterate over the elements of a
922 vector as follows,
923
924 for (ix = 0; v->iterate (ix, &val); ix++)
925 continue; */
926
927template<typename T, typename A>
928inline bool
929vec<T, A, vl_embed>::iterate (unsigned ix, T *ptr) const
930{
931 if (ix < m_vecpfx.m_num)
932 {
933 *ptr = address ()[ix];
934 return true;
935 }
936 else
937 {
938 *ptr = 0;
939 return false;
940 }
941}
942
943
944/* Return iteration condition and update *PTR to point to the
945 IX'th element of this vector. Use this to iterate over the
946 elements of a vector as follows,
947
948 for (ix = 0; v->iterate (ix, &ptr); ix++)
949 continue;
950
951 This variant is for vectors of objects. */
952
953template<typename T, typename A>
954inline bool
955vec<T, A, vl_embed>::iterate (unsigned ix, T **ptr) const
956{
957 if (ix < m_vecpfx.m_num)
958 {
959      *ptr = CONST_CAST (T *, &address ()[ix]);
960 return true;
961 }
962 else
963 {
964 *ptr = 0;
965 return false;
966 }
967}
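/* Hedged sketch (not part of vec.h) of the pointer-variant iteration idiom
   documented above, for a vector of objects; the local names are made up
   for the example.  */
static unsigned
example_count_nonzero (const vec<int, va_heap, vl_embed> &v)
{
  unsigned count = 0;
  int *elt_ptr;
  for (unsigned ix = 0; v.iterate (ix, &elt_ptr); ix++)
    if (*elt_ptr != 0)
      count++;
  return count;
}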
968
969
970/* Return a pointer to a copy of this vector. */
971
972template<typename T, typename A>
973inline vec<T, A, vl_embed> *
974vec<T, A, vl_embed>::copy (ALONE_MEM_STAT_DECL) const
975{
976  vec<T, A, vl_embed> *new_vec = NULL;
977 unsigned len = length ();
978 if (len)
979 {
980 vec_alloc (new_vec, len PASS_MEM_STAT);
981 new_vec->embedded_init (len, len);
982 vec_copy_construct (new_vec->address (), address (), len);
983 }
984 return new_vec;
985}
986
987
988/* Copy the elements from SRC to the end of this vector as if by memcpy.
989 The vector must have sufficient headroom available. */
990
991template<typename T, typename A>
992inline void
993vec<T, A, vl_embed>::splice (const vec<T, A, vl_embed> &src)
994{
995 unsigned len = src.length ();
996 if (len)
997 {
998      gcc_checking_assert (space (len));
999 vec_copy_construct (end (), src.address (), len);
1000 m_vecpfx.m_num += len;
1001 }
1002}
1003
1004template<typename T, typename A>
1005inline void
1006vec<T, A, vl_embed>::splice (const vec<T, A, vl_embed> *src)
1007{
1008 if (src)
1009 splice (*src);
1010}
1011
1012
1013/* Push OBJ (a new element) onto the end of the vector. There must be
1014 sufficient space in the vector. Return a pointer to the slot
1015 where OBJ was inserted. */
1016
1017template<typename T, typename A>
1018inline T *
1019vec<T, A, vl_embed>::quick_push (const T &obj)
1020{
1021  gcc_checking_assert (space (1));
1022 T *slot = &address ()[m_vecpfx.m_num++];
1023 *slot = obj;
1024 return slot;
1025}
1026
1027
1028/* Pop and return the last element off the end of the vector. */
1029
1030template<typename T, typename A>
1031inline T &
1032vec<T, A, vl_embed>::pop (void)
1033{
1034  gcc_checking_assert (length () > 0);
1035 return address ()[--m_vecpfx.m_num];
1036}
1037
1038
1039/* Set the length of the vector to SIZE. The new length must be less
1040 than or equal to the current length. This is an O(1) operation. */
1041
1042template<typename T, typename A>
1043inline void
1044vec<T, A, vl_embed>::truncate (unsigned size)
1045{
1046  gcc_checking_assert (length () >= size);
1047 m_vecpfx.m_num = size;
1048}
1049
1050
1051/* Insert an element, OBJ, at the IXth position of this vector. There
1052 must be sufficient space. */
1053
1054template<typename T, typename A>
1055inline void
1056vec<T, A, vl_embed>::quick_insert (unsigned ix, const T &obj)
1057{
1058  gcc_checking_assert (length () < allocated ());
1059  gcc_checking_assert (ix <= length ());
1060 T *slot = &address ()[ix];
1061 memmove (slot + 1, slot, (m_vecpfx.m_num++ - ix) * sizeof (T));
1062 *slot = obj;
1063}
1064
1065
1066/* Remove an element from the IXth position of this vector. Ordering of
1067 remaining elements is preserved. This is an O(N) operation due to
1068 memmove. */
1069
1070template<typename T, typename A>
1071inline void
1072vec<T, A, vl_embed>::ordered_remove (unsigned ix)
1073{
1074  gcc_checking_assert (ix < length ());
1075 T *slot = &address ()[ix];
1076 memmove (slot, slot + 1, (--m_vecpfx.m_num - ix) * sizeof (T));
1077}
1078
1079
1080/* Remove elements in [START, END) from VEC for which COND holds. Ordering of
1081 remaining elements is preserved. This is an O(N) operation. */
1082
1083#define VEC_ORDERED_REMOVE_IF_FROM_TO(vec, read_index, write_index,	\
1084				      elem_ptr, start, end, cond)	\
1085  {									\
1086    gcc_assert ((end) <= (vec).length ());				\
1087    for (read_index = write_index = (start); read_index < (end);	\
1088	 ++read_index)							\
1089      {								\
1090	elem_ptr = &(vec)[read_index];					\
1091	bool remove_p = (cond);						\
1092	if (remove_p)							\
1093	  continue;							\
1094									\
1095	if (read_index != write_index)					\
1096	  (vec)[write_index] = (vec)[read_index];			\
1097									\
1098	write_index++;							\
1099      }								\
1100									\
1101    if (read_index - write_index > 0)					\
1102      (vec).block_remove (write_index, read_index - write_index);	\
1103  }
1104
1105
1106/* Remove elements from VEC for which COND holds. Ordering of remaining
1107 elements is preserved. This is an O(N) operation. */
1108
1109#define VEC_ORDERED_REMOVE_IF(vec, read_index, write_index, elem_ptr,	\
1110			      cond)					\
1111  VEC_ORDERED_REMOVE_IF_FROM_TO ((vec), read_index, write_index,	\
1112				 elem_ptr, 0, (vec).length (), (cond))
1113
1114/* Remove an element from the IXth position of this vector. Ordering of
1115 remaining elements is destroyed. This is an O(1) operation. */
1116
1117template<typename T, typename A>
1118inline void
1119vec<T, A, vl_embed>::unordered_remove (unsigned ix)
1120{
1121  gcc_checking_assert (ix < length ());
1122 T *p = address ();
1123 p[ix] = p[--m_vecpfx.m_num];
1124}
1125
1126
1127/* Remove LEN elements starting at the IXth. Ordering is retained.
1128 This is an O(N) operation due to memmove. */
1129
1130template<typename T, typename A>
1131inline void
1132vec<T, A, vl_embed>::block_remove (unsigned ix, unsigned len)
1133{
1134  gcc_checking_assert (ix + len <= length ());
1135 T *slot = &address ()[ix];
1136 m_vecpfx.m_num -= len;
1137 memmove (slot, slot + len, (m_vecpfx.m_num - ix) * sizeof (T));
1138}
1139
1140
1141/* Sort the contents of this vector with qsort. CMP is the comparison
1142 function to pass to qsort. */
1143
1144template<typename T, typename A>
1145inline void
1146vec<T, A, vl_embed>::qsort (int (*cmp) (const void *, const void *))
1147{
1148 if (length () > 1)
1149 gcc_qsort (address (), length (), sizeof (T), cmp);
1150}
1151
1152/* Sort the contents of this vector with qsort. CMP is the comparison
1153 function to pass to qsort. */
1154
1155template<typename T, typename A>
1156inline void
1157vec<T, A, vl_embed>::sort (int (*cmp) (const void *, const void *, void *),
1158 void *data)
1159{
1160 if (length () > 1)
1161 gcc_sort_r (address (), length (), sizeof (T), cmp, data);
1162}
1163
1164/* Sort the contents of this vector with gcc_stablesort_r. CMP is the
1165 comparison function to pass to qsort. */
1166
1167template<typename T, typename A>
1168inline void
1169vec<T, A, vl_embed>::stablesort (int (*cmp) (const void *, const void *,
1170 void *), void *data)
1171{
1172 if (length () > 1)
1173 gcc_stablesort_r (address (), length (), sizeof (T), cmp, data);
1174}
1175
1176/* Search the contents of the sorted vector with a binary search.
1177 CMP is the comparison function to pass to bsearch. */
1178
1179template<typename T, typename A>
1180inline T *
1181vec<T, A, vl_embed>::bsearch (const void *key,
1182 int (*compar) (const void *, const void *))
1183{
1184 const void *base = this->address ();
1185 size_t nmemb = this->length ();
1186 size_t size = sizeof (T);
1187 /* The following is a copy of glibc stdlib-bsearch.h. */
1188 size_t l, u, idx;
1189 const void *p;
1190 int comparison;
1191
1192 l = 0;
1193 u = nmemb;
1194 while (l < u)
1195 {
1196 idx = (l + u) / 2;
1197 p = (const void *) (((const char *) base) + (idx * size));
1198 comparison = (*compar) (key, p);
1199 if (comparison < 0)
1200 u = idx;
1201 else if (comparison > 0)
1202 l = idx + 1;
1203 else
1204 return (T *)const_cast<void *>(p);
1205 }
1206
1207  return NULL;
1208}
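/* Hedged sketch (not part of vec.h) pairing the qsort and bsearch members
   above; `cmp_int` is a hypothetical comparator written for this example.  */
static int
cmp_int (const void *a, const void *b)
{
  int ia = *(const int *) a, ib = *(const int *) b;
  return ia < ib ? -1 : ia > ib ? 1 : 0;
}

static bool
example_sorted_contains (vec<int, va_heap, vl_embed> *v, int key)
{
  if (!v)
    return false;
  v->qsort (cmp_int);                    /* sort in place first */
  return v->bsearch (&key, cmp_int) != NULL;
}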
1209
1210/* Search the contents of the sorted vector with a binary search.
1211 CMP is the comparison function to pass to bsearch. */
1212
1213template<typename T, typename A>
1214inline T *
1215vec<T, A, vl_embed>::bsearch (const void *key,
1216 int (*compar) (const void *, const void *,
1217 void *), void *data)
1218{
1219 const void *base = this->address ();
1220 size_t nmemb = this->length ();
1221 size_t size = sizeof (T);
1222 /* The following is a copy of glibc stdlib-bsearch.h. */
1223 size_t l, u, idx;
1224 const void *p;
1225 int comparison;
1226
1227 l = 0;
1228 u = nmemb;
1229 while (l < u)
1230 {
1231 idx = (l + u) / 2;
1232 p = (const void *) (((const char *) base) + (idx * size));
1233 comparison = (*compar) (key, p, data);
1234 if (comparison < 0)
1235 u = idx;
1236 else if (comparison > 0)
1237 l = idx + 1;
1238 else
1239 return (T *)const_cast<void *>(p);
1240 }
1241
1242  return NULL;
1243}
1244
1245/* Return true if SEARCH is an element of V. Note that this is O(N) in the
1246 size of the vector and so should be used with care. */
1247
1248template<typename T, typename A>
1249inline bool
1250vec<T, A, vl_embed>::contains (const T &search) const
1251{
1252 unsigned int len = length ();
1253 const T *p = address ();
1254 for (unsigned int i = 0; i < len; i++)
1255 {
1256 const T *slot = &p[i];
1257 if (*slot == search)
1258 return true;
1259 }
1260
1261 return false;
1262}
1263
1264/* Find and return the first position in which OBJ could be inserted
1265 without changing the ordering of this vector. LESSTHAN is a
1266 function that returns true if the first argument is strictly less
1267 than the second. */
1268
1269template<typename T, typename A>
1270unsigned
1271vec<T, A, vl_embed>::lower_bound (const T &obj,
1272 bool (*lessthan)(const T &, const T &))
1273 const
1274{
1275 unsigned int len = length ();
1276 unsigned int half, middle;
1277 unsigned int first = 0;
1278 while (len > 0)
1279 {
1280 half = len / 2;
1281 middle = first;
1282 middle += half;
1283 const T &middle_elem = address ()[middle];
1284 if (lessthan (middle_elem, obj))
1285 {
1286 first = middle;
1287 ++first;
1288 len = len - half - 1;
1289 }
1290 else
1291 len = half;
1292 }
1293 return first;
1294}
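
/* Example (not part of vec.h): a minimal sketch of lower_bound on a sorted
   embedded vector.  The helper int_less and the function name
   example_lower_bound are hypothetical.  */

static inline bool
int_less (const int &a, const int &b)
{
  return a < b;
}

static unsigned
example_lower_bound (const vec<int, va_heap, vl_embed> &v)
{
  /* First index at which 42 could be inserted while keeping V sorted;
     equal to v.length () if every element is less than 42.  */
  return v.lower_bound (42, int_less);
}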
1295
1296
1297/* Return the number of bytes needed to embed an instance of an
1298 embeddable vec inside another data structure.
1299
1300 Use these methods to determine the required size and initialization
1301 of a vector V of type T embedded within another structure (as the
1302 final member):
1303
1304 size_t vec<T, A, vl_embed>::embedded_size (unsigned alloc);
1305 void v->embedded_init (unsigned alloc, unsigned num);
1306
1307 These allow the caller to perform the memory allocation. */
1308
1309template<typename T, typename A>
1310inline size_t
1311vec<T, A, vl_embed>::embedded_size (unsigned alloc)
1312{
1313 struct alignas (T) U { char data[sizeof (T)]; };
1314 typedef vec<U, A, vl_embed> vec_embedded;
1315 typedef typename std::conditional<std::is_standard_layout<T>::value,
1316 vec, vec_embedded>::type vec_stdlayout;
1317 static_assert (sizeof (vec_stdlayout) == sizeof (vec), "");
1318 static_assert (alignof (vec_stdlayout) == alignof (vec), "");
1319 return sizeof (vec_stdlayout) + alloc * sizeof (T);
1320}
1321
1322
1323/* Initialize the vector to contain room for ALLOC elements and
1324   NUM active elements.  AUT is nonzero if the vector uses auto storage.  */
1325
1326template<typename T, typename A>
1327inline void
1328vec<T, A, vl_embed>::embedded_init (unsigned alloc, unsigned num, unsigned aut)
1329{
1330 m_vecpfx.m_alloc = alloc;
1331 m_vecpfx.m_using_auto_storage = aut;
1332 m_vecpfx.m_num = num;
1333}
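
/* Example (not part of vec.h): a sketch of the embedded_size / embedded_init
   recipe above.  The same sizing applies when the vector is the final member
   of a larger structure; xmalloc and the function name are illustrative
   assumptions.  */

static vec<int, va_heap, vl_embed> *
example_embedded_create (unsigned alloc)
{
  /* The caller performs the allocation: the control data plus
     ALLOC * sizeof (int) for the elements.  */
  size_t sz = vec<int, va_heap, vl_embed>::embedded_size (alloc);
  vec<int, va_heap, vl_embed> *v
    = (vec<int, va_heap, vl_embed> *) xmalloc (sz);
  v->embedded_init (alloc, 0, 0);	/* ALLOC slots, 0 active elements.  */
  return v;
}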
1334
1335
1336/* Grow the vector to a specific length. LEN must be as long or longer than
1337 the current length. The new elements are uninitialized. */
1338
1339template<typename T, typename A>
1340inline void
1341vec<T, A, vl_embed>::quick_grow (unsigned len)
1342{
1343 gcc_checking_assert (length () <= len && len <= m_vecpfx.m_alloc);
1344 m_vecpfx.m_num = len;
1345}
1346
1347
1348/* Grow the vector to a specific length. LEN must be as long or longer than
1349 the current length. The new elements are initialized to zero. */
1350
1351template<typename T, typename A>
1352inline void
1353vec<T, A, vl_embed>::quick_grow_cleared (unsigned len)
1354{
1355 unsigned oldlen = length ();
1356 size_t growby = len - oldlen;
1357 quick_grow (len);
1358 if (growby != 0)
1359 vec_default_construct (address () + oldlen, growby);
1360}
1361
1362/* Garbage collection support for vec<T, A, vl_embed>. */
1363
1364template<typename T>
1365void
1366gt_ggc_mx (vec<T, va_gc> *v)
1367{
1368 extern void gt_ggc_mx (T &);
1369 for (unsigned i = 0; i < v->length (); i++)
1370 gt_ggc_mx ((*v)[i]);
1371}
1372
1373template<typename T>
1374void
1375gt_ggc_mx (vec<T, va_gc_atomic, vl_embed> *v ATTRIBUTE_UNUSED)
1376{
1377 /* Nothing to do. Vectors of atomic types wrt GC do not need to
1378 be traversed. */
1379}
1380
1381
1382/* PCH support for vec<T, A, vl_embed>. */
1383
1384template<typename T, typename A>
1385void
1386gt_pch_nx (vec<T, A, vl_embed> *v)
1387{
1388 extern void gt_pch_nx (T &);
1389 for (unsigned i = 0; i < v->length (); i++)
1390 gt_pch_nx ((*v)[i]);
1391}
1392
1393template<typename T, typename A>
1394void
1395gt_pch_nx (vec<T *, A, vl_embed> *v, gt_pointer_operator op, void *cookie)
1396{
1397 for (unsigned i = 0; i < v->length (); i++)
1398 op (&((*v)[i]), NULL, cookie);
1399}
1400
1401template<typename T, typename A>
1402void
1403gt_pch_nx (vec<T, A, vl_embed> *v, gt_pointer_operator op, void *cookie)
1404{
1405 extern void gt_pch_nx (T *, gt_pointer_operator, void *);
1406 for (unsigned i = 0; i < v->length (); i++)
1407 gt_pch_nx (&((*v)[i]), op, cookie);
1408}
1409
1410
1411/* Space efficient vector. These vectors can grow dynamically and are
1412 allocated together with their control data. They are suited to be
1413 included in data structures. Prior to initial allocation, they
1414 only take a single word of storage.
1415
1416 These vectors are implemented as a pointer to an embeddable vector.
1417 The semantics allow for this pointer to be NULL to represent empty
1418 vectors. This way, empty vectors occupy minimal space in the
1419 structure containing them.
1420
1421 Properties:
1422
1423 - The whole vector and control data are allocated in a single
1424 contiguous block.
1425 - The whole vector may be re-allocated.
1426 - Vector data may grow and shrink.
1427 - Access and manipulation requires a pointer test and
1428 indirection.
1429 - It requires 1 word of storage (prior to vector allocation).
1430
1431
1432 Limitations:
1433
1434 These vectors must be PODs because they are stored in unions.
1435 (http://en.wikipedia.org/wiki/Plain_old_data_structures).
1436 As long as we use C++03, we cannot have constructors nor
1437 destructors in classes that are stored in unions. */
1438
1439template<typename T, size_t N = 0>
1440class auto_vec;
1441
1442template<typename T>
1443struct vec<T, va_heap, vl_ptr>
1444{
1445public:
1446 /* Default ctors to ensure triviality. Use value-initialization
1447 (e.g., vec() or vec v{ };) or vNULL to create a zero-initialized
1448 instance. */
1449 vec () = default;
1450 vec (const vec &) = default;
1451 /* Initialization from the generic vNULL. */
1452 vec (vnull): m_vec () { }
1453 /* Same as default ctor: vec storage must be released manually. */
1454 ~vec () = default;
1455
1456 /* Defaulted same as copy ctor. */
1457 vec& operator= (const vec &) = default;
1458
1459 /* Prevent implicit conversion from auto_vec. Use auto_vec::to_vec()
1460 instead. */
1461 template <size_t N>
1462 vec (auto_vec<T, N> &) = delete;
1463
1464 template <size_t N>
1465 void operator= (auto_vec<T, N> &) = delete;
1466
1467 /* Memory allocation and deallocation for the embedded vector.
1468 Needed because we cannot have proper ctors/dtors defined. */
1469 void create (unsigned nelems CXX_MEM_STAT_INFO);
1470 void release (void);
1471
1472 /* Vector operations. */
1473 bool exists (void) const
1474 { return m_vec != NULL; }
1475
1476 bool is_empty (void) const
1477 { return m_vec ? m_vec->is_empty () : true; }
1478
1479 unsigned allocated (void) const
1480 { return m_vec ? m_vec->allocated () : 0; }
1481
1482 unsigned length (void) const
1483 { return m_vec ? m_vec->length () : 0; }
1484
1485 T *address (void)
1486 { return m_vec ? m_vec->address () : NULL; }
1487
1488 const T *address (void) const
1489 { return m_vec ? m_vec->address () : NULL; }
1490
1491 T *begin () { return address (); }
1492 const T *begin () const { return address (); }
1493 T *end () { return begin () + length (); }
1494 const T *end () const { return begin () + length (); }
1495 const T &operator[] (unsigned ix) const
1496 { return (*m_vec)[ix]; }
1497
1498 bool operator!=(const vec &other) const
1499 { return !(*this == other); }
1500
1501 bool operator==(const vec &other) const
1502 { return address () == other.address (); }
1503
1504 T &operator[] (unsigned ix)
1505 { return (*m_vec)[ix]; }
1506
1507 T &last (void)
1508 { return m_vec->last (); }
1509
1510 bool space (int nelems) const
1511 { return m_vec ? m_vec->space (nelems) : nelems == 0; }
1512
1513 bool iterate (unsigned ix, T *p) const;
1514 bool iterate (unsigned ix, T **p) const;
1515 vec copy (ALONE_CXX_MEM_STAT_INFO) const;
1516 bool reserve (unsigned, bool = false CXX_MEM_STAT_INFO);
1517 bool reserve_exact (unsigned CXX_MEM_STAT_INFO);
1518 void splice (const vec &);
1519 void safe_splice (const vec & CXX_MEM_STAT_INFO);
1520 T *quick_push (const T &);
1521 T *safe_push (const T &CXX_MEM_STAT_INFO);
1522 T &pop (void);
1523 void truncate (unsigned);
1524 void safe_grow (unsigned, bool = false CXX_MEM_STAT_INFO);
1525 void safe_grow_cleared (unsigned, bool = false CXX_MEM_STAT_INFO);
1526 void quick_grow (unsigned);
1527 void quick_grow_cleared (unsigned);
1528 void quick_insert (unsigned, const T &);
1529 void safe_insert (unsigned, const T & CXX_MEM_STAT_INFO);
1530 void ordered_remove (unsigned);
1531 void unordered_remove (unsigned);
1532 void block_remove (unsigned, unsigned);
1533 void qsort (int (*) (const void *, const void *));
1534 void sort (int (*) (const void *, const void *, void *), void *);
1535 void stablesort (int (*) (const void *, const void *, void *), void *);
1536 T *bsearch (const void *key, int (*compar)(const void *, const void *));
1537 T *bsearch (const void *key,
1538 int (*compar)(const void *, const void *, void *), void *);
1539 unsigned lower_bound (T, bool (*)(const T &, const T &)) const;
1540 bool contains (const T &search) const;
1541 void reverse (void);
1542
1543 bool using_auto_storage () const;
1544
1545 /* FIXME - This field should be private, but we need to cater to
1546 compilers that have stricter notions of PODness for types. */
1547 vec<T, va_heap, vl_embed> *m_vec;
1548};
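
/* Example (not part of vec.h): a minimal sketch of the heap vector above.
   The function name example_heap_vec is hypothetical.  */

static void
example_heap_vec ()
{
  vec<int> v{ };	/* Zero-initialized; no storage allocated yet.  */
  v.safe_push (1);	/* Allocates the embedded vector on demand.  */
  v.safe_push (2);
  gcc_assert (v.length () == 2 && v[0] == 1);
  v.release ();		/* Storage must be released manually.  */
}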
1549
1550
1551/* auto_vec is a subclass of vec that automatically manages creating and
1552 releasing the internal vector. If N is non zero then it has N elements of
1553 internal storage. The default is no internal storage, and you probably only
1554 want to ask for internal storage for vectors on the stack because if the
1555 size of the vector is larger than the internal storage that space is wasted.
1556 */
1557template<typename T, size_t N /* = 0 */>
1558class auto_vec : public vec<T, va_heap>
1559{
1560public:
1561 auto_vec ()
1562 {
1563 m_auto.embedded_init (N, 0, 1);
1564 /* ??? Instead of initializing m_vec from &m_auto directly use an
1565 expression that avoids referring to a specific member of 'this'
1566 to derail the -Wstringop-overflow diagnostic code, avoiding
1567 the impression that data accesses are supposed to be to the
1568 m_auto member storage. */
1569 size_t off = (char *) &m_auto - (char *) this;
1570 this->m_vec = (vec<T, va_heap, vl_embed> *) ((char *) this + off);
1571 }
1572
1573 auto_vec (size_t s CXX_MEM_STAT_INFO)
1574 {
1575 if (s > N)
1576 {
1577 this->create (s PASS_MEM_STAT);
1578 return;
1579 }
1580
1581 m_auto.embedded_init (N, 0, 1);
1582 /* ??? See above. */
1583 size_t off = (char *) &m_auto - (char *) this;
1584 this->m_vec = (vec<T, va_heap, vl_embed> *) ((char *) this + off);
1585 }
1586
1587 ~auto_vec ()
1588 {
1589 this->release ();
1590 }
1591
1592 /* Explicitly convert to the base class. There is no conversion
1593 from a const auto_vec because a copy of the returned vec can
1594 be used to modify *THIS.
1595 This is a legacy function not to be used in new code. */
1596 vec<T, va_heap> to_vec_legacy () {
1597 return *static_cast<vec<T, va_heap> *>(this);
1598 }
1599
1600private:
1601 vec<T, va_heap, vl_embed> m_auto;
1602 unsigned char m_data[sizeof (T) * N];
1603};
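
/* Example (not part of vec.h): a sketch of auto_vec with internal storage.
   The function name example_auto_vec is hypothetical.  */

static void
example_auto_vec ()
{
  /* Room for 8 elements inside the object itself; the heap is used only
     if more than 8 elements are pushed.  The dtor releases the storage.  */
  auto_vec<int, 8> stack_buffered;
  for (int i = 0; i < 4; i++)
    stack_buffered.quick_push (i);	/* Fits in the internal storage.  */

  /* The auto_vec<T, 0> specialization below has no internal storage.  */
  auto_vec<int> heap_only;
  heap_only.safe_push (42);
}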
1604
1605/* auto_vec is a subclass of vec whose storage is released when it is
1606 destroyed. */
1607template<typename T>
1608class auto_vec<T, 0> : public vec<T, va_heap>
1609{
1610public:
1611 auto_vec () { this->m_vec = NULL; }
1612 auto_vec (size_t n CXX_MEM_STAT_INFO) { this->create (n PASS_MEM_STAT); }
1613 ~auto_vec () { this->release (); }
1614
1615 auto_vec (vec<T, va_heap>&& r)
1616 {
1617 gcc_assert (!r.using_auto_storage ());
1618 this->m_vec = r.m_vec;
1619 r.m_vec = NULL;
1620 }
1621
1622 auto_vec (auto_vec<T> &&r)
1623 {
1624 gcc_assert (!r.using_auto_storage ());
1625 this->m_vec = r.m_vec;
1626 r.m_vec = NULL;
1627 }
1628
1629 auto_vec& operator= (vec<T, va_heap>&& r)
1630 {
1631 if (this == &r)
1632 return *this;
1633
1634 gcc_assert (!r.using_auto_storage ());
1635 this->release ();
1636 this->m_vec = r.m_vec;
1637 r.m_vec = NULL;
1638 return *this;
1639 }
1640
1641 auto_vec& operator= (auto_vec<T> &&r)
1642 {
1643 if (this == &r)
1644 return *this;
1645
1646 gcc_assert (!r.using_auto_storage ());
1647 this->release ();
1648 this->m_vec = r.m_vec;
1649 r.m_vec = NULL;
1650 return *this;
1651 }
1652
1653 /* Explicitly convert to the base class. There is no conversion
1654 from a const auto_vec because a copy of the returned vec can
1655 be used to modify *THIS.
1656 This is a legacy function not to be used in new code. */
1657 vec<T, va_heap> to_vec_legacy () {
1658 return *static_cast<vec<T, va_heap> *>(this);
1659 }
1660
1661 // You probably don't want to copy a vector, so these are deleted to prevent
1662 // unintentional use.  If you really need a copy of the vector's contents you
1663 // can use copy ().
1664 auto_vec(const auto_vec &) = delete;
1665 auto_vec &operator= (const auto_vec &) = delete;
1666};
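
/* Example (not part of vec.h): handing ownership of a plain heap vec to an
   auto_vec<T, 0> through the move constructor above.  Assumes std::move
   (<utility>) is visible in the including translation unit; the function
   name example_take_ownership is hypothetical.  */

static void
example_take_ownership ()
{
  vec<int> raw{ };
  raw.safe_push (7);
  /* OWNED steals RAW's buffer (RAW must not use auto storage) and releases
     it when OWNED goes out of scope; RAW is left empty.  */
  auto_vec<int> owned (std::move (raw));
  gcc_assert (owned.length () == 1 && raw.length () == 0);
}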
1667
1668
1669/* Allocate heap memory for pointer V and create the internal vector
1670 with space for NELEMS elements. If NELEMS is 0, the internal
1671 vector is initialized to empty. */
1672
1673template<typename T>
1674inline void
1675vec_alloc (vec<T> *&v, unsigned nelems CXX_MEM_STAT_INFO)
1676{
1677 v = new vec<T>;
1678 v->create (nelems PASS_MEM_STAT);
1679}
1680
1681
1682/* A subclass of auto_vec <char *> that frees all of its elements on
1683 deletion. */
1684
1685class auto_string_vec : public auto_vec <char *>
1686{
1687 public:
1688 ~auto_string_vec ();
1689};
1690
1691/* A subclass of auto_vec <T *> that deletes all of its elements on
1692 destruction.
1693
1694 This is a crude way for a vec to "own" the objects it points to
1695 and clean up automatically.
1696
1697 For example, no attempt is made to delete elements when an item
1698 within the vec is overwritten.
1699
1700 We can't rely on gnu::unique_ptr within a container,
1701 since we can't rely on move semantics in C++98. */
1702
1703template <typename T>
1704class auto_delete_vec : public auto_vec <T *>
1705{
1706 public:
1707 auto_delete_vec () {}
1708 auto_delete_vec (size_t s) : auto_vec <T *> (s) {}
1709
1710 ~auto_delete_vec ();
1711
1712private:
1713 DISABLE_COPY_AND_ASSIGN(auto_delete_vec);
1714};
1715
1716/* Conditionally allocate heap memory for VEC and its internal vector. */
1717
1718template<typename T>
1719inline void
1720vec_check_alloc (vec<T, va_heap> *&vec, unsigned nelems CXX_MEM_STAT_INFO)
1721{
1722 if (!vec)
1723 vec_alloc (vec, nelems PASS_MEM_STAT);
1724}
1725
1726
1727/* Free the heap memory allocated by vector V and set it to NULL. */
1728
1729template<typename T>
1730inline void
1731vec_free (vec<T> *&v)
1732{
1733 if (v == NULL)
1734 return;
1735
1736 v->release ();
1737 delete v;
1738 v = NULL;
1739}
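
/* Example (not part of vec.h): a sketch of vec_alloc, vec_check_alloc and
   vec_free above.  The function name example_vec_alloc is hypothetical.  */

static void
example_vec_alloc ()
{
  vec<int> *v = NULL;
  vec_check_alloc (v, 4);	/* Allocates V because it is NULL.  */
  v->quick_push (1);		/* Space for 4 elements was reserved.  */
  vec_check_alloc (v, 4);	/* No-op: V already exists.  */
  vec_free (v);			/* Releases the storage, deletes V, sets it to NULL.  */
}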
1740
1741
1742/* Return iteration condition and update PTR to point to the IX'th
1743 element of this vector. Use this to iterate over the elements of a
1744 vector as follows,
1745
1746 for (ix = 0; v.iterate (ix, &ptr); ix++)
1747 continue; */
1748
1749template<typename T>
1750inline bool
1751vec<T, va_heap, vl_ptr>::iterate (unsigned ix, T *ptr) const
1752{
1753 if (m_vec)
1754 return m_vec->iterate (ix, ptr);
1755 else
1756 {
1757 *ptr = 0;
1758 return false;
1759 }
1760}
1761
1762
1763/* Return iteration condition and update *PTR to point to the
1764 IX'th element of this vector. Use this to iterate over the
1765 elements of a vector as follows,
1766
1767 for (ix = 0; v->iterate (ix, &ptr); ix++)
1768 continue;
1769
1770 This variant is for vectors of objects. */
1771
1772template<typename T>
1773inline bool
1774vec<T, va_heap, vl_ptr>::iterate (unsigned ix, T **ptr) const
1775{
1776 if (m_vec)
1777 return m_vec->iterate (ix, ptr);
1778 else
1779 {
1780 *ptr = 0;
1781 return false;
1782 }
1783}
1784
1785
1786/* Convenience macro for forward iteration. */
1787#define FOR_EACH_VEC_ELT(V, I, P) \
1788 for (I = 0; (V).iterate ((I), &(P)); ++(I))
1789
1790#define FOR_EACH_VEC_SAFE_ELT(V, I, P) \
1791 for (I = 0; vec_safe_iterate ((V), (I), &(P)); ++(I))
1792
1793/* Likewise, but start from FROM rather than 0. */
1794#define FOR_EACH_VEC_ELT_FROM(V, I, P, FROM) \
1795 for (I = (FROM); (V).iterate ((I), &(P)); ++(I))
1796
1797/* Convenience macro for reverse iteration. */
1798#define FOR_EACH_VEC_ELT_REVERSE(V, I, P) \
1799 for (I = (V).length () - 1; \
1800 (V).iterate ((I), &(P)); \
1801 (I)--)
1802
1803#define FOR_EACH_VEC_SAFE_ELT_REVERSE(V, I, P) \
1804 for (I = vec_safe_length (V) - 1; \
1805 vec_safe_iterate ((V), (I), &(P)); \
1806 (I)--)
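
/* Example (not part of vec.h): a sketch of the iteration macros above.
   The function name example_iterate is hypothetical.  */

static int
example_iterate (const vec<int> &v)
{
  unsigned ix;
  int elt;
  int sum = 0;
  /* ELT receives a copy of each element in turn; the loop ends when
     iterate returns false.  */
  FOR_EACH_VEC_ELT (v, ix, elt)
    sum += elt;
  return sum;
}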
1807
1808/* auto_string_vec's dtor, freeing all contained strings, automatically
1809 chaining up to ~auto_vec <char *>, which frees the internal buffer. */
1810
1811inline
1812auto_string_vec::~auto_string_vec ()
1813{
1814 int i;
1815 char *str;
1816 FOR_EACH_VEC_ELT (*this, i, str)
1817 free (str);
1818}
1819
1820/* auto_delete_vec's dtor, deleting all contained items, automatically
1821 chaining up to ~auto_vec <T*>, which frees the internal buffer. */
1822
1823template <typename T>
1824inline
1825auto_delete_vec<T>::~auto_delete_vec ()
1826{
1827 int i;
1828 T *item;
1829 FOR_EACH_VEC_ELT (*this, i, item)
1830 delete item;
1831}
1832
1833
1834/* Return a copy of this vector. */
1835
1836template<typename T>
1837inline vec<T, va_heap, vl_ptr>
1838vec<T, va_heap, vl_ptr>::copy (ALONE_MEM_STAT_DECL) const
1839{
1840 vec<T, va_heap, vl_ptr> new_vec{ };
1841 if (length ())
1842 new_vec.m_vec = m_vec->copy (ALONE_PASS_MEM_STAT);
1843 return new_vec;
1844}
1845
1846
1847/* Ensure that the vector has at least RESERVE slots available (if
1848 EXACT is false), or exactly RESERVE slots available (if EXACT is
1849 true).
1850
1851 This may create additional headroom if EXACT is false.
1852
1853 Note that this can cause the embedded vector to be reallocated.
1854 Returns true iff reallocation actually occurred. */
1855
1856template<typename T>
1857inline bool
1858vec<T, va_heap, vl_ptr>::reserve (unsigned nelems, bool exact MEM_STAT_DECL)
1859{
1860 if (space (nelems))
1861 return false;
1862
1863 /* For now play a game with va_heap::reserve to hide our auto storage if any,
1864 this is necessary because it doesn't have enough information to know the
1865 embedded vector is in auto storage, and so should not be freed. */
1866 vec<T, va_heap, vl_embed> *oldvec = m_vec;
1867 unsigned int oldsize = 0;
1868 bool handle_auto_vec = m_vec && using_auto_storage ();
1869 if (handle_auto_vec)
1870 {
1871 m_vec = NULL;
1872 oldsize = oldvec->length ();
1873 nelems += oldsize;
1874 }
1875
1876 va_heap::reserve (m_vec, nelems, exact PASS_MEM_STAT);
1877 if (handle_auto_vec)
1878 {
1879 vec_copy_construct (m_vec->address (), oldvec->address (), oldsize);
1880 m_vec->m_vecpfx.m_num = oldsize;
1881 }
1882
1883 return true;
1884}
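
/* Example (not part of vec.h): a sketch of reserving headroom and then using
   quick_push, which requires the space to already exist.  The function name
   example_reserve is hypothetical.  */

static void
example_reserve (vec<int> &v, unsigned n)
{
  /* Ensure at least N more slots are free (EXACT defaults to false, so
     extra headroom may be added); N quick_push calls then cannot overflow.  */
  v.reserve (n);
  for (unsigned i = 0; i < n; i++)
    v.quick_push ((int) i);
}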
1885
1886
1887/* Ensure that this vector has exactly NELEMS slots available. This
1888 will not create additional headroom. Note this can cause the
1889 embedded vector to be reallocated. Returns true iff reallocation
1890 actually occurred. */
1891
1892template<typename T>
1893inline bool
1894vec<T, va_heap, vl_ptr>::reserve_exact (unsigned nelems MEM_STAT_DECL)
1895{
1896 return reserve (nelems, true PASS_MEM_STAT);
1897}
1898
1899
1900/* Create the internal vector and reserve NELEMS for it. This is
1901 exactly like vec::reserve, but the internal vector is
1902 unconditionally allocated from scratch. The old one, if it
1903 existed, is lost. */
1904
1905template<typename T>
1906inline void
1907vec<T, va_heap, vl_ptr>::create (unsigned nelems MEM_STAT_DECL)
1908{
1909 m_vec = NULL;
1910 if (nelems > 0)
1911 reserve_exact (nelems PASS_MEM_STAT);
1912}
1913
1914
1915/* Free the memory occupied by the embedded vector. */
1916
1917template<typename T>
1918inline void
1919vec<T, va_heap, vl_ptr>::release (void)
1920{
1921 if (!m_vec)
1922 return;
1923
1924 if (using_auto_storage ())
1925 {
1926 m_vec->m_vecpfx.m_num = 0;
1927 return;
1928 }
1929
1930 va_heap::release (m_vec);
1931}
1932
1933/* Copy the elements from SRC to the end of this vector as if by memcpy.
1934 SRC and this vector must be allocated with the same memory
1935 allocation mechanism. This vector is assumed to have sufficient
1936 headroom available. */
1937
1938template<typename T>
1939inline void
1940vec<T, va_heap, vl_ptr>::splice (const vec<T, va_heap, vl_ptr> &src)
1941{
1942 if (src.length ())
1943 m_vec->splice (*(src.m_vec));
1944}
1945
1946
1947/* Copy the elements in SRC to the end of this vector as if by memcpy.
1948 SRC and this vector must be allocated with the same mechanism.
1949 If there is not enough headroom in this vector, it will be reallocated
1950 as needed. */
1951
1952template<typename T>
1953inline void
1954vec<T, va_heap, vl_ptr>::safe_splice (const vec<T, va_heap, vl_ptr> &src
1955 MEM_STAT_DECL)
1956{
1957 if (src.length ())
1958 {
1959 reserve_exact (src.length ());
1960 splice (src);
1961 }
1962}
1963
1964
1965/* Push OBJ (a new element) onto the end of the vector. There must be
1966 sufficient space in the vector. Return a pointer to the slot
1967 where OBJ was inserted. */
1968
1969template<typename T>
1970inline T *
1971vec<T, va_heap, vl_ptr>::quick_push (const T &obj)
1972{
1973 return m_vec->quick_push (obj);
1974}
1975
1976
1977/* Push a new element OBJ onto the end of this vector. Reallocates
1978 the embedded vector, if needed. Return a pointer to the slot where
1979 OBJ was inserted. */
1980
1981template<typename T>
1982inline T *
1983vec<T, va_heap, vl_ptr>::safe_push (const T &obj MEM_STAT_DECL)
1984{
1985 reserve (1, false PASS_MEM_STAT);
1986 return quick_push (obj);
1987}
1988
1989
1990/* Pop and return the last element off the end of the vector. */
1991
1992template<typename T>
1993inline T &
1994vec<T, va_heap, vl_ptr>::pop (void)
1995{
1996 return m_vec->pop ();
1997}
1998
1999
2000/* Set the length of the vector to LEN. The new length must be less
2001 than or equal to the current length. This is an O(1) operation. */
2002
2003template<typename T>
2004inline void
2005vec<T, va_heap, vl_ptr>::truncate (unsigned size)
2006{
2007 if (m_vec)
2008 m_vec->truncate (size);
2009 else
2010 gcc_checking_assert (size == 0);
2011}
2012
2013
2014/* Grow the vector to a specific length. LEN must be as long or
2015 longer than the current length. The new elements are
2016 uninitialized. Reallocate the internal vector, if needed. */
2017
2018template<typename T>
2019inline void
2020vec<T, va_heap, vl_ptr>::safe_grow (unsigned len, bool exact MEM_STAT_DECL)
2021{
2022 unsigned oldlen = length ();
2023 gcc_checking_assert (oldlen <= len);
2024 reserve (len - oldlen, exact PASS_MEM_STAT);
2025 if (m_vec)
2026 m_vec->quick_grow (len);
2027 else
2028 gcc_checking_assert (len == 0);
2029}
2030
2031
2032/* Grow the embedded vector to a specific length. LEN must be as
2033 long or longer than the current length. The new elements are
2034 initialized to zero. Reallocate the internal vector, if needed. */
2035
2036template<typename T>
2037inline void
2038vec<T, va_heap, vl_ptr>::safe_grow_cleared (unsigned len, bool exact
2039 MEM_STAT_DECL)
2040{
2041 unsigned oldlen = length ();
2042 size_t growby = len - oldlen;
2043 safe_grow (len, exact PASS_MEM_STAT);
2044 if (growby != 0)
2045 vec_default_construct (address () + oldlen, growby);
2046}
2047
2048
2049/* Same as vec::safe_grow but without reallocation of the internal vector.
2050 If the vector cannot be extended, a runtime assertion will be triggered. */
2051
2052template<typename T>
2053inline void
2054vec<T, va_heap, vl_ptr>::quick_grow (unsigned len)
2055{
2056 gcc_checking_assert (m_vec);
2057 m_vec->quick_grow (len);
2058}
2059
2060
2061/* Same as vec::quick_grow_cleared but without reallocation of the
2062 internal vector. If the vector cannot be extended, a runtime
2063 assertion will be triggered. */
2064
2065template<typename T>
2066inline void
2067vec<T, va_heap, vl_ptr>::quick_grow_cleared (unsigned len)
2068{
2069 gcc_checking_assert (m_vec);
2070 m_vec->quick_grow_cleared (len);
2071}
2072
2073
2074/* Insert an element, OBJ, at the IXth position of this vector. There
2075 must be sufficient space. */
2076
2077template<typename T>
2078inline void
2079vec<T, va_heap, vl_ptr>::quick_insert (unsigned ix, const T &obj)
2080{
2081 m_vec->quick_insert (ix, obj);
2082}
2083
2084
2085/* Insert an element, OBJ, at the IXth position of the vector.
2086 Reallocate the embedded vector, if necessary. */
2087
2088template<typename T>
2089inline void
2090vec<T, va_heap, vl_ptr>::safe_insert (unsigned ix, const T &obj MEM_STAT_DECL)
2091{
2092 reserve (1, false PASS_MEM_STAT);
2093 quick_insert (ix, obj);
2094}
2095
2096
2097/* Remove an element from the IXth position of this vector. Ordering of
2098 remaining elements is preserved. This is an O(N) operation due to
2099 a memmove. */
2100
2101template<typename T>
2102inline void
2103vec<T, va_heap, vl_ptr>::ordered_remove (unsigned ix)
2104{
2105 m_vec->ordered_remove (ix);
2106}
2107
2108
2109/* Remove an element from the IXth position of this vector. Ordering
2110 of remaining elements is destroyed. This is an O(1) operation. */
2111
2112template<typename T>
2113inline void
2114vec<T, va_heap, vl_ptr>::unordered_remove (unsigned ix)
2115{
2116 m_vec->unordered_remove (ix);
2117}
2118
2119
2120/* Remove LEN elements starting at the IXth. Ordering is retained.
2121 This is an O(N) operation due to memmove. */
2122
2123template<typename T>
2124inline void
2125vec<T, va_heap, vl_ptr>::block_remove (unsigned ix, unsigned len)
2126{
2127 m_vec->block_remove (ix, len);
2128}
2129
2130
2131/* Sort the contents of this vector with qsort. CMP is the comparison
2132 function to pass to qsort. */
2133
2134template<typename T>
2135inline void
2136vec<T, va_heap, vl_ptr>::qsort (int (*cmp) (const void *, const void *))
2137{
2138 if (m_vec)
2139 m_vec->qsort (cmp);
2140}
2141
2142/* Sort the contents of this vector with gcc_sort_r.  CMP is the
2143   three-argument comparison function; DATA is passed through to it.  */
2144
2145template<typename T>
2146inline void
2147vec<T, va_heap, vl_ptr>::sort (int (*cmp) (const void *, const void *,
2148 void *), void *data)
2149{
2150 if (m_vec)
2151 m_vec->sort (cmp, data);
2152}
2153
2154/* Sort the contents of this vector with gcc_stablesort_r. CMP is the
2155 comparison function to pass to qsort. */
2156
2157template<typename T>
2158inline void
2159vec<T, va_heap, vl_ptr>::stablesort (int (*cmp) (const void *, const void *,
2160 void *), void *data)
2161{
2162 if (m_vec)
2163 m_vec->stablesort (cmp, data);
2164}
2165
2166/* Search the contents of the sorted vector with a binary search.
2167 CMP is the comparison function to pass to bsearch. */
2168
2169template<typename T>
2170inline T *
2171vec<T, va_heap, vl_ptr>::bsearch (const void *key,
2172 int (*cmp) (const void *, const void *))
2173{
2174 if (m_vec)
2175 return m_vec->bsearch (key, cmp);
2176 return NULL;
2177}
2178
2179/* Search the contents of the sorted vector with a binary search.
2180 CMP is the comparison function to pass to bsearch. */
2181
2182template<typename T>
2183inline T *
2184vec<T, va_heap, vl_ptr>::bsearch (const void *key,
2185 int (*cmp) (const void *, const void *,
2186 void *), void *data)
2187{
2188 if (m_vec)
2189 return m_vec->bsearch (key, cmp, data);
2190 return NULL;
2191}
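
/* Example (not part of vec.h): sorting a vector and then searching it with
   the two-argument bsearch above.  The names cmp_int and example_sort_search
   are hypothetical.  */

static int
cmp_int (const void *p1, const void *p2)
{
  int a = *(const int *) p1;
  int b = *(const int *) p2;
  return a < b ? -1 : a > b ? 1 : 0;
}

static bool
example_sort_search (vec<int> &v, int key)
{
  v.qsort (cmp_int);			/* gcc_qsort under the hood.  */
  return v.bsearch (&key, cmp_int) != NULL;
}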
2192
2193
2194/* Find and return the first position in which OBJ could be inserted
2195 without changing the ordering of this vector. LESSTHAN is a
2196 function that returns true if the first argument is strictly less
2197 than the second. */
2198
2199template<typename T>
2200inline unsigned
2201vec<T, va_heap, vl_ptr>::lower_bound (T obj,
2202 bool (*lessthan)(const T &, const T &))
2203 const
2204{
2205 return m_vec ? m_vec->lower_bound (obj, lessthan) : 0;
2206}
2207
2208/* Return true if SEARCH is an element of V. Note that this is O(N) in the
2209 size of the vector and so should be used with care. */
2210
2211template<typename T>
2212inline bool
2213vec<T, va_heap, vl_ptr>::contains (const T &search) const
2214{
2215 return m_vec ? m_vec->contains (search) : false;
2216}
2217
2218/* Reverse content of the vector. */
2219
2220template<typename T>
2221inline void
2222vec<T, va_heap, vl_ptr>::reverse (void)
2223{
2224 unsigned l = length ();
2225 T *ptr = address ();
2226
2227 for (unsigned i = 0; i < l / 2; i++)
2228 std::swap (ptr[i], ptr[l - i - 1]);
2229}
2230
2231template<typename T>
2232inline bool
2233vec<T, va_heap, vl_ptr>::using_auto_storage () const
2234{
2235 return m_vec ? m_vec->m_vecpfx.m_using_auto_storage : false;
2236}
2237
2238/* Release VEC and call release of all element vectors. */
2239
2240template<typename T>
2241inline void
2242release_vec_vec (vec<vec<T> > &vec)
2243{
2244 for (unsigned i = 0; i < vec.length (); i++)
2245 vec[i].release ();
2246
2247 vec.release ();
2248}
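
/* Example (not part of vec.h): releasing a vector of vectors with
   release_vec_vec above.  The function name example_vec_of_vec is
   hypothetical.  */

static void
example_vec_of_vec ()
{
  vec<vec<int> > groups{ };
  vec<int> inner{ };
  inner.safe_push (1);
  groups.safe_push (inner);	/* GROUPS now owns INNER's storage.  */
  release_vec_vec (groups);	/* Releases each inner vector, then GROUPS.  */
}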
2249
2250// Provide a subset of the std::span functionality. (We can't use std::span
2251// itself because it's a C++20 feature.)
2252//
2253// In addition, provide an invalid value that is distinct from all valid
2254// sequences (including the empty sequence). This can be used to return
2255// failure without having to use std::optional.
2256//
2257// There is no operator bool because it would be ambiguous whether it is
2258// testing for a valid value or an empty sequence.
2259template<typename T>
2260class array_slice
2261{
2262 template<typename OtherT> friend class array_slice;
2263
2264public:
2265 using value_type = T;
2266 using iterator = T *;
2267 using const_iterator = const T *;
2268
2269 array_slice () : m_base (nullptr), m_size (0) {}
2270
2271 template<typename OtherT>
2272 array_slice (array_slice<OtherT> other)
2273 : m_base (other.m_base), m_size (other.m_size) {}
2274
2275 array_slice (iterator base, unsigned int size)
2276 : m_base (base), m_size (size) {}
2277
2278 template<size_t N>
2279 array_slice (T (&array)[N]) : m_base (array), m_size (N) {}
2280
2281 template<typename OtherT>
2282 array_slice (const vec<OtherT> &v)
2283 : m_base (v.address ()), m_size (v.length ()) {}
2284
2285 template<typename OtherT>
2286 array_slice (vec<OtherT> &v)
2287 : m_base (v.address ()), m_size (v.length ()) {}
2288
2289 template<typename OtherT>
2290 array_slice (const vec<OtherT, va_gc> *v)
2291 : m_base (v ? v->address () : nullptr), m_size (v ? v->length () : 0) {}
2292
2293 template<typename OtherT>
2294 array_slice (vec<OtherT, va_gc> *v)
2295 : m_base (v ? v->address () : nullptr), m_size (v ? v->length () : 0) {}
2296
2297 iterator begin () { return m_base; }
2298 iterator end () { return m_base + m_size; }
2299
2300 const_iterator begin () const { return m_base; }
2301 const_iterator end () const { return m_base + m_size; }
2302
2303 value_type &front ();
2304 value_type &back ();
2305 value_type &operator[] (unsigned int i);
2306
2307 const value_type &front () const;
2308 const value_type &back () const;
2309 const value_type &operator[] (unsigned int i) const;
2310
2311 size_t size () const { return m_size; }
2312 size_t size_bytes () const { return m_size * sizeof (T); }
2313 bool empty () const { return m_size == 0; }
2314
2315 // An invalid array_slice that represents a failed operation. This is
2316 // distinct from an empty slice, which is a valid result in some contexts.
2317 static array_slice invalid () { return { nullptr, ~0U }; }
2318
2319 // True if the array is valid, false if it is an array like INVALID.
2320 bool is_valid () const { return m_base || m_size == 0; }
2321
2322private:
2323 iterator m_base;
2324 unsigned int m_size;
2325};
2326
2327template<typename T>
2328inline typename array_slice<T>::value_type &
2329array_slice<T>::front ()
2330{
2331 gcc_checking_assert (m_size)((void)(!(m_size) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/vec.h"
, 2331, __FUNCTION__), 0 : 0))
;
2332 return m_base[0];
2333}
2334
2335template<typename T>
2336inline const typename array_slice<T>::value_type &
2337array_slice<T>::front () const
2338{
2339 gcc_checking_assert (m_size)((void)(!(m_size) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/vec.h"
, 2339, __FUNCTION__), 0 : 0))
;
2340 return m_base[0];
2341}
2342
2343template<typename T>
2344inline typename array_slice<T>::value_type &
2345array_slice<T>::back ()
2346{
2347 gcc_checking_assert (m_size)((void)(!(m_size) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/vec.h"
, 2347, __FUNCTION__), 0 : 0))
;
2348 return m_base[m_size - 1];
2349}
2350
2351template<typename T>
2352inline const typename array_slice<T>::value_type &
2353array_slice<T>::back () const
2354{
2355 gcc_checking_assert (m_size)((void)(!(m_size) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/vec.h"
, 2355, __FUNCTION__), 0 : 0))
;
2356 return m_base[m_size - 1];
2357}
2358
2359template<typename T>
2360inline typename array_slice<T>::value_type &
2361array_slice<T>::operator[] (unsigned int i)
2362{
2363 gcc_checking_assert (i < m_size)((void)(!(i < m_size) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/vec.h"
, 2363, __FUNCTION__), 0 : 0))
;
2364 return m_base[i];
2365}
2366
2367template<typename T>
2368inline const typename array_slice<T>::value_type &
2369array_slice<T>::operator[] (unsigned int i) const
2370{
2371 gcc_checking_assert (i < m_size)((void)(!(i < m_size) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/vec.h"
, 2371, __FUNCTION__), 0 : 0))
;
2372 return m_base[i];
2373}
2374
2375template<typename T>
2376array_slice<T>
2377make_array_slice (T *base, unsigned int size)
2378{
2379 return array_slice<T> (base, size);
2380}
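
/* Example (not part of vec.h): a sketch of array_slice and its invalid ()
   sentinel.  The function name example_first_half is hypothetical.  */

static array_slice<const int>
example_first_half (const vec<int> &v)
{
  /* Report failure distinctly from an empty slice when the length is odd.  */
  if (v.length () % 2 != 0)
    return array_slice<const int>::invalid ();
  return make_array_slice (v.address (), v.length () / 2);
}

/* A caller would check is_valid () before iterating over the result:

     array_slice<const int> half = example_first_half (v);
     if (half.is_valid ())
       for (const int &x : half)
	 use (x);  */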
2381
2382#if (GCC_VERSION >= 3000)
2383# pragma GCC poison m_vec m_vecpfx m_vecdata
2384#endif
2385
2386#endif // GCC_VEC_H