Bug Summary

File: build/gcc/tree-vect-stmts.cc
Warning: line 10151, column 9
Value stored to 'msq' is never read
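
This warning comes from the analyzer's deadcode.DeadStores checker, which flags a store whose value is overwritten or discarded before it is ever read. The flagged statement at line 10151 lies outside the excerpt below; the following minimal, self-contained sketch (not the GCC code; names illustrative) shows the pattern the checker reports:

    int f (int a, int b)
    {
      int msq = a + b;  /* flagged: value stored to 'msq' is never read */
      msq = a * b;      /* overwrites the value before any use          */
      return msq;
    }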

Annotated Source Code


clang -cc1 -cc1 -triple x86_64-suse-linux -analyze -disable-free -clear-ast-before-backend -disable-llvm-verifier -discard-value-names -main-file-name tree-vect-stmts.cc -analyzer-checker=core -analyzer-checker=apiModeling -analyzer-checker=unix -analyzer-checker=deadcode -analyzer-checker=cplusplus -analyzer-checker=security.insecureAPI.UncheckedReturn -analyzer-checker=security.insecureAPI.getpw -analyzer-checker=security.insecureAPI.gets -analyzer-checker=security.insecureAPI.mktemp -analyzer-checker=security.insecureAPI.mkstemp -analyzer-checker=security.insecureAPI.vfork -analyzer-checker=nullability.NullPassedToNonnull -analyzer-checker=nullability.NullReturnedFromNonnull -analyzer-output plist -w -setup-static-analyzer -analyzer-config-compatibility-mode=true -mrelocation-model static -mframe-pointer=none -fmath-errno -ffp-contract=on -fno-rounding-math -mconstructor-aliases -funwind-tables=2 -target-cpu x86-64 -tune-cpu generic -debugger-tuning=gdb -fcoverage-compilation-dir=/buildworker/marxinbox-gcc-clang-static-analyzer/objdir/gcc -resource-dir /usr/lib64/clang/15.0.7 -D IN_GCC -D HAVE_CONFIG_H -I . -I . -I /buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc -I /buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/. -I /buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../include -I /buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libcpp/include -I /buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libcody -I /buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libdecnumber -I /buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libdecnumber/bid -I ../libdecnumber -I /buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libbacktrace -internal-isystem /usr/bin/../lib64/gcc/x86_64-suse-linux/13/../../../../include/c++/13 -internal-isystem /usr/bin/../lib64/gcc/x86_64-suse-linux/13/../../../../include/c++/13/x86_64-suse-linux -internal-isystem /usr/bin/../lib64/gcc/x86_64-suse-linux/13/../../../../include/c++/13/backward -internal-isystem /usr/lib64/clang/15.0.7/include -internal-isystem /usr/local/include -internal-isystem /usr/bin/../lib64/gcc/x86_64-suse-linux/13/../../../../x86_64-suse-linux/include -internal-externc-isystem /include -internal-externc-isystem /usr/include -O2 -Wno-narrowing -Wwrite-strings -Wno-long-long -Wno-variadic-macros -Wno-overlength-strings -fdeprecated-macro -fdebug-compilation-dir=/buildworker/marxinbox-gcc-clang-static-analyzer/objdir/gcc -ferror-limit 19 -fno-rtti -fgnuc-version=4.2.1 -vectorize-loops -vectorize-slp -analyzer-output=plist-html -analyzer-config silence-checkers=core.NullDereference -faddrsig -D__GCC_HAVE_DWARF2_CFI_ASM=1 -o /buildworker/marxinbox-gcc-clang-static-analyzer/objdir/clang-static-analyzer/2023-03-27-141847-20772-1/report-Pg2E55.plist -x c++ /buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.cc
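
The line above is the exact clang -cc1 invocation the analyzer replayed for this translation unit. Assuming a configured GCC objdir, a run of this kind can be reproduced with scan-build, which wraps the compiler and enables the core, unix, deadcode, cplusplus and security checkers seen above by default (a sketch; the output directory and make target are placeholders):

    scan-build -o ./analyzer-reports make -C objdir/gcc tree-vect-stmts.o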
1/* Statement Analysis and Transformation for Vectorization
2 Copyright (C) 2003-2023 Free Software Foundation, Inc.
3 Contributed by Dorit Naishlos <dorit@il.ibm.com>
4 and Ira Rosen <irar@il.ibm.com>
5
6This file is part of GCC.
7
8GCC is free software; you can redistribute it and/or modify it under
9the terms of the GNU General Public License as published by the Free
10Software Foundation; either version 3, or (at your option) any later
11version.
12
13GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14WARRANTY; without even the implied warranty of MERCHANTABILITY or
15FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16for more details.
17
18You should have received a copy of the GNU General Public License
19along with GCC; see the file COPYING3. If not see
20<http://www.gnu.org/licenses/>. */
21
22#include "config.h"
23#include "system.h"
24#include "coretypes.h"
25#include "backend.h"
26#include "target.h"
27#include "rtl.h"
28#include "tree.h"
29#include "gimple.h"
30#include "ssa.h"
31#include "optabs-tree.h"
32#include "insn-config.h"
33#include "recog.h" /* FIXME: for insn_data */
34#include "cgraph.h"
35#include "dumpfile.h"
36#include "alias.h"
37#include "fold-const.h"
38#include "stor-layout.h"
39#include "tree-eh.h"
40#include "gimplify.h"
41#include "gimple-iterator.h"
42#include "gimplify-me.h"
43#include "tree-cfg.h"
44#include "tree-ssa-loop-manip.h"
45#include "cfgloop.h"
46#include "explow.h"
47#include "tree-ssa-loop.h"
48#include "tree-scalar-evolution.h"
49#include "tree-vectorizer.h"
50#include "builtins.h"
51#include "internal-fn.h"
52#include "tree-vector-builder.h"
53#include "vec-perm-indices.h"
54#include "tree-ssa-loop-niter.h"
55#include "gimple-fold.h"
56#include "regs.h"
57#include "attribs.h"
58
59/* For lang_hooks.types.type_for_mode. */
60#include "langhooks.h"
61
62/* Return the vectorized type for the given statement. */
63
64tree
65stmt_vectype (class _stmt_vec_info *stmt_info)
66{
67 return STMT_VINFO_VECTYPE (stmt_info);
68}
69
70/* Return TRUE iff the given statement is in an inner loop relative to
71 the loop being vectorized. */
72bool
73stmt_in_inner_loop_p (vec_info *vinfo, class _stmt_vec_info *stmt_info)
74{
75 gimple *stmt = STMT_VINFO_STMT (stmt_info);
76 basic_block bb = gimple_bb (stmt);
77 loop_vec_info loop_vinfo = dyn_cast <loop_vec_info> (vinfo);
78 class loop* loop;
79
80 if (!loop_vinfo)
81 return false;
82
83 loop = LOOP_VINFO_LOOP (loop_vinfo);
84
85 return (bb->loop_father == loop->inner);
86}
87
88/* Record the cost of a statement, either by directly informing the
89 target model or by saving it in a vector for later processing.
90 Return a preliminary estimate of the statement's cost. */
91
92static unsigned
93record_stmt_cost (stmt_vector_for_cost *body_cost_vec, int count,
94 enum vect_cost_for_stmt kind,
95 stmt_vec_info stmt_info, slp_tree node,
96 tree vectype, int misalign,
97 enum vect_cost_model_location where)
98{
99 if ((kind == vector_load || kind == unaligned_load)
100 && (stmt_info && STMT_VINFO_GATHER_SCATTER_P (stmt_info)))
101 kind = vector_gather_load;
102 if ((kind == vector_store || kind == unaligned_store)
103 && (stmt_info && STMT_VINFO_GATHER_SCATTER_P (stmt_info)))
104 kind = vector_scatter_store;
105
106 stmt_info_for_cost si
107 = { count, kind, where, stmt_info, node, vectype, misalign };
108 body_cost_vec->safe_push (si);
109
110 return (unsigned)
111 (builtin_vectorization_cost (kind, vectype, misalign) * count);
112}
113
114unsigned
115record_stmt_cost (stmt_vector_for_cost *body_cost_vec, int count,
116 enum vect_cost_for_stmt kind, stmt_vec_info stmt_info,
117 tree vectype, int misalign,
118 enum vect_cost_model_location where)
119{
120 return record_stmt_cost (body_cost_vec, count, kind, stmt_info, NULL,
121 vectype, misalign, where);
122}
123
124unsigned
125record_stmt_cost (stmt_vector_for_cost *body_cost_vec, int count,
126 enum vect_cost_for_stmt kind, slp_tree node,
127 tree vectype, int misalign,
128 enum vect_cost_model_location where)
129{
130 return record_stmt_cost (body_cost_vec, count, kind, NULL, node,
131 vectype, misalign, where);
132}
133
134unsigned
135record_stmt_cost (stmt_vector_for_cost *body_cost_vec, int count,
136 enum vect_cost_for_stmt kind,
137 enum vect_cost_model_location where)
138{
139 gcc_assert (kind == cond_branch_taken || kind == cond_branch_not_taken
140 || kind == scalar_stmt);
141 return record_stmt_cost (body_cost_vec, count, kind, NULL, NULL,
142 NULL_TREE, 0, where);
143}
144
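
All three public overloads above forward to the static worker at line 93. A call site typically accumulates the returned estimate into a running cost, mirroring the cost-model routines later in this file (e.g. vect_model_simple_cost); a representative sketch:

    /* One prologue broadcast plus NCOPIES vector statements in the body.  */
    prologue_cost += record_stmt_cost (cost_vec, 1, scalar_to_vec,
                                       stmt_info, 0, vect_prologue);
    inside_cost += record_stmt_cost (cost_vec, ncopies, vector_stmt,
                                     stmt_info, 0, vect_body);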
145/* Return a variable of type ELEM_TYPE[NELEMS]. */
146
147static tree
148create_vector_array (tree elem_type, unsigned HOST_WIDE_INT nelems)
149{
150 return create_tmp_var (build_array_type_nelts (elem_type, nelems),
151 "vect_array");
152}
153
154/* ARRAY is an array of vectors created by create_vector_array.
155 Return an SSA_NAME for the vector in index N. The reference
156 is part of the vectorization of STMT_INFO and the vector is associated
157 with scalar destination SCALAR_DEST. */
158
159static tree
160read_vector_array (vec_info *vinfo,
161 stmt_vec_info stmt_info, gimple_stmt_iterator *gsi,
162 tree scalar_dest, tree array, unsigned HOST_WIDE_INT n)
163{
164 tree vect_type, vect, vect_name, array_ref;
165 gimple *new_stmt;
166
167 gcc_assert (TREE_CODE (TREE_TYPE (array)) == ARRAY_TYPE);
168 vect_type = TREE_TYPE (TREE_TYPE (array));
169 vect = vect_create_destination_var (scalar_dest, vect_type);
170 array_ref = build4 (ARRAY_REF, vect_type, array,
171 build_int_cst (size_type_node, n),
172 NULL_TREE, NULL_TREE);
173
174 new_stmt = gimple_build_assign (vect, array_ref);
175 vect_name = make_ssa_name (vect, new_stmt);
176 gimple_assign_set_lhs (new_stmt, vect_name);
177 vect_finish_stmt_generation (vinfo, stmt_info, new_stmt, gsi);
178
179 return vect_name;
180}
181
182/* ARRAY is an array of vectors created by create_vector_array.
183 Emit code to store SSA_NAME VECT in index N of the array.
184 The store is part of the vectorization of STMT_INFO. */
185
186static void
187write_vector_array (vec_info *vinfo,
188 stmt_vec_info stmt_info, gimple_stmt_iterator *gsi,
189 tree vect, tree array, unsigned HOST_WIDE_INT n)
190{
191 tree array_ref;
192 gimple *new_stmt;
193
194 array_ref = build4 (ARRAY_REF, TREE_TYPE (vect), array,
195 build_int_cst (size_type_node, n),
196 NULL_TREE, NULL_TREE);
197
198 new_stmt = gimple_build_assign (array_ref, vect);
199 vect_finish_stmt_generation (vinfo, stmt_info, new_stmt, gsi);
200}
201
202/* PTR is a pointer to an array of type TYPE. Return a representation
203 of *PTR. The memory reference replaces those in FIRST_DR
204 (and its group). */
205
206static tree
207create_array_ref (tree type, tree ptr, tree alias_ptr_type)
208{
209 tree mem_ref;
210
211 mem_ref = build2 (MEM_REF, type, ptr, build_int_cst (alias_ptr_type, 0));
212 /* Arrays have the same alignment as their type. */
213 set_ptr_info_alignment (get_ptr_info (ptr), TYPE_ALIGN_UNIT (type), 0);
214 return mem_ref;
215}
216
217/* Add a clobber of variable VAR to the vectorization of STMT_INFO.
218 Emit the clobber before *GSI. */
219
220static void
221vect_clobber_variable (vec_info *vinfo, stmt_vec_info stmt_info,
222 gimple_stmt_iterator *gsi, tree var)
223{
224 tree clobber = build_clobber (TREE_TYPE (var));
225 gimple *new_stmt = gimple_build_assign (var, clobber);
226 vect_finish_stmt_generation (vinfo, stmt_info, new_stmt, gsi);
227}
228
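
A clobber marks the end of a variable's lifetime in GIMPLE. In a -fdump-tree dump, the statement built by vect_clobber_variable prints roughly as (a sketch; the exact annotation varies between GCC versions):

    vect_array = {CLOBBER};   /* tells later passes the storage is dead */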
229/* Utility functions used by vect_mark_stmts_to_be_vectorized. */
230
231/* Function vect_mark_relevant.
232
233 Mark STMT_INFO as "relevant for vectorization" and add it to WORKLIST. */
234
235static void
236vect_mark_relevant (vec<stmt_vec_info> *worklist, stmt_vec_info stmt_info,
237 enum vect_relevant relevant, bool live_p)
238{
239 enum vect_relevant save_relevant = STMT_VINFO_RELEVANT (stmt_info);
240 bool save_live_p = STMT_VINFO_LIVE_P (stmt_info);
241
242 if (dump_enabled_p ())
243 dump_printf_loc (MSG_NOTE, vect_location,
244 "mark relevant %d, live %d: %G", relevant, live_p,
245 stmt_info->stmt);
246
247 /* If this stmt is an original stmt in a pattern, we might need to mark its
248 related pattern stmt instead of the original stmt. However, such stmts
249 may have their own uses that are not in any pattern, in such cases the
250 stmt itself should be marked. */
251 if (STMT_VINFO_IN_PATTERN_P (stmt_info))
252 {
253 /* This is the last stmt in a sequence that was detected as a
254 pattern that can potentially be vectorized. Don't mark the stmt
255 as relevant/live because it's not going to be vectorized.
256 Instead mark the pattern-stmt that replaces it. */
257
258 if (dump_enabled_p ())
259 dump_printf_loc (MSG_NOTE, vect_location,
260 "last stmt in pattern. don't mark"
261 " relevant/live.\n");
262 stmt_vec_info old_stmt_info = stmt_info;
263 stmt_info = STMT_VINFO_RELATED_STMT (stmt_info);
264 gcc_assert (STMT_VINFO_RELATED_STMT (stmt_info) == old_stmt_info);
265 save_relevant = STMT_VINFO_RELEVANT (stmt_info);
266 save_live_p = STMT_VINFO_LIVE_P (stmt_info);
267 }
268
269 STMT_VINFO_LIVE_P (stmt_info) |= live_p;
270 if (relevant > STMT_VINFO_RELEVANT (stmt_info))
271 STMT_VINFO_RELEVANT (stmt_info) = relevant;
272
273 if (STMT_VINFO_RELEVANT (stmt_info) == save_relevant
274 && STMT_VINFO_LIVE_P (stmt_info) == save_live_p)
275 {
276 if (dump_enabled_p ())
277 dump_printf_loc (MSG_NOTE, vect_location,
278 "already marked relevant/live.\n");
279 return;
280 }
281
282 worklist->safe_push (stmt_info);
283}
284
285
286/* Function is_simple_and_all_uses_invariant
287
288 Return true if STMT_INFO is simple and all uses of it are invariant. */
289
290bool
291is_simple_and_all_uses_invariant (stmt_vec_info stmt_info,
292 loop_vec_info loop_vinfo)
293{
294 tree op;
295 ssa_op_iter iter;
296
297 gassign *stmt = dyn_cast <gassign *> (stmt_info->stmt);
298 if (!stmt)
299 return false;
300
301 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
302 {
303 enum vect_def_type dt = vect_uninitialized_def;
304
305 if (!vect_is_simple_use (op, loop_vinfo, &dt))
306 {
307 if (dump_enabled_p ())
308 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
309 "use not simple.\n");
310 return false;
311 }
312
313 if (dt != vect_external_def && dt != vect_constant_def)
314 return false;
315 }
316 return true;
317}
318
319/* Function vect_stmt_relevant_p.
320
321 Return true if STMT_INFO, in the loop that is represented by LOOP_VINFO,
322 is "relevant for vectorization".
323
324 A stmt is considered "relevant for vectorization" if:
325 - it has uses outside the loop.
326 - it has vdefs (it alters memory).
327 - control stmts in the loop (except for the exit condition).
328
329 CHECKME: what other side effects would the vectorizer allow? */
330
331static bool
332vect_stmt_relevant_p (stmt_vec_info stmt_info, loop_vec_info loop_vinfo,
333 enum vect_relevant *relevant, bool *live_p)
334{
335 class loop *loop = LOOP_VINFO_LOOP (loop_vinfo);
336 ssa_op_iter op_iter;
337 imm_use_iterator imm_iter;
338 use_operand_p use_p;
339 def_operand_p def_p;
340
341 *relevant = vect_unused_in_scope;
342 *live_p = false;
343
344 /* cond stmt other than loop exit cond. */
345 if (is_ctrl_stmt (stmt_info->stmt)
346 && STMT_VINFO_TYPE (stmt_info) != loop_exit_ctrl_vec_info_type)
347 *relevant = vect_used_in_scope;
348
349 /* changing memory. */
350 if (gimple_code (stmt_info->stmt) != GIMPLE_PHI)
351 if (gimple_vdef (stmt_info->stmt)
352 && !gimple_clobber_p (stmt_info->stmt))
353 {
354 if (dump_enabled_p ())
355 dump_printf_loc (MSG_NOTE, vect_location,
356 "vec_stmt_relevant_p: stmt has vdefs.\n");
357 *relevant = vect_used_in_scope;
358 }
359
360 /* uses outside the loop. */
361 FOR_EACH_PHI_OR_STMT_DEF (def_p, stmt_info->stmt, op_iter, SSA_OP_DEF)
362 {
363 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, DEF_FROM_PTR (def_p))
364 {
365 basic_block bb = gimple_bb (USE_STMT (use_p));
366 if (!flow_bb_inside_loop_p (loop, bb))
367 {
368 if (is_gimple_debug (USE_STMT (use_p)))
369 continue;
370
371 if (dump_enabled_p ())
372 dump_printf_loc (MSG_NOTE, vect_location,
373 "vec_stmt_relevant_p: used out of loop.\n");
374
375 /* We expect all such uses to be in the loop exit phis
376 (because of loop closed form) */
377 gcc_assert (gimple_code (USE_STMT (use_p)) == GIMPLE_PHI);
378 gcc_assert (bb == single_exit (loop)->dest);
379
380 *live_p = true;
381 }
382 }
383 }
384
385 if (*live_p && *relevant == vect_unused_in_scope
386 && !is_simple_and_all_uses_invariant (stmt_info, loop_vinfo))
387 {
388 if (dump_enabled_p ())
389 dump_printf_loc (MSG_NOTE, vect_location,
390 "vec_stmt_relevant_p: stmt live but not relevant.\n");
391 *relevant = vect_used_only_live;
392 }
393
394 return (*live_p || *relevant);
395}
396
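As an illustration of these criteria (a sketch, not taken from the report): in

    for (int i = 0; i < n; i++)
      {
        int t = b[i] + 1;  /* relevant: feeds the store below */
        a[i] = t;          /* relevant: has a vdef, i.e. it alters memory */
      }

the store is relevant because of its vdef, and the computation of t becomes relevant transitively; a use of t in a loop-closed PHI after the loop would additionally set *live_p.
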
397
398/* Function exist_non_indexing_operands_for_use_p
399
400 USE is one of the uses attached to STMT_INFO. Check if USE is
401 used in STMT_INFO for anything other than indexing an array. */
402
403static bool
404exist_non_indexing_operands_for_use_p (tree use, stmt_vec_info stmt_info)
405{
406 tree operand;
407
408 /* USE corresponds to some operand in STMT. If there is no data
409 reference in STMT, then any operand that corresponds to USE
410 is not indexing an array. */
411 if (!STMT_VINFO_DATA_REF (stmt_info))
412 return true;
413
414 /* STMT has a data_ref. FORNOW this means that it's one of
415 the following forms:
416 -1- ARRAY_REF = var
417 -2- var = ARRAY_REF
418 (This should have been verified in analyze_data_refs).
419
420 'var' in the second case corresponds to a def, not a use,
421 so USE cannot correspond to any operands that are not used
422 for array indexing.
423
424 Therefore, all we need to check is if STMT falls into the
425 first case, and whether var corresponds to USE. */
426
427 gassign *assign = dyn_cast <gassign *> (stmt_info->stmt);
428 if (!assign || !gimple_assign_copy_p (assign))
429 {
430 gcall *call = dyn_cast <gcall *> (stmt_info->stmt);
431 if (call && gimple_call_internal_p (call))
432 {
433 internal_fn ifn = gimple_call_internal_fn (call);
434 int mask_index = internal_fn_mask_index (ifn);
435 if (mask_index >= 0
436 && use == gimple_call_arg (call, mask_index))
437 return true;
438 int stored_value_index = internal_fn_stored_value_index (ifn);
439 if (stored_value_index >= 0
440 && use == gimple_call_arg (call, stored_value_index))
441 return true;
442 if (internal_gather_scatter_fn_p (ifn)
443 && use == gimple_call_arg (call, 1))
444 return true;
445 }
446 return false;
447 }
448
449 if (TREE_CODE (gimple_assign_lhs (assign)) == SSA_NAME)
450 return false;
451 operand = gimple_assign_rhs1 (assign);
452 if (TREE_CODE (operand) != SSA_NAME)
453 return false;
454
455 if (operand == use)
456 return true;
457
458 return false;
459}
460
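For example (a sketch): given a statement with a data reference, a use that only feeds the address computation is not a non-indexing operand, while the stored value is:

    a[i] = x;   /* 'i' only indexes the array: no non-indexing use of 'i'  */
    a[i] = i;   /* 'i' is also the stored value: a non-indexing use exists */
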
461
462/*
463 Function process_use.
464
465 Inputs:
466 - a USE in STMT_VINFO in a loop represented by LOOP_VINFO
467 - RELEVANT - enum value to be set in the STMT_VINFO of the stmt
468 that defined USE. This is done by calling mark_relevant and passing it
469 the WORKLIST (to add DEF_STMT to the WORKLIST in case it is relevant).
470 - FORCE is true if exist_non_indexing_operands_for_use_p check shouldn't
471 be performed.
472
473 Outputs:
474 Generally, LIVE_P and RELEVANT are used to define the liveness and
475 relevance info of the DEF_STMT of this USE:
476 STMT_VINFO_LIVE_P (DEF_stmt_vinfo) <-- live_p
477 STMT_VINFO_RELEVANT (DEF_stmt_vinfo) <-- relevant
478 Exceptions:
479 - case 1: If USE is used only for address computations (e.g. array indexing),
480 which does not need to be directly vectorized, then the liveness/relevance
481 of the respective DEF_STMT is left unchanged.
482 - case 2: If STMT_VINFO is a reduction phi and DEF_STMT is a reduction stmt,
483 we skip DEF_STMT cause it had already been processed.
484 - case 3: If DEF_STMT and STMT_VINFO are in different nests, then
485 "relevant" will be modified accordingly.
486
487 Return true if everything is as expected. Return false otherwise. */
488
489static opt_result
490process_use (stmt_vec_info stmt_vinfo, tree use, loop_vec_info loop_vinfo,
491 enum vect_relevant relevant, vec<stmt_vec_info> *worklist,
492 bool force)
493{
494 stmt_vec_info dstmt_vinfo;
495 enum vect_def_type dt;
496
497 /* case 1: we are only interested in uses that need to be vectorized. Uses
498 that are used for address computation are not considered relevant. */
499 if (!force && !exist_non_indexing_operands_for_use_p (use, stmt_vinfo))
500 return opt_result::success ();
501
502 if (!vect_is_simple_use (use, loop_vinfo, &dt, &dstmt_vinfo))
503 return opt_result::failure_at (stmt_vinfo->stmt,
504 "not vectorized:"
505 " unsupported use in stmt.\n");
506
507 if (!dstmt_vinfo)
508 return opt_result::success ();
509
510 basic_block def_bb = gimple_bb (dstmt_vinfo->stmt);
511 basic_block bb = gimple_bb (stmt_vinfo->stmt);
512
513 /* case 2: A reduction phi (STMT) defined by a reduction stmt (DSTMT_VINFO).
514 We have to force the stmt live since the epilogue loop needs it to
515 continue computing the reduction. */
516 if (gimple_code (stmt_vinfo->stmt) == GIMPLE_PHI
517 && STMT_VINFO_DEF_TYPE (stmt_vinfo) == vect_reduction_def
518 && gimple_code (dstmt_vinfo->stmt) != GIMPLE_PHI
519 && STMT_VINFO_DEF_TYPE (dstmt_vinfo) == vect_reduction_def
520 && bb->loop_father == def_bb->loop_father)
521 {
522 if (dump_enabled_p ())
523 dump_printf_loc (MSG_NOTE, vect_location,
524 "reduc-stmt defining reduc-phi in the same nest.\n");
525 vect_mark_relevant (worklist, dstmt_vinfo, relevant, true);
526 return opt_result::success ();
527 }
528
529 /* case 3a: outer-loop stmt defining an inner-loop stmt:
530 outer-loop-header-bb:
531 d = dstmt_vinfo
532 inner-loop:
533 stmt # use (d)
534 outer-loop-tail-bb:
535 ... */
536 if (flow_loop_nested_p (def_bb->loop_father, bb->loop_father))
537 {
538 if (dump_enabled_p ())
539 dump_printf_loc (MSG_NOTE, vect_location,
540 "outer-loop def-stmt defining inner-loop stmt.\n");
541
542 switch (relevant)
543 {
544 case vect_unused_in_scope:
545 relevant = (STMT_VINFO_DEF_TYPE (stmt_vinfo) == vect_nested_cycle) ?
546 vect_used_in_scope : vect_unused_in_scope;
547 break;
548
549 case vect_used_in_outer_by_reduction:
550 gcc_assert (STMT_VINFO_DEF_TYPE (stmt_vinfo) != vect_reduction_def);
551 relevant = vect_used_by_reduction;
552 break;
553
554 case vect_used_in_outer:
555 gcc_assert (STMT_VINFO_DEF_TYPE (stmt_vinfo) != vect_reduction_def);
556 relevant = vect_used_in_scope;
557 break;
558
559 case vect_used_in_scope:
560 break;
561
562 default:
563 gcc_unreachable ();
564 }
565 }
566
567 /* case 3b: inner-loop stmt defining an outer-loop stmt:
568 outer-loop-header-bb:
569 ...
570 inner-loop:
571 d = dstmt_vinfo
572 outer-loop-tail-bb (or outer-loop-exit-bb in double reduction):
573 stmt # use (d) */
574 else if (flow_loop_nested_p (bb->loop_father, def_bb->loop_father))
575 {
576 if (dump_enabled_p ())
577 dump_printf_loc (MSG_NOTE, vect_location,
578 "inner-loop def-stmt defining outer-loop stmt.\n");
579
580 switch (relevant)
581 {
582 case vect_unused_in_scope:
583 relevant = (STMT_VINFO_DEF_TYPE (stmt_vinfo) == vect_reduction_def
584 || STMT_VINFO_DEF_TYPE (stmt_vinfo) == vect_double_reduction_def) ?
585 vect_used_in_outer_by_reduction : vect_unused_in_scope;
586 break;
587
588 case vect_used_by_reduction:
589 case vect_used_only_live:
590 relevant = vect_used_in_outer_by_reduction;
591 break;
592
593 case vect_used_in_scope:
594 relevant = vect_used_in_outer;
595 break;
596
597 default:
598 gcc_unreachable ();
599 }
600 }
601 /* We are also not interested in uses on loop PHI backedges that are
602 inductions. Otherwise we'll needlessly vectorize the IV increment
603 and cause hybrid SLP for SLP inductions. Unless the PHI is live
604 of course. */
605 else if (gimple_code (stmt_vinfo->stmt) == GIMPLE_PHI
606 && STMT_VINFO_DEF_TYPE (stmt_vinfo) == vect_induction_def
607 && ! STMT_VINFO_LIVE_P (stmt_vinfo)
608 && (PHI_ARG_DEF_FROM_EDGE (stmt_vinfo->stmt,
609 loop_latch_edge (bb->loop_father))
610 == use))
611 {
612 if (dump_enabled_p ())
613 dump_printf_loc (MSG_NOTE, vect_location,
614 "induction value on backedge.\n");
615 return opt_result::success ();
616 }
617
618
619 vect_mark_relevant (worklist, dstmt_vinfo, relevant, false);
620 return opt_result::success ();
621}
622
623
624/* Function vect_mark_stmts_to_be_vectorized.
625
626 Not all stmts in the loop need to be vectorized. For example:
627
628 for i...
629 for j...
630 1. T0 = i + j
631 2. T1 = a[T0]
632
633 3. j = j + 1
634
635 Stmt 1 and 3 do not need to be vectorized, because loop control and
636 addressing of vectorized data-refs are handled differently.
637
638 This pass detects such stmts. */
639
640opt_result
641vect_mark_stmts_to_be_vectorized (loop_vec_info loop_vinfo, bool *fatal)
642{
643 class loop *loop = LOOP_VINFO_LOOP (loop_vinfo);
644 basic_block *bbs = LOOP_VINFO_BBS (loop_vinfo);
645 unsigned int nbbs = loop->num_nodes;
646 gimple_stmt_iterator si;
647 unsigned int i;
648 basic_block bb;
649 bool live_p;
650 enum vect_relevant relevant;
651
652 DUMP_VECT_SCOPE ("vect_mark_stmts_to_be_vectorized");
653
654 auto_vec<stmt_vec_info, 64> worklist;
655
656 /* 1. Init worklist. */
657 for (i = 0; i < nbbs; i++)
658 {
659 bb = bbs[i];
660 for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
661 {
662 stmt_vec_info phi_info = loop_vinfo->lookup_stmt (gsi_stmt (si));
663 if (dump_enabled_p ())
664 dump_printf_loc (MSG_NOTE, vect_location, "init: phi relevant? %G",
665 phi_info->stmt);
666
667 if (vect_stmt_relevant_p (phi_info, loop_vinfo, &relevant, &live_p))
668 vect_mark_relevant (&worklist, phi_info, relevant, live_p);
669 }
670 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
671 {
672 if (is_gimple_debug (gsi_stmt (si)))
673 continue;
674 stmt_vec_info stmt_info = loop_vinfo->lookup_stmt (gsi_stmt (si));
675 if (dump_enabled_p ())
676 dump_printf_loc (MSG_NOTE, vect_location,
677 "init: stmt relevant? %G", stmt_info->stmt);
678
679 if (vect_stmt_relevant_p (stmt_info, loop_vinfo, &relevant, &live_p))
680 vect_mark_relevant (&worklist, stmt_info, relevant, live_p);
681 }
682 }
683
684 /* 2. Process_worklist */
685 while (worklist.length () > 0)
686 {
687 use_operand_p use_p;
688 ssa_op_iter iter;
689
690 stmt_vec_info stmt_vinfo = worklist.pop ();
691 if (dump_enabled_p ())
692 dump_printf_loc (MSG_NOTE, vect_location,
693 "worklist: examine stmt: %G", stmt_vinfo->stmt);
694
695 /* Examine the USEs of STMT. For each USE, mark the stmt that defines it
696 (DEF_STMT) as relevant/irrelevant according to the relevance property
697 of STMT. */
698 relevant = STMT_VINFO_RELEVANT (stmt_vinfo);
699
700 /* Generally, the relevance property of STMT (in STMT_VINFO_RELEVANT) is
701 propagated as is to the DEF_STMTs of its USEs.
702
703 One exception is when STMT has been identified as defining a reduction
704 variable; in this case we set the relevance to vect_used_by_reduction.
705 This is because we distinguish between two kinds of relevant stmts -
706 those that are used by a reduction computation, and those that are
707 (also) used by a regular computation. This allows us later on to
708 identify stmts that are used solely by a reduction, and therefore the
709 order of the results that they produce does not have to be kept. */
710
711 switch (STMT_VINFO_DEF_TYPE (stmt_vinfo))
712 {
713 case vect_reduction_def:
714 gcc_assert (relevant != vect_unused_in_scope);
715 if (relevant != vect_unused_in_scope
716 && relevant != vect_used_in_scope
717 && relevant != vect_used_by_reduction
718 && relevant != vect_used_only_live)
719 return opt_result::failure_at
720 (stmt_vinfo->stmt, "unsupported use of reduction.\n");
721 break;
722
723 case vect_nested_cycle:
724 if (relevant != vect_unused_in_scope
725 && relevant != vect_used_in_outer_by_reduction
726 && relevant != vect_used_in_outer)
727 return opt_result::failure_at
728 (stmt_vinfo->stmt, "unsupported use of nested cycle.\n");
729 break;
730
731 case vect_double_reduction_def:
732 if (relevant != vect_unused_in_scope
733 && relevant != vect_used_by_reduction
734 && relevant != vect_used_only_live)
735 return opt_result::failure_at
736 (stmt_vinfo->stmt, "unsupported use of double reduction.\n");
737 break;
738
739 default:
740 break;
741 }
742
743 if (is_pattern_stmt_p (stmt_vinfo))
744 {
745 /* Pattern statements are not inserted into the code, so
746 FOR_EACH_PHI_OR_STMT_USE optimizes their operands out, and we
747 have to scan the RHS or function arguments instead. */
748 if (gassign *assign = dyn_cast <gassign *> (stmt_vinfo->stmt))
749 {
750 enum tree_code rhs_code = gimple_assign_rhs_code (assign);
751 tree op = gimple_assign_rhs1 (assign);
752
753 i = 1;
754 if (rhs_code == COND_EXPR && COMPARISON_CLASS_P (op))
755 {
756 opt_result res
757 = process_use (stmt_vinfo, TREE_OPERAND (op, 0),
758 loop_vinfo, relevant, &worklist, false);
759 if (!res)
760 return res;
761 res = process_use (stmt_vinfo, TREE_OPERAND (op, 1),
762 loop_vinfo, relevant, &worklist, false);
763 if (!res)
764 return res;
765 i = 2;
766 }
767 for (; i < gimple_num_ops (assign); i++)
768 {
769 op = gimple_op (assign, i);
770 if (TREE_CODE (op) == SSA_NAME)
771 {
772 opt_result res
773 = process_use (stmt_vinfo, op, loop_vinfo, relevant,
774 &worklist, false);
775 if (!res)
776 return res;
777 }
778 }
779 }
780 else if (gcall *call = dyn_cast <gcall *> (stmt_vinfo->stmt))
781 {
782 for (i = 0; i < gimple_call_num_args (call); i++)
783 {
784 tree arg = gimple_call_arg (call, i);
785 opt_result res
786 = process_use (stmt_vinfo, arg, loop_vinfo, relevant,
787 &worklist, false);
788 if (!res)
789 return res;
790 }
791 }
792 }
793 else
794 FOR_EACH_PHI_OR_STMT_USE (use_p, stmt_vinfo->stmt, iter, SSA_OP_USE)
795 {
796 tree op = USE_FROM_PTR (use_p);
797 opt_result res
798 = process_use (stmt_vinfo, op, loop_vinfo, relevant,
799 &worklist, false);
800 if (!res)
801 return res;
802 }
803
804 if (STMT_VINFO_GATHER_SCATTER_P (stmt_vinfo))
805 {
806 gather_scatter_info gs_info;
807 if (!vect_check_gather_scatter (stmt_vinfo, loop_vinfo, &gs_info))
808 gcc_unreachable ();
809 opt_result res
810 = process_use (stmt_vinfo, gs_info.offset, loop_vinfo, relevant,
811 &worklist, true);
812 if (!res)
813 {
814 if (fatal)
815 *fatal = false;
816 return res;
817 }
818 }
819 } /* while worklist */
820
821 return opt_result::success ();
822}
823
824/* Function vect_model_simple_cost.
825
826 Models cost for simple operations, i.e. those that only emit ncopies of a
827 single op. Right now, this does not account for multiple insns that could
828 be generated for the single vector op. We will handle that shortly. */
829
830static void
831vect_model_simple_cost (vec_info *,
832 stmt_vec_info stmt_info, int ncopies,
833 enum vect_def_type *dt,
834 int ndts,
835 slp_tree node,
836 stmt_vector_for_cost *cost_vec,
837 vect_cost_for_stmt kind = vector_stmt)
838{
839 int inside_cost = 0, prologue_cost = 0;
840
841 gcc_assert (cost_vec != NULL);
842
843 /* ??? Somehow we need to fix this at the callers. */
844 if (node)
845 ncopies = SLP_TREE_NUMBER_OF_VEC_STMTS (node);
846
847 if (!node)
848 /* Cost the "broadcast" of a scalar operand in to a vector operand.
849 Use scalar_to_vec to cost the broadcast, as elsewhere in the vector
850 cost model. */
851 for (int i = 0; i < ndts; i++)
852 if (dt[i] == vect_constant_def || dt[i] == vect_external_def)
853 prologue_cost += record_stmt_cost (cost_vec, 1, scalar_to_vec,
854 stmt_info, 0, vect_prologue);
855
856 /* Pass the inside-of-loop statements to the target-specific cost model. */
857 inside_cost += record_stmt_cost (cost_vec, ncopies, kind,
858 stmt_info, 0, vect_body);
859
860 if (dump_enabled_p ())
861 dump_printf_loc (MSG_NOTE, vect_location,
862 "vect_model_simple_cost: inside_cost = %d, "
863 "prologue_cost = %d .\n", inside_cost, prologue_cost);
864}
865
866
867/* Model cost for type demotion and promotion operations. PWR is
868 normally zero for single-step promotions and demotions. It will be
869 one if two-step promotion/demotion is required, and so on. NCOPIES
870 is the number of vector results (and thus number of instructions)
871 for the narrowest end of the operation chain. Each additional
872 step doubles the number of instructions required. If WIDEN_ARITH
873 is true the stmt is doing widening arithmetic. */
874
875static void
876vect_model_promotion_demotion_cost (stmt_vec_info stmt_info,
877 enum vect_def_type *dt,
878 unsigned int ncopies, int pwr,
879 stmt_vector_for_cost *cost_vec,
880 bool widen_arith)
881{
882 int i;
883 int inside_cost = 0, prologue_cost = 0;
884
885 for (i = 0; i < pwr + 1; i++)
886 {
887 inside_cost += record_stmt_cost (cost_vec, ncopies,
888 widen_arith
889 ? vector_stmt : vec_promote_demote,
890 stmt_info, 0, vect_body);
891 ncopies *= 2;
892 }
893
894 /* FORNOW: Assuming maximum 2 args per stmts. */
895 for (i = 0; i < 2; i++)
896 if (dt[i] == vect_constant_def || dt[i] == vect_external_def)
897 prologue_cost += record_stmt_cost (cost_vec, 1, vector_stmt,
898 stmt_info, 0, vect_prologue);
899
900 if (dump_enabled_p ())
901 dump_printf_loc (MSG_NOTE, vect_location,
902 "vect_model_promotion_demotion_cost: inside_cost = %d, "
903 "prologue_cost = %d .\n", inside_cost, prologue_cost);
904}
905
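As a worked example of the doubling: for a two-step promotion (pwr == 1) with ncopies == 2 at the narrow end, the loop above records 2 statements on the first iteration and 4 on the second (ncopies doubles each step), i.e. 6 inside-of-loop statements in total.
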
906/* Returns true if the current function returns DECL. */
907
908static bool
909cfun_returns (tree decl)
910{
911 edge_iterator ei;
912 edge e;
913 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
914 {
915 greturn *ret = safe_dyn_cast <greturn *> (last_stmt (e->src));
916 if (!ret)
917 continue;
918 if (gimple_return_retval (ret) == decl)
919 return true;
920 /* We often end up with an aggregate copy to the result decl,
921 handle that case as well. First skip intermediate clobbers
922 though. */
923 gimple *def = ret;
924 do
925 {
926 def = SSA_NAME_DEF_STMT (gimple_vuse (def));
927 }
928 while (gimple_clobber_p (def));
929 if (is_a <gassign *> (def)
930 && gimple_assign_lhs (def) == gimple_return_retval (ret)
931 && gimple_assign_rhs1 (def) == decl)
932 return true;
933 }
934 return false;
935}
936
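Concretely, cfun_returns (v) is true either when some return statement returns v directly, or in the common aggregate-copy form that the vuse walk above recognizes (a sketch of the GIMPLE; <retval> stands for the RESULT_DECL):

    <retval> = v;       /* gimple_assign: lhs is the returned value, rhs1 is 'v' */
    return <retval>;
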
937/* Function vect_model_store_cost
938
939 Models cost for stores. In the case of grouped accesses, one access
940 has the overhead of the grouped access attributed to it. */
941
942static void
943vect_model_store_cost (vec_info *vinfo, stmt_vec_info stmt_info, int ncopies,
944 vect_memory_access_type memory_access_type,
945 dr_alignment_support alignment_support_scheme,
946 int misalignment,
947 vec_load_store_type vls_type, slp_tree slp_node,
948 stmt_vector_for_cost *cost_vec)
949{
950 unsigned int inside_cost = 0, prologue_cost = 0;
951 stmt_vec_info first_stmt_info = stmt_info;
952 bool grouped_access_p = STMT_VINFO_GROUPED_ACCESS (stmt_info);
953
954 /* ??? Somehow we need to fix this at the callers. */
955 if (slp_node)
956 ncopies = SLP_TREE_NUMBER_OF_VEC_STMTS (slp_node);
957
958 if (vls_type == VLS_STORE_INVARIANT)
959 {
960 if (!slp_node)
961 prologue_cost += record_stmt_cost (cost_vec, 1, scalar_to_vec,
962 stmt_info, 0, vect_prologue);
963 }
964
965 /* Grouped stores update all elements in the group at once,
966 so we want the DR for the first statement. */
967 if (!slp_node && grouped_access_p)
968 first_stmt_info = DR_GROUP_FIRST_ELEMENT (stmt_info);
969
970 /* True if we should include any once-per-group costs as well as
971 the cost of the statement itself. For SLP we only get called
972 once per group anyhow. */
973 bool first_stmt_p = (first_stmt_info == stmt_info);
974
975 /* We assume that the cost of a single store-lanes instruction is
976 equivalent to the cost of DR_GROUP_SIZE separate stores. If a grouped
977 access is instead being provided by a permute-and-store operation,
978 include the cost of the permutes. */
979 if (first_stmt_p
980 && memory_access_type == VMAT_CONTIGUOUS_PERMUTE)
981 {
982 /* Uses a high and low interleave or shuffle operations for each
983 needed permute. */
984 int group_size = DR_GROUP_SIZE (first_stmt_info);
985 int nstmts = ncopies * ceil_log2 (group_size) * group_size;
986 inside_cost = record_stmt_cost (cost_vec, nstmts, vec_perm,
987 stmt_info, 0, vect_body);
988
989 if (dump_enabled_p ())
990 dump_printf_loc (MSG_NOTE, vect_location,
991 "vect_model_store_cost: strided group_size = %d .\n",
992 group_size);
993 }
994
995 tree vectype = STMT_VINFO_VECTYPE (stmt_info);
996 /* Costs of the stores. */
997 if (memory_access_type == VMAT_ELEMENTWISE
998 || memory_access_type == VMAT_GATHER_SCATTER)
999 {
1000 /* N scalar stores plus extracting the elements. */
1001 unsigned int assumed_nunits = vect_nunits_for_cost (vectype);
1002 inside_cost += record_stmt_cost (cost_vec,
1003 ncopies * assumed_nunits,
1004 scalar_store, stmt_info, 0, vect_body);
1005 }
1006 else
1007 vect_get_store_cost (vinfo, stmt_info, ncopies, alignment_support_scheme,
1008 misalignment, &inside_cost, cost_vec);
1009
1010 if (memory_access_type == VMAT_ELEMENTWISE
1011 || memory_access_type == VMAT_STRIDED_SLP)
1012 {
1013 /* N scalar stores plus extracting the elements. */
1014 unsigned int assumed_nunits = vect_nunits_for_cost (vectype);
1015 inside_cost += record_stmt_cost (cost_vec,
1016 ncopies * assumed_nunits,
1017 vec_to_scalar, stmt_info, 0, vect_body);
1018 }
1019
1020 /* When vectorizing a store into the function result assign
1021 a penalty if the function returns in a multi-register location.
1022 In this case we assume we'll end up with having to spill the
1023 vector result and do piecewise loads as a conservative estimate. */
1024 tree base = get_base_address (STMT_VINFO_DATA_REF (stmt_info)->ref);
1025 if (base
1026 && (TREE_CODE (base) == RESULT_DECL
1027 || (DECL_P (base) && cfun_returns (base)))
1028 && !aggregate_value_p (base, cfun->decl))
1029 {
1030 rtx reg = hard_function_value (TREE_TYPE (base), cfun->decl, 0, 1);
1031 /* ??? Handle PARALLEL in some way. */
1032 if (REG_P (reg))
1033 {
1034 int nregs = hard_regno_nregs (REGNO (reg), GET_MODE (reg));
1035 /* Assume that a single reg-reg move is possible and cheap,
1036 do not account for vector to gp register move cost. */
1037 if (nregs > 1)
1038 {
1039 /* Spill. */
1040 prologue_cost += record_stmt_cost (cost_vec, ncopies,
1041 vector_store,
1042 stmt_info, 0, vect_epilogue);
1043 /* Loads. */
1044 prologue_cost += record_stmt_cost (cost_vec, ncopies * nregs,
1045 scalar_load,
1046 stmt_info, 0, vect_epilogue);
1047 }
1048 }
1049 }
1050
1051 if (dump_enabled_p ())
1052 dump_printf_loc (MSG_NOTE, vect_location,
1053 "vect_model_store_cost: inside_cost = %d, "
1054 "prologue_cost = %d .\n", inside_cost, prologue_cost);
1055}
1056
1057
1058/* Calculate cost of DR's memory access. */
1059void
1060vect_get_store_cost (vec_info *, stmt_vec_info stmt_info, int ncopies,
1061 dr_alignment_support alignment_support_scheme,
1062 int misalignment,
1063 unsigned int *inside_cost,
1064 stmt_vector_for_cost *body_cost_vec)
1065{
1066 switch (alignment_support_scheme)
1067 {
1068 case dr_aligned:
1069 {
1070 *inside_cost += record_stmt_cost (body_cost_vec, ncopies,
1071 vector_store, stmt_info, 0,
1072 vect_body);
1073
1074 if (dump_enabled_p ())
1075 dump_printf_loc (MSG_NOTE, vect_location,
1076 "vect_model_store_cost: aligned.\n");
1077 break;
1078 }
1079
1080 case dr_unaligned_supported:
1081 {
1082 /* Here, we assign an additional cost for the unaligned store. */
1083 *inside_cost += record_stmt_cost (body_cost_vec, ncopies,
1084 unaligned_store, stmt_info,
1085 misalignment, vect_body);
1086 if (dump_enabled_p ())
1087 dump_printf_loc (MSG_NOTE, vect_location,
1088 "vect_model_store_cost: unaligned supported by "
1089 "hardware.\n");
1090 break;
1091 }
1092
1093 case dr_unaligned_unsupported:
1094 {
1095 *inside_cost = VECT_MAX_COST;
1096
1097 if (dump_enabled_p ())
1098 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
1099 "vect_model_store_cost: unsupported access.\n");
1100 break;
1101 }
1102
1103 default:
1104 gcc_unreachable ();
1105 }
1106}
1107
1108
1109/* Function vect_model_load_cost
1110
1111 Models cost for loads. In the case of grouped accesses, one access has
1112 the overhead of the grouped access attributed to it. Since unaligned
1113 accesses are supported for loads, we also account for the costs of the
1114 access scheme chosen. */
1115
1116static void
1117vect_model_load_cost (vec_info *vinfo,
1118 stmt_vec_info stmt_info, unsigned ncopies, poly_uint64 vf,
1119 vect_memory_access_type memory_access_type,
1120 dr_alignment_support alignment_support_scheme,
1121 int misalignment,
1122 gather_scatter_info *gs_info,
1123 slp_tree slp_node,
1124 stmt_vector_for_cost *cost_vec)
1125{
1126 unsigned int inside_cost = 0, prologue_cost = 0;
1127 bool grouped_access_p = STMT_VINFO_GROUPED_ACCESS (stmt_info);
1128
1129 gcc_assert (cost_vec);
1130
1131 /* ??? Somehow we need to fix this at the callers. */
1132 if (slp_node)
1133 ncopies = SLP_TREE_NUMBER_OF_VEC_STMTS (slp_node);
1134
1135 if (slp_node && SLP_TREE_LOAD_PERMUTATION (slp_node).exists ())
1136 {
1137 /* If the load is permuted then the alignment is determined by
1138 the first group element not by the first scalar stmt DR. */
1139 stmt_vec_info first_stmt_info = DR_GROUP_FIRST_ELEMENT (stmt_info);
1140 /* Record the cost for the permutation. */
1141 unsigned n_perms, n_loads;
1142 vect_transform_slp_perm_load (vinfo, slp_node, vNULL, NULL,
1143 vf, true, &n_perms, &n_loads);
1144 inside_cost += record_stmt_cost (cost_vec, n_perms, vec_perm,
1145 first_stmt_info, 0, vect_body);
1146
1147 /* And adjust the number of loads performed. This handles
1148 redundancies as well as loads that are later dead. */
1149 ncopies = n_loads;
1150 }
1151
1152 /* Grouped loads read all elements in the group at once,
1153 so we want the DR for the first statement. */
1154 stmt_vec_info first_stmt_info = stmt_info;
1155 if (!slp_node && grouped_access_p)
1156 first_stmt_info = DR_GROUP_FIRST_ELEMENT (stmt_info);
1157
1158 /* True if we should include any once-per-group costs as well as
1159 the cost of the statement itself. For SLP we only get called
1160 once per group anyhow. */
1161 bool first_stmt_p = (first_stmt_info == stmt_info);
1162
1163 /* An IFN_LOAD_LANES will load all its vector results, regardless of which
1164 ones we actually need. Account for the cost of unused results. */
1165 if (first_stmt_p && !slp_node && memory_access_type == VMAT_LOAD_STORE_LANES)
1166 {
1167 unsigned int gaps = DR_GROUP_SIZE (first_stmt_info);
1168 stmt_vec_info next_stmt_info = first_stmt_info;
1169 do
1170 {
1171 gaps -= 1;
1172 next_stmt_info = DR_GROUP_NEXT_ELEMENT (next_stmt_info);
1173 }
1174 while (next_stmt_info);
1175 if (gaps)
1176 {
1177 if (dump_enabled_p ())
1178 dump_printf_loc (MSG_NOTE, vect_location,
1179 "vect_model_load_cost: %d unused vectors.\n",
1180 gaps);
1181 vect_get_load_cost (vinfo, stmt_info, ncopies * gaps,
1182 alignment_support_scheme, misalignment, false,
1183 &inside_cost, &prologue_cost,
1184 cost_vec, cost_vec, true);
1185 }
1186 }
1187
1188 /* We assume that the cost of a single load-lanes instruction is
1189 equivalent to the cost of DR_GROUP_SIZE separate loads. If a grouped
1190 access is instead being provided by a load-and-permute operation,
1191 include the cost of the permutes. */
1192 if (first_stmt_p
1193 && memory_access_type == VMAT_CONTIGUOUS_PERMUTE)
1194 {
1195 /* Uses an even and odd extract operations or shuffle operations
1196 for each needed permute. */
1197 int group_size = DR_GROUP_SIZE (first_stmt_info);
1198 int nstmts = ncopies * ceil_log2 (group_size) * group_size;
1199 inside_cost += record_stmt_cost (cost_vec, nstmts, vec_perm,
1200 stmt_info, 0, vect_body);
1201
1202 if (dump_enabled_p ())
1203 dump_printf_loc (MSG_NOTE, vect_location,
1204 "vect_model_load_cost: strided group_size = %d .\n",
1205 group_size);
1206 }
1207
1208 /* The loads themselves. */
1209 if (memory_access_type == VMAT_ELEMENTWISE
1210 || memory_access_type == VMAT_GATHER_SCATTER)
1211 {
1212 tree vectype = STMT_VINFO_VECTYPE (stmt_info);
1213 unsigned int assumed_nunits = vect_nunits_for_cost (vectype);
1214 if (memory_access_type == VMAT_GATHER_SCATTER
1215 && gs_info->ifn == IFN_LAST && !gs_info->decl)
1216 /* For emulated gathers N offset vector element extracts
1217 (we assume the scalar scaling and ptr + offset add is consumed by
1218 the load). */
1219 inside_cost += record_stmt_cost (cost_vec, ncopies * assumed_nunits,
1220 vec_to_scalar, stmt_info, 0,
1221 vect_body);
1222 /* N scalar loads plus gathering them into a vector. */
1223 inside_cost += record_stmt_cost (cost_vec,
1224 ncopies * assumed_nunits,
1225 scalar_load, stmt_info, 0, vect_body);
1226 }
1227 else if (memory_access_type == VMAT_INVARIANT)
1228 {
1229 /* Invariant loads will ideally be hoisted and splat to a vector. */
1230 prologue_cost += record_stmt_cost (cost_vec, 1,
1231 scalar_load, stmt_info, 0,
1232 vect_prologue);
1233 prologue_cost += record_stmt_cost (cost_vec, 1,
1234 scalar_to_vec, stmt_info, 0,
1235 vect_prologue);
1236 }
1237 else
1238 vect_get_load_cost (vinfo, stmt_info, ncopies,
1239 alignment_support_scheme, misalignment, first_stmt_p,
1240 &inside_cost, &prologue_cost,
1241 cost_vec, cost_vec, true);
1242 if (memory_access_type == VMAT_ELEMENTWISE
1243 || memory_access_type == VMAT_STRIDED_SLP
1244 || (memory_access_type == VMAT_GATHER_SCATTER
1245 && gs_info->ifn == IFN_LAST && !gs_info->decl))
1246 inside_cost += record_stmt_cost (cost_vec, ncopies, vec_construct,
1247 stmt_info, 0, vect_body);
1248
1249 if (dump_enabled_p ())
1250 dump_printf_loc (MSG_NOTE, vect_location,
1251 "vect_model_load_cost: inside_cost = %d, "
1252 "prologue_cost = %d .\n", inside_cost, prologue_cost);
1253}
1254
1255
1256/* Calculate cost of DR's memory access. */
1257void
1258vect_get_load_cost (vec_info *, stmt_vec_info stmt_info, int ncopies,
1259 dr_alignment_support alignment_support_scheme,
1260 int misalignment,
1261 bool add_realign_cost, unsigned int *inside_cost,
1262 unsigned int *prologue_cost,
1263 stmt_vector_for_cost *prologue_cost_vec,
1264 stmt_vector_for_cost *body_cost_vec,
1265 bool record_prologue_costs)
1266{
1267 switch (alignment_support_scheme)
1268 {
1269 case dr_aligned:
1270 {
1271 *inside_cost += record_stmt_cost (body_cost_vec, ncopies, vector_load,
1272 stmt_info, 0, vect_body);
1273
1274 if (dump_enabled_p ())
1275 dump_printf_loc (MSG_NOTE, vect_location,
1276 "vect_model_load_cost: aligned.\n");
1277
1278 break;
1279 }
1280 case dr_unaligned_supported:
1281 {
1282 /* Here, we assign an additional cost for the unaligned load. */
1283 *inside_cost += record_stmt_cost (body_cost_vec, ncopies,
1284 unaligned_load, stmt_info,
1285 misalignment, vect_body);
1286
1287 if (dump_enabled_p ())
1288 dump_printf_loc (MSG_NOTE, vect_location,
1289 "vect_model_load_cost: unaligned supported by "
1290 "hardware.\n");
1291
1292 break;
1293 }
1294 case dr_explicit_realign:
1295 {
1296 *inside_cost += record_stmt_cost (body_cost_vec, ncopies * 2,
1297 vector_load, stmt_info, 0, vect_body);
1298 *inside_cost += record_stmt_cost (body_cost_vec, ncopies,
1299 vec_perm, stmt_info, 0, vect_body);
1300
1301 /* FIXME: If the misalignment remains fixed across the iterations of
1302 the containing loop, the following cost should be added to the
1303 prologue costs. */
1304 if (targetm.vectorize.builtin_mask_for_load)
1305 *inside_cost += record_stmt_cost (body_cost_vec, 1, vector_stmt,
1306 stmt_info, 0, vect_body);
1307
1308 if (dump_enabled_p ())
1309 dump_printf_loc (MSG_NOTE, vect_location,
1310 "vect_model_load_cost: explicit realign\n");
1311
1312 break;
1313 }
1314 case dr_explicit_realign_optimized:
1315 {
1316 if (dump_enabled_p ())
1317 dump_printf_loc (MSG_NOTE, vect_location,
1318 "vect_model_load_cost: unaligned software "
1319 "pipelined.\n");
1320
1321 /* Unaligned software pipeline has a load of an address, an initial
1322 load, and possibly a mask operation to "prime" the loop. However,
1323 if this is an access in a group of loads, which provide grouped
1324 access, then the above cost should only be considered for one
1325 access in the group. Inside the loop, there is a load op
1326 and a realignment op. */
1327
1328 if (add_realign_cost && record_prologue_costs)
1329 {
1330 *prologue_cost += record_stmt_cost (prologue_cost_vec, 2,
1331 vector_stmt, stmt_info,
1332 0, vect_prologue);
1333 if (targetm.vectorize.builtin_mask_for_load)
1334 *prologue_cost += record_stmt_cost (prologue_cost_vec, 1,
1335 vector_stmt, stmt_info,
1336 0, vect_prologue);
1337 }
1338
1339 *inside_cost += record_stmt_cost (body_cost_vec, ncopies, vector_load,
1340 stmt_info, 0, vect_body);
1341 *inside_cost += record_stmt_cost (body_cost_vec, ncopies, vec_perm,
1342 stmt_info, 0, vect_body);
1343
1344 if (dump_enabled_p ())
1345 dump_printf_loc (MSG_NOTE, vect_location,
1346 "vect_model_load_cost: explicit realign optimized"
1347 "\n");
1348
1349 break;
1350 }
1351
1352 case dr_unaligned_unsupported:
1353 {
1354 *inside_cost = VECT_MAX_COST;
1355
1356 if (dump_enabled_p ())
1357 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
1358 "vect_model_load_cost: unsupported access.\n");
1359 break;
1360 }
1361
1362 default:
1363 gcc_unreachable ();
1364 }
1365}
1366
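
/* Illustration (not part of tree-vect-stmts.cc): a minimal standalone sketch
   of the per-scheme cost bookkeeping above, using hypothetical unit costs in
   place of record_stmt_cost and the target cost hooks.  */

enum alignment_scheme { ALIGNED, UNALIGNED_SUPPORTED, EXPLICIT_REALIGN };

static const unsigned VEC_LOAD_COST = 1;	/* hypothetical unit cost */
static const unsigned UNALIGNED_LOAD_COST = 2;	/* hypothetical unit cost */
static const unsigned PERM_COST = 1;		/* hypothetical unit cost */

static void
add_load_cost_sketch (alignment_scheme scheme, unsigned ncopies,
		      unsigned *inside_cost)
{
  switch (scheme)
    {
    case ALIGNED:
      *inside_cost += ncopies * VEC_LOAD_COST;
      break;
    case UNALIGNED_SUPPORTED:
      /* One hardware-supported unaligned load per copy.  */
      *inside_cost += ncopies * UNALIGNED_LOAD_COST;
      break;
    case EXPLICIT_REALIGN:
      /* Two loads plus one permute per copy, as in dr_explicit_realign.  */
      *inside_cost += ncopies * (2 * VEC_LOAD_COST + PERM_COST);
      break;
    }
}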
1367/* Insert the new stmt NEW_STMT at *GSI or at the appropriate place in
1368 the loop preheader for the vectorized stmt STMT_VINFO. */
1369
1370static void
1371vect_init_vector_1 (vec_info *vinfo, stmt_vec_info stmt_vinfo, gimple *new_stmt,
1372 gimple_stmt_iterator *gsi)
1373{
1374 if (gsi)
1375 vect_finish_stmt_generation (vinfo, stmt_vinfo, new_stmt, gsi);
1376 else
1377 vinfo->insert_on_entry (stmt_vinfo, new_stmt);
1378
1379 if (dump_enabled_p ())
1380 dump_printf_loc (MSG_NOTE, vect_location,
1381 "created new init_stmt: %G", new_stmt);
1382}
1383
1384/* Function vect_init_vector.
1385
1386 Insert a new stmt (INIT_STMT) that initializes a new variable of type
1387 TYPE with the value VAL.  If TYPE is a vector type and VAL does not have
1388 a vector type, a vector with all elements equal to VAL is created first.
1389 Place the initialization at GSI if it is not NULL. Otherwise, place the
1390 initialization at the loop preheader.
1391 Return the DEF of INIT_STMT.
1392 It will be used in the vectorization of STMT_INFO. */
1393
1394tree
1395vect_init_vector (vec_info *vinfo, stmt_vec_info stmt_info, tree val, tree type,
1396 gimple_stmt_iterator *gsi)
1397{
1398 gimple *init_stmt;
1399 tree new_temp;
1400
1401 /* We abuse this function to push something to an SSA name with initial 'val'. */
1402 if (! useless_type_conversion_p (type, TREE_TYPE (val)))
1403 {
1404 gcc_assert (TREE_CODE (type) == VECTOR_TYPE);
1405 if (! types_compatible_p (TREE_TYPE (type), TREE_TYPE (val)))
1406 {
1407 /* Scalar boolean value should be transformed into
1408 all zeros or all ones value before building a vector. */
1409 if (VECTOR_BOOLEAN_TYPE_P (type))
1410 {
1411 tree true_val = build_all_ones_cst (TREE_TYPE (type));
1412 tree false_val = build_zero_cst (TREE_TYPE (type));
1413
1414 if (CONSTANT_CLASS_P (val))
1415 val = integer_zerop (val) ? false_val : true_val;
1416 else
1417 {
1418 new_temp = make_ssa_name (TREE_TYPE (type));
1419 init_stmt = gimple_build_assign (new_temp, COND_EXPR,
1420 val, true_val, false_val);
1421 vect_init_vector_1 (vinfo, stmt_info, init_stmt, gsi);
1422 val = new_temp;
1423 }
1424 }
1425 else
1426 {
1427 gimple_seq stmts = NULL;
1428 if (! INTEGRAL_TYPE_P (TREE_TYPE (val)))
1429 val = gimple_build (&stmts, VIEW_CONVERT_EXPR,
1430 TREE_TYPE (type), val);
1431 else
1432 /* ??? Condition vectorization expects us to do
1433 promotion of invariant/external defs. */
1434 val = gimple_convert (&stmts, TREE_TYPE (type), val);
1435 for (gimple_stmt_iterator gsi2 = gsi_start (stmts);
1436 !gsi_end_p (gsi2); )
1437 {
1438 init_stmt = gsi_stmt (gsi2);
1439 gsi_remove (&gsi2, false);
1440 vect_init_vector_1 (vinfo, stmt_info, init_stmt, gsi);
1441 }
1442 }
1443 }
1444 val = build_vector_from_val (type, val);
1445 }
1446
1447 new_temp = vect_get_new_ssa_name (type, vect_simple_var, "cst_");
1448 init_stmt = gimple_build_assign (new_temp, val);
1449 vect_init_vector_1 (vinfo, stmt_info, init_stmt, gsi);
1450 return new_temp;
1451}
1452
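
/* Illustration (not part of tree-vect-stmts.cc): the boolean canonicalization
   that vect_init_vector performs before splatting, sketched with a fixed
   4-lane std::array standing in for a GIMPLE vector.  */

#include <array>
#include <cstdint>

static std::array<int32_t, 4>
splat_bool_sketch (bool b)
{
  /* A scalar boolean becomes an all-ones or all-zeros element first.  */
  int32_t lane = b ? -1 : 0;
  return { lane, lane, lane, lane };
}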
1453
1454/* Function vect_get_vec_defs_for_operand.
1455
1456 OP is an operand in STMT_VINFO. This function returns a vector of
1457 NCOPIES defs that will be used in the vectorized stmts for STMT_VINFO.
1458
1459 In the case that OP is an SSA_NAME which is defined in the loop, then
1460 STMT_VINFO_VEC_STMTS of the defining stmt holds the relevant defs.
1461
1462 In case OP is an invariant or constant, a new stmt that creates a vector def
1463 needs to be introduced.  VECTYPE may be used to specify a required type for
1464 the vector invariant. */
1465
1466void
1467vect_get_vec_defs_for_operand (vec_info *vinfo, stmt_vec_info stmt_vinfo,
1468 unsigned ncopies,
1469 tree op, vec<tree> *vec_oprnds, tree vectype)
1470{
1471 gimple *def_stmt;
1472 enum vect_def_type dt;
1473 bool is_simple_use;
1474 loop_vec_info loop_vinfo = dyn_cast <loop_vec_info> (vinfo);
1475
1476 if (dump_enabled_p ())
1477 dump_printf_loc (MSG_NOTE, vect_location,
1478 "vect_get_vec_defs_for_operand: %T\n", op);
1479
1480 stmt_vec_info def_stmt_info;
1481 is_simple_use = vect_is_simple_use (op, loop_vinfo, &dt,
1482 &def_stmt_info, &def_stmt);
1483 gcc_assert (is_simple_use);
1484 if (def_stmt && dump_enabled_p ())
1485 dump_printf_loc (MSG_NOTE, vect_location, " def_stmt = %G", def_stmt);
1486
1487 vec_oprnds->create (ncopies);
1488 if (dt == vect_constant_def || dt == vect_external_def)
1489 {
1490 tree stmt_vectype = STMT_VINFO_VECTYPE (stmt_vinfo);
1491 tree vector_type;
1492
1493 if (vectype)
1494 vector_type = vectype;
1495 else if (VECT_SCALAR_BOOLEAN_TYPE_P (TREE_TYPE (op))
1496 && VECTOR_BOOLEAN_TYPE_P (stmt_vectype))
1497 vector_type = truth_type_for (stmt_vectype);
1498 else
1499 vector_type = get_vectype_for_scalar_type (loop_vinfo, TREE_TYPE (op));
1500
1501 gcc_assert (vector_type);
1502 tree vop = vect_init_vector (vinfo, stmt_vinfo, op, vector_type, NULL);
1503 while (ncopies--)
1504 vec_oprnds->quick_push (vop);
1505 }
1506 else
1507 {
1508 def_stmt_info = vect_stmt_to_vectorize (def_stmt_info);
1509 gcc_assert (STMT_VINFO_VEC_STMTS (def_stmt_info).length () == ncopies);
1510 for (unsigned i = 0; i < ncopies; ++i)
1511 vec_oprnds->quick_push (gimple_get_lhs
1512 (STMT_VINFO_VEC_STMTS (def_stmt_info)[i]));
1513 }
1514}
1515
1516
1517/* Get vectorized definitions for OP0 and OP1. */
1518
1519void
1520vect_get_vec_defs (vec_info *vinfo, stmt_vec_info stmt_info, slp_tree slp_node,
1521 unsigned ncopies,
1522 tree op0, vec<tree> *vec_oprnds0, tree vectype0,
1523 tree op1, vec<tree> *vec_oprnds1, tree vectype1,
1524 tree op2, vec<tree> *vec_oprnds2, tree vectype2,
1525 tree op3, vec<tree> *vec_oprnds3, tree vectype3)
1526{
1527 if (slp_node)
1528 {
1529 if (op0)
1530 vect_get_slp_defs (SLP_TREE_CHILDREN (slp_node)[0], vec_oprnds0);
1531 if (op1)
1532 vect_get_slp_defs (SLP_TREE_CHILDREN (slp_node)[1], vec_oprnds1);
1533 if (op2)
1534 vect_get_slp_defs (SLP_TREE_CHILDREN (slp_node)[2], vec_oprnds2);
1535 if (op3)
1536 vect_get_slp_defs (SLP_TREE_CHILDREN (slp_node)[3], vec_oprnds3);
1537 }
1538 else
1539 {
1540 if (op0)
1541 vect_get_vec_defs_for_operand (vinfo, stmt_info, ncopies,
1542 op0, vec_oprnds0, vectype0);
1543 if (op1)
1544 vect_get_vec_defs_for_operand (vinfo, stmt_info, ncopies,
1545 op1, vec_oprnds1, vectype1);
1546 if (op2)
1547 vect_get_vec_defs_for_operand (vinfo, stmt_info, ncopies,
1548 op2, vec_oprnds2, vectype2);
1549 if (op3)
1550 vect_get_vec_defs_for_operand (vinfo, stmt_info, ncopies,
1551 op3, vec_oprnds3, vectype3);
1552 }
1553}
1554
1555void
1556vect_get_vec_defs (vec_info *vinfo, stmt_vec_info stmt_info, slp_tree slp_node,
1557 unsigned ncopies,
1558 tree op0, vec<tree> *vec_oprnds0,
1559 tree op1, vec<tree> *vec_oprnds1,
1560 tree op2, vec<tree> *vec_oprnds2,
1561 tree op3, vec<tree> *vec_oprnds3)
1562{
1563 vect_get_vec_defs (vinfo, stmt_info, slp_node, ncopies,
1564 op0, vec_oprnds0, NULL_TREE,
1565 op1, vec_oprnds1, NULL_TREE,
1566 op2, vec_oprnds2, NULL_TREE,
1567 op3, vec_oprnds3, NULL_TREE);
1568}
1569
1570/* Helper function called by vect_finish_replace_stmt and
1571 vect_finish_stmt_generation. Set the location of the new
1572 statement and, if it can throw, add it to the containing EH region. */
1573
1574static void
1575vect_finish_stmt_generation_1 (vec_info *,
1576 stmt_vec_info stmt_info, gimple *vec_stmt)
1577{
1578 if (dump_enabled_p ())
1579 dump_printf_loc (MSG_NOTE, vect_location, "add new stmt: %G", vec_stmt);
1580
1581 if (stmt_info)
1582 {
1583 gimple_set_location (vec_stmt, gimple_location (stmt_info->stmt));
1584
1585 /* While EH edges will generally prevent vectorization, stmt might
1586 e.g. be in a must-not-throw region. Ensure newly created stmts
1587 that could throw are part of the same region. */
1588 int lp_nr = lookup_stmt_eh_lp (stmt_info->stmt);
1589 if (lp_nr != 0 && stmt_could_throw_p (cfun, vec_stmt))
1590 add_stmt_to_eh_lp (vec_stmt, lp_nr);
1591 }
1592 else
1593 gcc_assert (!stmt_could_throw_p (cfun, vec_stmt));
1594}
1595
1596/* Replace the scalar statement STMT_INFO with a new vector statement VEC_STMT,
1597 which sets the same scalar result as STMT_INFO did, and set the new
1598 statement's location and EH information. */
1599
1600void
1601vect_finish_replace_stmt (vec_info *vinfo,
1602 stmt_vec_info stmt_info, gimple *vec_stmt)
1603{
1604 gimple *scalar_stmt = vect_orig_stmt (stmt_info)->stmt;
1605 gcc_assert (gimple_get_lhs (scalar_stmt) == gimple_get_lhs (vec_stmt));
1606
1607 gimple_stmt_iterator gsi = gsi_for_stmt (scalar_stmt);
1608 gsi_replace (&gsi, vec_stmt, true);
1609
1610 vect_finish_stmt_generation_1 (vinfo, stmt_info, vec_stmt);
1611}
1612
1613/* Add VEC_STMT to the vectorized implementation of STMT_INFO and insert it
1614 before *GSI, updating virtual SSA form and EH information as needed. */
1615
1616void
1617vect_finish_stmt_generation (vec_info *vinfo,
1618 stmt_vec_info stmt_info, gimple *vec_stmt,
1619 gimple_stmt_iterator *gsi)
1620{
1621 gcc_assert (!stmt_info || gimple_code (stmt_info->stmt) != GIMPLE_LABEL);
1622
1623 if (!gsi_end_p (*gsi)
1624 && gimple_has_mem_ops (vec_stmt))
1625 {
1626 gimple *at_stmt = gsi_stmt (*gsi);
1627 tree vuse = gimple_vuse (at_stmt);
1628 if (vuse && TREE_CODE (vuse) == SSA_NAME)
1629 {
1630 tree vdef = gimple_vdef (at_stmt);
1631 gimple_set_vuse (vec_stmt, gimple_vuse (at_stmt));
1632 gimple_set_modified (vec_stmt, true);
1633 /* If we have an SSA vuse and insert a store, update virtual
1634 SSA form to avoid triggering the renamer. Do so only
1635 if we can easily see all uses - which is what almost always
1636 happens with the way vectorized stmts are inserted. */
1637 if ((vdef && TREE_CODE (vdef) == SSA_NAME)
1638 && ((is_gimple_assign (vec_stmt)
1639 && !is_gimple_reg (gimple_assign_lhs (vec_stmt)))
1640 || (is_gimple_call (vec_stmt)
1641 && (!(gimple_call_flags (vec_stmt)
1642 & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
1643 || (gimple_call_lhs (vec_stmt)
1644 && !is_gimple_reg (gimple_call_lhs (vec_stmt)))))))
1645 {
1646 tree new_vdef = copy_ssa_name (vuse, vec_stmt);
1647 gimple_set_vdef (vec_stmt, new_vdef);
1648 SET_USE (gimple_vuse_op (at_stmt), new_vdef);
1649 }
1650 }
1651 }
1652 gsi_insert_before (gsi, vec_stmt, GSI_SAME_STMT);
1653 vect_finish_stmt_generation_1 (vinfo, stmt_info, vec_stmt);
1654}
1655
1656/* We want to vectorize a call to combined function CFN with function
1657 decl FNDECL, using VECTYPE_OUT as the type of the output and VECTYPE_IN
1658 as the types of all inputs. Check whether this is possible using
1659 an internal function, returning its code if so or IFN_LAST if not. */
1660
1661static internal_fn
1662vectorizable_internal_function (combined_fn cfn, tree fndecl,
1663 tree vectype_out, tree vectype_in)
1664{
1665 internal_fn ifn;
1666 if (internal_fn_p (cfn))
1667 ifn = as_internal_fn (cfn);
1668 else
1669 ifn = associated_internal_fn (fndecl);
1670 if (ifn != IFN_LAST && direct_internal_fn_p (ifn))
1671 {
1672 const direct_internal_fn_info &info = direct_internal_fn (ifn);
1673 if (info.vectorizable)
1674 {
1675 tree type0 = (info.type0 < 0 ? vectype_out : vectype_in);
1676 tree type1 = (info.type1 < 0 ? vectype_out : vectype_in);
1677 if (direct_internal_fn_supported_p (ifn, tree_pair (type0, type1),
1678 OPTIMIZE_FOR_SPEED))
1679 return ifn;
1680 }
1681 }
1682 return IFN_LAST;
1683}
1684
1685
1686static tree permute_vec_elements (vec_info *, tree, tree, tree, stmt_vec_info,
1687 gimple_stmt_iterator *);
1688
1689/* Check whether a load or store statement in the loop described by
1690 LOOP_VINFO is possible in a loop using partial vectors. This is
1691 testing whether the vectorizer pass has the appropriate support,
1692 as well as whether the target does.
1693
1694 VLS_TYPE says whether the statement is a load or store and VECTYPE
1695 is the type of the vector being loaded or stored. SLP_NODE is the SLP
1696 node that contains the statement, or null if none. MEMORY_ACCESS_TYPE
1697 says how the load or store is going to be implemented and GROUP_SIZE
1698 is the number of load or store statements in the containing group.
1699 If the access is a gather load or scatter store, GS_INFO describes
1700 its arguments. If the load or store is conditional, SCALAR_MASK is the
1701 condition under which it occurs.
1702
1703 Clear LOOP_VINFO_CAN_USE_PARTIAL_VECTORS_P if a loop using partial
1704 vectors is not supported, otherwise record the required rgroup control
1705 types. */
1706
1707static void
1708check_load_store_for_partial_vectors (loop_vec_info loop_vinfo, tree vectype,
1709 slp_tree slp_node,
1710 vec_load_store_type vls_type,
1711 int group_size,
1712 vect_memory_access_type
1713 memory_access_type,
1714 gather_scatter_info *gs_info,
1715 tree scalar_mask)
1716{
1717 /* Invariant loads need no special support. */
1718 if (memory_access_type == VMAT_INVARIANT)
1719 return;
1720
1721 unsigned int nvectors;
1722 if (slp_node)
1723 nvectors = SLP_TREE_NUMBER_OF_VEC_STMTS (slp_node);
1724 else
1725 nvectors = vect_get_num_copies (loop_vinfo, vectype);
1726
1727 vec_loop_masks *masks = &LOOP_VINFO_MASKS (loop_vinfo);
1728 machine_mode vecmode = TYPE_MODE (vectype);
1729 bool is_load = (vls_type == VLS_LOAD);
1730 if (memory_access_type == VMAT_LOAD_STORE_LANES)
1731 {
1732 if (is_load
1733 ? !vect_load_lanes_supported (vectype, group_size, true)
1734 : !vect_store_lanes_supported (vectype, group_size, true))
1735 {
1736 if (dump_enabled_p ())
1737 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
1738 "can't operate on partial vectors because"
1739 " the target doesn't have an appropriate"
1740 " load/store-lanes instruction.\n");
1741 LOOP_VINFO_CAN_USE_PARTIAL_VECTORS_P (loop_vinfo) = false;
1742 return;
1743 }
1744 vect_record_loop_mask (loop_vinfo, masks, nvectors, vectype,
1745 scalar_mask);
1746 return;
1747 }
1748
1749 if (memory_access_type == VMAT_GATHER_SCATTER)
1750 {
1751 internal_fn ifn = (is_load
1752 ? IFN_MASK_GATHER_LOAD
1753 : IFN_MASK_SCATTER_STORE);
1754 if (!internal_gather_scatter_fn_supported_p (ifn, vectype,
1755 gs_info->memory_type,
1756 gs_info->offset_vectype,
1757 gs_info->scale))
1758 {
1759 if (dump_enabled_p ())
1760 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
1761 "can't operate on partial vectors because"
1762 " the target doesn't have an appropriate"
1763 " gather load or scatter store instruction.\n");
1764 LOOP_VINFO_CAN_USE_PARTIAL_VECTORS_P (loop_vinfo) = false;
1765 return;
1766 }
1767 vect_record_loop_mask (loop_vinfo, masks, nvectors, vectype,
1768 scalar_mask);
1769 return;
1770 }
1771
1772 if (memory_access_type != VMAT_CONTIGUOUS
1773 && memory_access_type != VMAT_CONTIGUOUS_PERMUTE)
1774 {
1775 /* Element X of the data must come from iteration i * VF + X of the
1776 scalar loop. We need more work to support other mappings. */
1777 if (dump_enabled_p ())
1778 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
1779 "can't operate on partial vectors because an"
1780 " access isn't contiguous.\n");
1781 LOOP_VINFO_CAN_USE_PARTIAL_VECTORS_P (loop_vinfo) = false;
1782 return;
1783 }
1784
1785 if (!VECTOR_MODE_P (vecmode))
1786 {
1787 if (dump_enabled_p ())
1788 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
1789 "can't operate on partial vectors when emulating"
1790 " vector operations.\n");
1791 LOOP_VINFO_CAN_USE_PARTIAL_VECTORS_P (loop_vinfo) = false;
1792 return;
1793 }
1794
1795 /* We might load more scalars than we need for permuting SLP loads.
1796 We checked in get_group_load_store_type that the extra elements
1797 don't leak into a new vector. */
1798 auto group_memory_nvectors = [](poly_uint64 size, poly_uint64 nunits)
1799 {
1800 unsigned int nvectors;
1801 if (can_div_away_from_zero_p (size, nunits, &nvectors))
1802 return nvectors;
1803 gcc_unreachable ();
1804 };
1805
1806 poly_uint64 nunits = TYPE_VECTOR_SUBPARTS (vectype);
1807 poly_uint64 vf = LOOP_VINFO_VECT_FACTOR (loop_vinfo);
1808 machine_mode mask_mode;
1809 bool using_partial_vectors_p = false;
1810 if (targetm.vectorize.get_mask_mode (vecmode).exists (&mask_mode)
1811 && can_vec_mask_load_store_p (vecmode, mask_mode, is_load))
1812 {
1813 nvectors = group_memory_nvectors (group_size * vf, nunits);
1814 vect_record_loop_mask (loop_vinfo, masks, nvectors, vectype, scalar_mask);
1815 using_partial_vectors_p = true;
1816 }
1817
1818 machine_mode vmode;
1819 if (get_len_load_store_mode (vecmode, is_load).exists (&vmode))
1820 {
1821 nvectors = group_memory_nvectors (group_size * vf, nunits);
1822 vec_loop_lens *lens = &LOOP_VINFO_LENS (loop_vinfo);
1823 unsigned factor = (vecmode == vmode) ? 1 : GET_MODE_UNIT_SIZE (vecmode);
1824 vect_record_loop_len (loop_vinfo, lens, nvectors, vectype, factor);
1825 using_partial_vectors_p = true;
1826 }
1827
1828 if (!using_partial_vectors_p)
1829 {
1830 if (dump_enabled_p ())
1831 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
1832 "can't operate on partial vectors because the"
1833 " target doesn't have the appropriate partial"
1834 " vectorization load or store.\n");
1835 LOOP_VINFO_CAN_USE_PARTIAL_VECTORS_P (loop_vinfo) = false;
1836 }
1837}
1838
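
/* Illustration (not part of tree-vect-stmts.cc): the rounding-up division
   performed by the group_memory_nvectors lambda above, sketched with plain
   unsigned arithmetic instead of poly_uint64.  */

static unsigned
group_memory_nvectors_sketch (unsigned size, unsigned nunits)
{
  /* Constant-operand analogue of can_div_away_from_zero_p.  */
  return (size + nunits - 1) / nunits;
}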
1839/* Return the mask input to a masked load or store. VEC_MASK is the vectorized
1840 form of the scalar mask condition and LOOP_MASK, if nonnull, is the mask
1841 that needs to be applied to all loads and stores in a vectorized loop.
1842 Return VEC_MASK if LOOP_MASK is null or if VEC_MASK is already masked,
1843 otherwise return VEC_MASK & LOOP_MASK.
1844
1845 MASK_TYPE is the type of both masks. If new statements are needed,
1846 insert them before GSI. */
1847
1848static tree
1849prepare_vec_mask (loop_vec_info loop_vinfo, tree mask_type, tree loop_mask,
1850 tree vec_mask, gimple_stmt_iterator *gsi)
1851{
1852 gcc_assert (useless_type_conversion_p (mask_type, TREE_TYPE (vec_mask)));
1853 if (!loop_mask)
1854 return vec_mask;
1855
1856 gcc_assert (TREE_TYPE (loop_mask) == mask_type);
1857
1858 if (loop_vinfo->vec_cond_masked_set.contains ({ vec_mask, loop_mask }))
1859 return vec_mask;
1860
1861 tree and_res = make_temp_ssa_name (mask_type, NULL, "vec_mask_and");
1862 gimple *and_stmt = gimple_build_assign (and_res, BIT_AND_EXPR,
1863 vec_mask, loop_mask);
1864
1865 gsi_insert_before (gsi, and_stmt, GSI_SAME_STMT);
1866 return and_res;
1867}
1868
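
/* Illustration (not part of tree-vect-stmts.cc): the mask combination done by
   prepare_vec_mask, sketched with a 64-lane bitmask standing in for a vector
   boolean.  A lane stays active only if both masks enable it.  */

#include <cstdint>

static uint64_t
prepare_mask_sketch (uint64_t vec_mask, const uint64_t *loop_mask)
{
  if (!loop_mask)
    return vec_mask;		/* fully active loop: condition mask suffices */
  return vec_mask & *loop_mask;	/* the BIT_AND_EXPR built above */
}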
1869/* Determine whether we can use a gather load or scatter store to vectorize
1870 strided load or store STMT_INFO by truncating the current offset to a
1871 smaller width. We need to be able to construct an offset vector:
1872
1873 { 0, X, X*2, X*3, ... }
1874
1875 without loss of precision, where X is STMT_INFO's DR_STEP.
1876
1877 Return true if this is possible, describing the gather load or scatter
1878 store in GS_INFO. MASKED_P is true if the load or store is conditional. */
1879
1880static bool
1881vect_truncate_gather_scatter_offset (stmt_vec_info stmt_info,
1882 loop_vec_info loop_vinfo, bool masked_p,
1883 gather_scatter_info *gs_info)
1884{
1885 dr_vec_info *dr_info = STMT_VINFO_DR_INFO (stmt_info);
1886 data_reference *dr = dr_info->dr;
1887 tree step = DR_STEP (dr);
1888 if (TREE_CODE (step) != INTEGER_CST)
1889 {
1890 /* ??? Perhaps we could use range information here? */
1891 if (dump_enabled_p ())
1892 dump_printf_loc (MSG_NOTE, vect_location,
1893 "cannot truncate variable step.\n");
1894 return false;
1895 }
1896
1897 /* Get the number of bits in an element. */
1898 tree vectype = STMT_VINFO_VECTYPE (stmt_info);
1899 scalar_mode element_mode = SCALAR_TYPE_MODE (TREE_TYPE (vectype));
1900 unsigned int element_bits = GET_MODE_BITSIZE (element_mode);
1901
1902 /* Set COUNT to the upper limit on the number of elements - 1.
1903 Start with the maximum vectorization factor. */
1904 unsigned HOST_WIDE_INT count = vect_max_vf (loop_vinfo) - 1;
1905
1906 /* Try lowering COUNT to the number of scalar latch iterations. */
1907 class loop *loop = LOOP_VINFO_LOOP (loop_vinfo);
1908 widest_int max_iters;
1909 if (max_loop_iterations (loop, &max_iters)
1910 && max_iters < count)
1911 count = max_iters.to_shwi ();
1912
1913 /* Try scales of 1 and the element size. */
1914 int scales[] = { 1, vect_get_scalar_dr_size (dr_info) };
1915 wi::overflow_type overflow = wi::OVF_NONE;
1916 for (int i = 0; i < 2; ++i)
1917 {
1918 int scale = scales[i];
1919 widest_int factor;
1920 if (!wi::multiple_of_p (wi::to_widest (step), scale, SIGNED, &factor))
1921 continue;
1922
1923 /* Determine the minimum precision of (COUNT - 1) * STEP / SCALE. */
1924 widest_int range = wi::mul (count, factor, SIGNED, &overflow);
1925 if (overflow)
1926 continue;
1927 signop sign = range >= 0 ? UNSIGNED : SIGNED;
1928 unsigned int min_offset_bits = wi::min_precision (range, sign);
1929
1930 /* Find the narrowest viable offset type. */
1931 unsigned int offset_bits = 1U << ceil_log2 (min_offset_bits);
1932 tree offset_type = build_nonstandard_integer_type (offset_bits,
1933 sign == UNSIGNED);
1934
1935 /* See whether the target supports the operation with an offset
1936 no narrower than OFFSET_TYPE. */
1937 tree memory_type = TREE_TYPE (DR_REF (dr));
1938 if (!vect_gather_scatter_fn_p (loop_vinfo, DR_IS_READ (dr), masked_p,
1939 vectype, memory_type, offset_type, scale,
1940 &gs_info->ifn, &gs_info->offset_vectype)
1941 || gs_info->ifn == IFN_LAST)
1942 continue;
1943
1944 gs_info->decl = NULL_TREE;
1945 /* Logically the sum of DR_BASE_ADDRESS, DR_INIT and DR_OFFSET,
1946 but we don't need to store that here. */
1947 gs_info->base = NULL_TREE;
1948 gs_info->element_type = TREE_TYPE (vectype);
1949 gs_info->offset = fold_convert (offset_type, step);
1950 gs_info->offset_dt = vect_constant_def;
1951 gs_info->scale = scale;
1952 gs_info->memory_type = memory_type;
1953 return true;
1954 }
1955
1956 if (overflow && dump_enabled_p ())
1957 dump_printf_loc (MSG_NOTE, vect_location,
1958 "truncating gather/scatter offset to %d bits"
1959 " might change its value.\n", element_bits);
1960
1961 return false;
1962}
1963
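
/* Illustration (not part of tree-vect-stmts.cc): the narrowest-offset-width
   computation above, sketched for the unsigned case with constant operands
   and STEP assumed to be a multiple of SCALE (the real code uses widest_int
   and also handles the signed case).  Requires C++20 <bit>.  */

#include <bit>
#include <cstdint>

static unsigned
offset_bits_sketch (uint64_t count, uint64_t step, uint64_t scale)
{
  uint64_t range = count * (step / scale);	    /* COUNT * STEP / SCALE */
  unsigned min_bits = 64 - std::countl_zero (range | 1);
  return std::bit_ceil (min_bits);		    /* 1U << ceil_log2 (...) */
}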
1964/* Return true if we can use gather/scatter internal functions to
1965 vectorize STMT_INFO, which is a grouped or strided load or store.
1966 MASKED_P is true if load or store is conditional. When returning
1967 true, fill in GS_INFO with the information required to perform the
1968 operation. */
1969
1970static bool
1971vect_use_strided_gather_scatters_p (stmt_vec_info stmt_info,
1972 loop_vec_info loop_vinfo, bool masked_p,
1973 gather_scatter_info *gs_info)
1974{
1975 if (!vect_check_gather_scatter (stmt_info, loop_vinfo, gs_info)
1976 || gs_info->ifn == IFN_LAST)
1977 return vect_truncate_gather_scatter_offset (stmt_info, loop_vinfo,
1978 masked_p, gs_info);
1979
1980 tree old_offset_type = TREE_TYPE (gs_info->offset);
1981 tree new_offset_type = TREE_TYPE (gs_info->offset_vectype);
1982
1983 gcc_assert (TYPE_PRECISION (new_offset_type)
1984 >= TYPE_PRECISION (old_offset_type));
1985 gs_info->offset = fold_convert (new_offset_type, gs_info->offset);
1986
1987 if (dump_enabled_p ())
1988 dump_printf_loc (MSG_NOTE, vect_location,
1989 "using gather/scatter for strided/grouped access,"
1990 " scale = %d\n", gs_info->scale);
1991
1992 return true;
1993}
1994
1995/* STMT_INFO is a non-strided load or store, meaning that it accesses
1996 elements with a known constant step. Return -1 if that step
1997 is negative, 0 if it is zero, and 1 if it is greater than zero. */
1998
1999static int
2000compare_step_with_zero (vec_info *vinfo, stmt_vec_info stmt_info)
2001{
2002 dr_vec_info *dr_info = STMT_VINFO_DR_INFO (stmt_info);
2003 return tree_int_cst_compare (vect_dr_behavior (vinfo, dr_info)->step,
2004 size_zero_node);
2005}
2006
2007/* If the target supports a permute mask that reverses the elements in
2008 a vector of type VECTYPE, return that mask, otherwise return null. */
2009
2010static tree
2011perm_mask_for_reverse (tree vectype)
2012{
2013 poly_uint64 nunits = TYPE_VECTOR_SUBPARTS (vectype);
2014
2015 /* The encoding has a single stepped pattern. */
2016 vec_perm_builder sel (nunits, 1, 3);
2017 for (int i = 0; i < 3; ++i)
2018 sel.quick_push (nunits - 1 - i);
2019
2020 vec_perm_indices indices (sel, 1, nunits);
2021 if (!can_vec_perm_const_p (TYPE_MODE (vectype), TYPE_MODE (vectype),
2022 indices))
2023 return NULL_TREE;
2025}
2026
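
/* Illustration (not part of tree-vect-stmts.cc): the reversing selector built
   by perm_mask_for_reverse, sketched for a constant lane count.  */

#include <vector>

static std::vector<unsigned>
reverse_perm_sketch (unsigned nunits)
{
  std::vector<unsigned> sel (nunits);
  for (unsigned i = 0; i < nunits; ++i)
    sel[i] = nunits - 1 - i;	/* { n-1, n-2, ..., 0 } */
  return sel;
}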
2027/* A subroutine of get_load_store_type, with a subset of the same
2028 arguments. Handle the case where STMT_INFO is a load or store that
2029 accesses consecutive elements with a negative step. Sets *POFFSET
2030 to the offset to be applied to the DR for the first access. */
2031
2032static vect_memory_access_type
2033get_negative_load_store_type (vec_info *vinfo,
2034 stmt_vec_info stmt_info, tree vectype,
2035 vec_load_store_type vls_type,
2036 unsigned int ncopies, poly_int64 *poffset)
2037{
2038 dr_vec_info *dr_info = STMT_VINFO_DR_INFO (stmt_info);
2039 dr_alignment_support alignment_support_scheme;
2040
2041 if (ncopies > 1)
2042 {
2043 if (dump_enabled_p ())
2044 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
2045 "multiple types with negative step.\n");
2046 return VMAT_ELEMENTWISE;
2047 }
2048
2049 /* For backward running DRs the first access in vectype actually is
2050 N-1 elements before the address of the DR. */
2051 *poffset = ((-TYPE_VECTOR_SUBPARTS (vectype) + 1)
2052 * TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (vectype))));
2053
2054 int misalignment = dr_misalignment (dr_info, vectype, *poffset);
2055 alignment_support_scheme
2056 = vect_supportable_dr_alignment (vinfo, dr_info, vectype, misalignment);
2057 if (alignment_support_scheme != dr_aligned
2058 && alignment_support_scheme != dr_unaligned_supported)
2059 {
2060 if (dump_enabled_p ())
2061 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
2062 "negative step but alignment required.\n");
2063 *poffset = 0;
2064 return VMAT_ELEMENTWISE;
2065 }
2066
2067 if (vls_type == VLS_STORE_INVARIANT)
2068 {
2069 if (dump_enabled_p ())
2070 dump_printf_loc (MSG_NOTE, vect_location,
2071 "negative step with invariant source;"
2072 " no permute needed.\n");
2073 return VMAT_CONTIGUOUS_DOWN;
2074 }
2075
2076 if (!perm_mask_for_reverse (vectype))
2077 {
2078 if (dump_enabled_p ())
2079 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
2080 "negative step and reversing not supported.\n");
2081 *poffset = 0;
2082 return VMAT_ELEMENTWISE;
2083 }
2084
2085 return VMAT_CONTIGUOUS_REVERSE;
2086}
2087
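
/* Illustration (not part of tree-vect-stmts.cc): the first-access offset for
   a backward-running data reference, as computed above.  For four 4-byte
   lanes this yields (-4 + 1) * 4 = -12 bytes.  */

#include <cstdint>

static int64_t
negative_step_offset_sketch (int64_t nunits, int64_t elem_size_bytes)
{
  return (-nunits + 1) * elem_size_bytes;
}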
2088/* STMT_INFO is either a masked or unconditional store. Return the value
2089 being stored. */
2090
2091tree
2092vect_get_store_rhs (stmt_vec_info stmt_info)
2093{
2094 if (gassign *assign = dyn_cast <gassign *> (stmt_info->stmt))
2095 {
2096 gcc_assert (gimple_assign_single_p (assign));
2097 return gimple_assign_rhs1 (assign);
2098 }
2099 if (gcall *call = dyn_cast <gcall *> (stmt_info->stmt))
2100 {
2101 internal_fn ifn = gimple_call_internal_fn (call);
2102 int index = internal_fn_stored_value_index (ifn);
2103 gcc_assert (index >= 0);
2104 return gimple_call_arg (call, index);
2105 }
2106 gcc_unreachable ();
2107}
2108
2109/* Function VECTOR_VECTOR_COMPOSITION_TYPE
2110
2111 This function returns a vector type which can be composed from NELTS pieces,
2112 whose type is recorded in PTYPE.  VTYPE should be a vector type with the
2113 same size as the returned vector.  It first checks whether the target
2114 supports a piece-sized vector mode for the construction; if not, it then
2115 checks for a piece-sized scalar mode.  It returns NULL_TREE if no usable
2116 composition can be found.
2117
2118 For example, for (vtype=V16QI, nelts=4), we can probably get:
2119 - V16QI with PTYPE V4QI.
2120 - V4SI with PTYPE SI.
2121 - NULL_TREE. */
2122
2123static tree
2124vector_vector_composition_type (tree vtype, poly_uint64 nelts, tree *ptype)
2125{
2126 gcc_assert (VECTOR_TYPE_P (vtype));
2127 gcc_assert (known_gt (nelts, 0U));
2128
2129 machine_mode vmode = TYPE_MODE (vtype);
2130 if (!VECTOR_MODE_P (vmode))
2131 return NULL_TREE;
2132
2133 poly_uint64 vbsize = GET_MODE_BITSIZE (vmode);
2134 unsigned int pbsize;
2135 if (constant_multiple_p (vbsize, nelts, &pbsize))
2136 {
2137 /* First check if vec_init optab supports construction from
2138 vector pieces directly. */
2139 scalar_mode elmode = SCALAR_TYPE_MODE (TREE_TYPE (vtype));
2140 poly_uint64 inelts = pbsize / GET_MODE_BITSIZE (elmode);
2141 machine_mode rmode;
2142 if (related_vector_mode (vmode, elmode, inelts).exists (&rmode)
2143 && (convert_optab_handler (vec_init_optab, vmode, rmode)
2144 != CODE_FOR_nothing))
2145 {
2146 *ptype = build_vector_type (TREE_TYPE (vtype), inelts);
2147 return vtype;
2148 }
2149
2150 /* Otherwise check if exists an integer type of the same piece size and
2151 if vec_init optab supports construction from it directly. */
2152 if (int_mode_for_size (pbsize, 0).exists (&elmode)
2153 && related_vector_mode (vmode, elmode, nelts).exists (&rmode)
2154 && (convert_optab_handler (vec_init_optab, rmode, elmode)
2155 != CODE_FOR_nothing))
2156 {
2157 *ptype = build_nonstandard_integer_type (pbsize, 1);
2158 return build_vector_type (*ptype, nelts);
2159 }
2160 }
2161
2162 return NULL_TREE;
2163}
2164
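
/* Illustration (not part of tree-vect-stmts.cc): the piece-size computation
   behind the constant_multiple_p check above.  For a 128-bit V16QI and
   NELTS == 4 this yields 32-bit pieces (V4QI, or SImode as the fallback).  */

static unsigned
piece_bits_sketch (unsigned vector_bits, unsigned nelts)
{
  return vector_bits % nelts == 0 ? vector_bits / nelts : 0;
}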
2165/* A subroutine of get_load_store_type, with a subset of the same
2166 arguments. Handle the case where STMT_INFO is part of a grouped load
2167 or store.
2168
2169 For stores, the statements in the group are all consecutive
2170 and there is no gap at the end. For loads, the statements in the
2171 group might not be consecutive; there can be gaps between statements
2172 as well as at the end. */
2173
2174static bool
2175get_group_load_store_type (vec_info *vinfo, stmt_vec_info stmt_info,
2176 tree vectype, slp_tree slp_node,
2177 bool masked_p, vec_load_store_type vls_type,
2178 vect_memory_access_type *memory_access_type,
2179 poly_int64 *poffset,
2180 dr_alignment_support *alignment_support_scheme,
2181 int *misalignment,
2182 gather_scatter_info *gs_info)
2183{
2184 loop_vec_info loop_vinfo = dyn_cast <loop_vec_info> (vinfo);
2185 class loop *loop = loop_vinfo ? LOOP_VINFO_LOOP (loop_vinfo) : NULL;
2186 stmt_vec_info first_stmt_info = DR_GROUP_FIRST_ELEMENT (stmt_info);
2187 dr_vec_info *first_dr_info = STMT_VINFO_DR_INFO (first_stmt_info);
2188 unsigned int group_size = DR_GROUP_SIZE (first_stmt_info);
2189 bool single_element_p = (stmt_info == first_stmt_info
2190 && !DR_GROUP_NEXT_ELEMENT (stmt_info));
2191 unsigned HOST_WIDE_INT gap = DR_GROUP_GAP (first_stmt_info);
2192 poly_uint64 nunits = TYPE_VECTOR_SUBPARTS (vectype);
2193
2194 /* True if the vectorized statements would access beyond the last
2195 statement in the group. */
2196 bool overrun_p = false;
2197
2198 /* True if we can cope with such overrun by peeling for gaps, so that
2199 there is at least one final scalar iteration after the vector loop. */
2200 bool can_overrun_p = (!masked_p
2201 && vls_type == VLS_LOAD
2202 && loop_vinfo
2203 && !loop->inner);
2204
2205 /* There can only be a gap at the end of the group if the stride is
2206 known at compile time. */
2207 gcc_assert (!STMT_VINFO_STRIDED_P (first_stmt_info) || gap == 0);
2208
2209 /* Stores can't yet have gaps. */
2210 gcc_assert (slp_node || vls_type == VLS_LOAD || gap == 0);
2211
2212 if (slp_node)
2213 {
2214 /* For SLP vectorization we directly vectorize a subchain
2215 without permutation. */
2216 if (! SLP_TREE_LOAD_PERMUTATION (slp_node).exists ())
2217 first_dr_info
2218 = STMT_VINFO_DR_INFO (SLP_TREE_SCALAR_STMTS (slp_node)[0]);
2219 if (STMT_VINFO_STRIDED_P (first_stmt_info))
2220 {
2221 /* Try to use consecutive accesses of DR_GROUP_SIZE elements,
2222 separated by the stride, until we have a complete vector.
2223 Fall back to scalar accesses if that isn't possible. */
2224 if (multiple_p (nunits, group_size))
2225 *memory_access_type = VMAT_STRIDED_SLP;
2226 else
2227 *memory_access_type = VMAT_ELEMENTWISE;
2228 }
2229 else
2230 {
2231 overrun_p = loop_vinfo && gap != 0;
2232 if (overrun_p && vls_type != VLS_LOAD)
2233 {
2234 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
2235 "Grouped store with gaps requires"
2236 " non-consecutive accesses\n");
2237 return false;
2238 }
2239 /* An overrun is fine if the trailing elements are smaller
2240 than the alignment boundary B. Every vector access will
2241 be a multiple of B and so we are guaranteed to access a
2242 non-gap element in the same B-sized block. */
2243 if (overrun_p
2244 && gap < (vect_known_alignment_in_bytes (first_dr_info,
2245 vectype)
2246 / vect_get_scalar_dr_size (first_dr_info)))
2247 overrun_p = false;
2248
2249 /* If the gap splits the vector in half and the target
2250 can do half-vector operations avoid the epilogue peeling
2251 by simply loading half of the vector only. Usually
2252 the construction with an upper zero half will be elided. */
2253 dr_alignment_support alss;
2254 int misalign = dr_misalignment (first_dr_info, vectype);
2255 tree half_vtype;
2256 if (overrun_p
2257 && !masked_p
2258 && (((alss = vect_supportable_dr_alignment (vinfo, first_dr_info,
2259 vectype, misalign)))
2260 == dr_aligned
2261 || alss == dr_unaligned_supported)
2262 && known_eq (nunits, (group_size - gap) * 2)
2263 && known_eq (nunits, group_size)
2264 && (vector_vector_composition_type (vectype, 2, &half_vtype)
2265 != NULL_TREE))
2266 overrun_p = false;
2267
2268 if (overrun_p && !can_overrun_p)
2269 {
2270 if (dump_enabled_p ())
2271 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
2272 "Peeling for outer loop is not supported\n");
2273 return false;
2274 }
2275 int cmp = compare_step_with_zero (vinfo, stmt_info);
2276 if (cmp < 0)
2277 {
2278 if (single_element_p)
2279 /* ??? The VMAT_CONTIGUOUS_REVERSE code generation is
2280 only correct for single element "interleaving" SLP. */
2281 *memory_access_type = get_negative_load_store_type
2282 (vinfo, stmt_info, vectype, vls_type, 1, poffset);
2283 else
2284 {
2285 /* Try to use consecutive accesses of DR_GROUP_SIZE elements,
2286 separated by the stride, until we have a complete vector.
2287 Fall back to scalar accesses if that isn't possible. */
2288 if (multiple_p (nunits, group_size))
2289 *memory_access_type = VMAT_STRIDED_SLP;
2290 else
2291 *memory_access_type = VMAT_ELEMENTWISE;
2292 }
2293 }
2294 else
2295 {
2296 gcc_assert (!loop_vinfo || cmp > 0);
2297 *memory_access_type = VMAT_CONTIGUOUS;
2298 }
2299
2300 /* When we have a contiguous access across loop iterations
2301 but the access in the loop doesn't cover the full vector
2302 we can end up with no gap recorded but still excess
2303 elements accessed, see PR103116. Make sure we peel for
2304 gaps if necessary and sufficient and give up if not. */
2305 if (loop_vinfo
2306 && *memory_access_type == VMAT_CONTIGUOUS
2307 && SLP_TREE_LOAD_PERMUTATION (slp_node).exists ()
2308 && !multiple_p (group_size * LOOP_VINFO_VECT_FACTOR (loop_vinfo),
2309 nunits))
2310 {
2311 unsigned HOST_WIDE_INT cnunits, cvf;
2312 if (!can_overrun_p
2313 || !nunits.is_constant (&cnunits)
2314 || !LOOP_VINFO_VECT_FACTOR (loop_vinfo).is_constant (&cvf)
2315 /* Peeling for gaps assumes that a single scalar iteration
2316 is enough to make sure the last vector iteration doesn't
2317 access excess elements.
2318 ??? Enhancements include peeling multiple iterations
2319 or using masked loads with a static mask. */
2320 || (group_size * cvf) % cnunits + group_size < cnunits)
2321 {
2322 if (dump_enabled_p ())
2323 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
2324 "peeling for gaps insufficient for "
2325 "access\n");
2326 return false;
2327 }
2328 overrun_p = true;
2329 }
2330 }
2331 }
2332 else
2333 {
2334 /* We can always handle this case using elementwise accesses,
2335 but see if something more efficient is available. */
2336 *memory_access_type = VMAT_ELEMENTWISE;
2337
2338 /* If there is a gap at the end of the group then these optimizations
2339 would access excess elements in the last iteration. */
2340 bool would_overrun_p = (gap != 0);
2341 /* An overrun is fine if the trailing elements are smaller than the
2342 alignment boundary B. Every vector access will be a multiple of B
2343 and so we are guaranteed to access a non-gap element in the
2344 same B-sized block. */
2345 if (would_overrun_p
2346 && !masked_p
2347 && gap < (vect_known_alignment_in_bytes (first_dr_info, vectype)
2348 / vect_get_scalar_dr_size (first_dr_info)))
2349 would_overrun_p = false;
2350
2351 if (!STMT_VINFO_STRIDED_P (first_stmt_info)
2352 && (can_overrun_p || !would_overrun_p)
2353 && compare_step_with_zero (vinfo, stmt_info) > 0)
2354 {
2355 /* First cope with the degenerate case of a single-element
2356 vector. */
2357 if (known_eq (TYPE_VECTOR_SUBPARTS (vectype), 1U))
2358 ;
2359
2360 /* Otherwise try using LOAD/STORE_LANES. */
2361 else if (vls_type == VLS_LOAD
2362 ? vect_load_lanes_supported (vectype, group_size, masked_p)
2363 : vect_store_lanes_supported (vectype, group_size,
2364 masked_p))
2365 {
2366 *memory_access_type = VMAT_LOAD_STORE_LANES;
2367 overrun_p = would_overrun_p;
2368 }
2369
2370 /* If that fails, try using permuting loads. */
2371 else if (vls_type == VLS_LOAD
2372 ? vect_grouped_load_supported (vectype, single_element_p,
2373 group_size)
2374 : vect_grouped_store_supported (vectype, group_size))
2375 {
2376 *memory_access_type = VMAT_CONTIGUOUS_PERMUTE;
2377 overrun_p = would_overrun_p;
2378 }
2379 }
2380
2381 /* As a last resort, try using a gather load or scatter store.
2382
2383 ??? Although the code can handle all group sizes correctly,
2384 it probably isn't a win to use separate strided accesses based
2385 on nearby locations. Or, even if it's a win over scalar code,
2386 it might not be a win over vectorizing at a lower VF, if that
2387 allows us to use contiguous accesses. */
2388 if (*memory_access_type == VMAT_ELEMENTWISE
2389 && single_element_p
2390 && loop_vinfo
2391 && vect_use_strided_gather_scatters_p (stmt_info, loop_vinfo,
2392 masked_p, gs_info))
2393 *memory_access_type = VMAT_GATHER_SCATTER;
2394 }
2395
2396 if (*memory_access_type == VMAT_GATHER_SCATTER
2397 || *memory_access_type == VMAT_ELEMENTWISE)
2398 {
2399 *alignment_support_scheme = dr_unaligned_supported;
2400 *misalignment = DR_MISALIGNMENT_UNKNOWN;
2401 }
2402 else
2403 {
2404 *misalignment = dr_misalignment (first_dr_info, vectype, *poffset);
2405 *alignment_support_scheme
2406 = vect_supportable_dr_alignment (vinfo, first_dr_info, vectype,
2407 *misalignment);
2408 }
2409
2410 if (vls_type != VLS_LOAD && first_stmt_info == stmt_info)
2411 {
2412 /* STMT is the leader of the group. Check the operands of all the
2413 stmts of the group. */
2414 stmt_vec_info next_stmt_info = DR_GROUP_NEXT_ELEMENT (stmt_info);
2415 while (next_stmt_info)
2416 {
2417 tree op = vect_get_store_rhs (next_stmt_info);
2418 enum vect_def_type dt;
2419 if (!vect_is_simple_use (op, vinfo, &dt))
2420 {
2421 if (dump_enabled_p ())
2422 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
2423 "use not simple.\n");
2424 return false;
2425 }
2426 next_stmt_info = DR_GROUP_NEXT_ELEMENT (next_stmt_info);
2427 }
2428 }
2429
2430 if (overrun_p)
2431 {
2432 gcc_assert (can_overrun_p);
2433 if (dump_enabled_p ())
2434 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
2435 "Data access with gaps requires scalar "
2436 "epilogue loop\n");
2437 LOOP_VINFO_PEELING_FOR_GAPS (loop_vinfo) = true;
2438 }
2439
2440 return true;
2441}
2442
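
/* Illustration (not part of tree-vect-stmts.cc): the trailing-gap safety test
   used twice above.  An overrun past the gap is harmless when the gap fits
   inside one alignment block: every vector access is a multiple of the
   alignment, so it stays within a block that also holds a non-gap element.  */

static bool
overrun_is_safe_sketch (unsigned gap_elems, unsigned align_bytes,
			unsigned elem_size_bytes)
{
  return gap_elems < align_bytes / elem_size_bytes;
}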
2443/* Analyze load or store statement STMT_INFO of type VLS_TYPE. Return true
2444 if there is a memory access type that the vectorized form can use,
2445 storing it in *MEMORY_ACCESS_TYPE if so. If we decide to use gathers
2446 or scatters, fill in GS_INFO accordingly. In addition
2447 *ALIGNMENT_SUPPORT_SCHEME is filled out and false is returned if
2448 the target does not support the alignment scheme. *MISALIGNMENT
2449 is set according to the alignment of the access (including
2450 DR_MISALIGNMENT_UNKNOWN when it is unknown).
2451
2452 SLP says whether we're performing SLP rather than loop vectorization.
2453 MASKED_P is true if the statement is conditional on a vectorized mask.
2454 VECTYPE is the vector type that the vectorized statements will use.
2455 NCOPIES is the number of vector statements that will be needed. */
2456
2457static bool
2458get_load_store_type (vec_info *vinfo, stmt_vec_info stmt_info,
2459 tree vectype, slp_tree slp_node,
2460 bool masked_p, vec_load_store_type vls_type,
2461 unsigned int ncopies,
2462 vect_memory_access_type *memory_access_type,
2463 poly_int64 *poffset,
2464 dr_alignment_support *alignment_support_scheme,
2465 int *misalignment,
2466 gather_scatter_info *gs_info)
2467{
2468 loop_vec_info loop_vinfo = dyn_cast <loop_vec_info> (vinfo);
2469 poly_uint64 nunits = TYPE_VECTOR_SUBPARTS (vectype);
2470 *misalignment = DR_MISALIGNMENT_UNKNOWN;
2471 *poffset = 0;
2472 if (STMT_VINFO_GATHER_SCATTER_P (stmt_info))
2473 {
2474 *memory_access_type = VMAT_GATHER_SCATTER;
2475 if (!vect_check_gather_scatter (stmt_info, loop_vinfo, gs_info))
2476 gcc_unreachable ();
2477 /* When using internal functions, we rely on pattern recognition
2478 to convert the type of the offset to the type that the target
2479 requires, with the result being a call to an internal function.
2480 If that failed for some reason (e.g. because another pattern
2481 took priority), just handle cases in which the offset already
2482 has the right type. */
2483 else if (gs_info->ifn != IFN_LAST
2484 && !is_gimple_call (stmt_info->stmt)
2485 && !tree_nop_conversion_p (TREE_TYPE (gs_info->offset),
2486 TREE_TYPE (gs_info->offset_vectype)))
2487 {
2488 if (dump_enabled_p ())
2489 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
2490 "%s offset requires a conversion\n",
2491 vls_type == VLS_LOAD ? "gather" : "scatter");
2492 return false;
2493 }
2494 else if (!vect_is_simple_use (gs_info->offset, vinfo,
2495 &gs_info->offset_dt,
2496 &gs_info->offset_vectype))
2497 {
2498 if (dump_enabled_p ())
2499 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
2500 "%s index use not simple.\n",
2501 vls_type == VLS_LOAD ? "gather" : "scatter");
2502 return false;
2503 }
2504 else if (gs_info->ifn == IFN_LAST && !gs_info->decl)
2505 {
2506 if (vls_type != VLS_LOAD)
2507 {
2508 if (dump_enabled_p ())
2509 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
2510 "unsupported emulated scatter.\n");
2511 return false;
2512 }
2513 else if (!TYPE_VECTOR_SUBPARTS (vectype).is_constant ()
2514 || !TYPE_VECTOR_SUBPARTS
2515 (gs_info->offset_vectype).is_constant ()
2516 || !constant_multiple_p (TYPE_VECTOR_SUBPARTS
2517 (gs_info->offset_vectype),
2518 TYPE_VECTOR_SUBPARTS (vectype)))
2519 {
2520 if (dump_enabled_p ())
2521 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
2522 "unsupported vector types for emulated "
2523 "gather.\n");
2524 return false;
2525 }
2526 }
2527 /* Gather-scatter accesses perform only component accesses, alignment
2528 is irrelevant for them. */
2529 *alignment_support_scheme = dr_unaligned_supported;
2530 }
2531 else if (STMT_VINFO_GROUPED_ACCESS (stmt_info))
2532 {
2533 if (!get_group_load_store_type (vinfo, stmt_info, vectype, slp_node,
2534 masked_p,
2535 vls_type, memory_access_type, poffset,
2536 alignment_support_scheme,
2537 misalignment, gs_info))
2538 return false;
2539 }
2540 else if (STMT_VINFO_STRIDED_P (stmt_info))
2541 {
2542 gcc_assert (!slp_node);
2543 if (loop_vinfo
2544 && vect_use_strided_gather_scatters_p (stmt_info, loop_vinfo,
2545 masked_p, gs_info))
2546 *memory_access_type = VMAT_GATHER_SCATTER;
2547 else
2548 *memory_access_type = VMAT_ELEMENTWISE;
2549 /* Alignment is irrelevant here. */
2550 *alignment_support_scheme = dr_unaligned_supported;
2551 }
2552 else
2553 {
2554 int cmp = compare_step_with_zero (vinfo, stmt_info);
2555 if (cmp == 0)
2556 {
2557 gcc_assert (vls_type == VLS_LOAD);
2558 *memory_access_type = VMAT_INVARIANT;
2559 /* Invariant accesses perform only component accesses, alignment
2560 is irrelevant for them. */
2561 *alignment_support_scheme = dr_unaligned_supported;
2562 }
2563 else
2564 {
2565 if (cmp < 0)
2566 *memory_access_type = get_negative_load_store_type
2567 (vinfo, stmt_info, vectype, vls_type, ncopies, poffset);
2568 else
2569 *memory_access_type = VMAT_CONTIGUOUS;
2570 *misalignment = dr_misalignment (STMT_VINFO_DR_INFO (stmt_info),
2571 vectype, *poffset);
2572 *alignment_support_scheme
2573 = vect_supportable_dr_alignment (vinfo,
2574 STMT_VINFO_DR_INFO (stmt_info),
2575 vectype, *misalignment);
2576 }
2577 }
2578
2579 if ((*memory_access_type == VMAT_ELEMENTWISE
2580 || *memory_access_type == VMAT_STRIDED_SLP)
2581 && !nunits.is_constant ())
2582 {
2583 if (dump_enabled_p ())
2584 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
2585 "Not using elementwise accesses due to variable "
2586 "vectorization factor.\n");
2587 return false;
2588 }
2589
2590 if (*alignment_support_scheme == dr_unaligned_unsupported)
2591 {
2592 if (dump_enabled_p ())
2593 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
2594 "unsupported unaligned access\n");
2595 return false;
2596 }
2597
2598 /* FIXME: At the moment the cost model seems to underestimate the
2599 cost of using elementwise accesses. This check preserves the
2600 traditional behavior until that can be fixed. */
2601 stmt_vec_info first_stmt_info = DR_GROUP_FIRST_ELEMENT (stmt_info);
2602 if (!first_stmt_info)
2603 first_stmt_info = stmt_info;
2604 if (*memory_access_type == VMAT_ELEMENTWISE
2605 && !STMT_VINFO_STRIDED_P (first_stmt_info)
2606 && !(stmt_info == DR_GROUP_FIRST_ELEMENT (stmt_info)
2607 && !DR_GROUP_NEXT_ELEMENT (stmt_info)
2608 && !pow2p_hwi (DR_GROUP_SIZE (stmt_info))))
2609 {
2610 if (dump_enabled_p ())
2611 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
2612 "not falling back to elementwise accesses\n");
2613 return false;
2614 }
2615 return true;
2616}
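/* Illustrative example (not in the original source): with a
   compile-time step of zero the access address is loop-invariant and
   the code above selects VMAT_INVARIANT; a negative step is resolved
   by get_negative_load_store_type, and a positive contiguous step
   yields VMAT_CONTIGUOUS with the usual misalignment check.  */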
2617
2618/* Return true if boolean argument at MASK_INDEX is suitable for vectorizing
2619 conditional operation STMT_INFO. When returning true, store the mask
2620 in *MASK, the type of its definition in *MASK_DT_OUT, the type of the
2621 vectorized mask in *MASK_VECTYPE_OUT and the SLP node corresponding
2622 to the mask in *MASK_NODE if MASK_NODE is not NULL. */
2623
2624static bool
2625vect_check_scalar_mask (vec_info *vinfo, stmt_vec_info stmt_info,
2626 slp_tree slp_node, unsigned mask_index,
2627 tree *mask, slp_tree *mask_node,
2628 vect_def_type *mask_dt_out, tree *mask_vectype_out)
2629{
2630 enum vect_def_type mask_dt;
2631 tree mask_vectype;
2632 slp_tree mask_node_1;
2633 if (!vect_is_simple_use (vinfo, stmt_info, slp_node, mask_index,
2634 mask, &mask_node_1, &mask_dt, &mask_vectype))
2635 {
2636 if (dump_enabled_p ())
2637 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
2638 "mask use not simple.\n");
2639 return false;
2640 }
2641
2642 if (!VECT_SCALAR_BOOLEAN_TYPE_P (TREE_TYPE (*mask)))
2643 {
2644 if (dump_enabled_p ())
2645 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
2646 "mask argument is not a boolean.\n");
2647 return false;
2648 }
2649
2650 /* If the caller is not prepared for adjusting an external/constant
2651 SLP mask vector type fail. */
2652 if (slp_node
2653 && !mask_node
2654 && SLP_TREE_DEF_TYPE (mask_node_1) != vect_internal_def)
2655 {
2656 if (dump_enabled_p ())
2657 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
2658 "SLP mask argument is not vectorized.\n");
2659 return false;
2660 }
2661
2662 tree vectype = STMT_VINFO_VECTYPE (stmt_info);
2663 if (!mask_vectype)
2664 mask_vectype = get_mask_type_for_scalar_type (vinfo, TREE_TYPE (vectype));
2665
2666 if (!mask_vectype || !VECTOR_BOOLEAN_TYPE_P (mask_vectype))
2667 {
2668 if (dump_enabled_p ())
2669 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
2670 "could not find an appropriate vector mask type.\n");
2671 return false;
2672 }
2673
2674 if (maybe_ne (TYPE_VECTOR_SUBPARTS (mask_vectype),
2675 TYPE_VECTOR_SUBPARTS (vectype)))
2676 {
2677 if (dump_enabled_p ())
2678 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
2679 "vector mask type %T"
2680 " does not match vector data type %T.\n",
2681 mask_vectype, vectype);
2682
2683 return false;
2684 }
2685
2686 *mask_dt_out = mask_dt;
2687 *mask_vectype_out = mask_vectype;
2688 if (mask_node)
2689 *mask_node = mask_node_1;
2690 return true;
2691}
2692
2693/* Return true if stored value RHS is suitable for vectorizing store
2694 statement STMT_INFO. When returning true, store the type of the
2695 definition in *RHS_DT_OUT, the type of the vectorized store value in
2696 *RHS_VECTYPE_OUT and the type of the store in *VLS_TYPE_OUT. */
2697
2698static bool
2699vect_check_store_rhs (vec_info *vinfo, stmt_vec_info stmt_info,
2700 slp_tree slp_node, tree rhs,
2701 vect_def_type *rhs_dt_out, tree *rhs_vectype_out,
2702 vec_load_store_type *vls_type_out)
2703{
2704 /* In the case this is a store from a constant make sure
2705 native_encode_expr can handle it. */
2706 if (CONSTANT_CLASS_P (rhs) && native_encode_expr (rhs, NULL, 64) == 0)
2707 {
2708 if (dump_enabled_p ())
2709 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
2710 "cannot encode constant as a byte sequence.\n");
2711 return false;
2712 }
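/* Illustrative note (not in the original source): passing a null
   buffer to native_encode_expr is a dry run; a return value of 0 means
   the constant has no well-defined target byte encoding, so it could
   not be materialized as an invariant vector store value.  */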
2713
2714 unsigned op_no = 0;
2715 if (gcall *call = dyn_cast <gcall *> (stmt_info->stmt))
2716 {
2717 if (gimple_call_internal_p (call)
2718 && internal_store_fn_p (gimple_call_internal_fn (call)))
2719 op_no = internal_fn_stored_value_index (gimple_call_internal_fn (call));
2720 }
2721
2722 enum vect_def_type rhs_dt;
2723 tree rhs_vectype;
2724 slp_tree slp_op;
2725 if (!vect_is_simple_use (vinfo, stmt_info, slp_node, op_no,
2726 &rhs, &slp_op, &rhs_dt, &rhs_vectype))
2727 {
2728 if (dump_enabled_p ())
2729 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
2730 "use not simple.\n");
2731 return false;
2732 }
2733
2734 tree vectype = STMT_VINFO_VECTYPE (stmt_info);
2735 if (rhs_vectype && !useless_type_conversion_p (vectype, rhs_vectype))
2736 {
2737 if (dump_enabled_p ())
2738 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
2739 "incompatible vector types.\n");
2740 return false;
2741 }
2742
2743 *rhs_dt_out = rhs_dt;
2744 *rhs_vectype_out = rhs_vectype;
2745 if (rhs_dt == vect_constant_def || rhs_dt == vect_external_def)
2746 *vls_type_out = VLS_STORE_INVARIANT;
2747 else
2748 *vls_type_out = VLS_STORE;
2749 return true;
2750}
2751
2752/* Build an all-ones vector mask of type MASKTYPE while vectorizing STMT_INFO.
2753 Note that we support masks with floating-point type, in which case the
2754 floats are interpreted as a bitmask. */
2755
2756static tree
2757vect_build_all_ones_mask (vec_info *vinfo,
2758 stmt_vec_info stmt_info, tree masktype)
2759{
2760 if (TREE_CODE (masktype) == INTEGER_TYPE)
2761 return build_int_cst (masktype, -1);
2762 else if (TREE_CODE (TREE_TYPE (masktype)) == INTEGER_TYPE)
2763 {
2764 tree mask = build_int_cst (TREE_TYPE (masktype), -1);
2765 mask = build_vector_from_val (masktype, mask);
2766 return vect_init_vector (vinfo, stmt_info, mask, masktype, NULL);
2767 }
2768 else if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (masktype)))
2769 {
2770 REAL_VALUE_TYPE r;
2771 long tmp[6];
2772 for (int j = 0; j < 6; ++j)
2773 tmp[j] = -1;
2774 real_from_target (&r, tmp, TYPE_MODE (TREE_TYPE (masktype)));
2775 tree mask = build_real (TREE_TYPE (masktype), r);
2776 mask = build_vector_from_val (masktype, mask);
2777 return vect_init_vector (vinfo, stmt_info, mask, masktype, NULL);
2778 }
2779 gcc_unreachable ();
2780}
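/* Illustrative note (not in the original source): for float mask
   elements the all-ones pattern is produced by reinterpreting an
   all-ones bit image through real_from_target, so "true" lanes carry
   every bit set rather than the value 1.0.  */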
2781
2782/* Build an all-zero merge value of type VECTYPE while vectorizing
2783 STMT_INFO as a gather load. */
2784
2785static tree
2786vect_build_zero_merge_argument (vec_info *vinfo,
2787 stmt_vec_info stmt_info, tree vectype)
2788{
2789 tree merge;
2790 if (TREE_CODE (TREE_TYPE (vectype)) == INTEGER_TYPE)
2791 merge = build_int_cst (TREE_TYPE (vectype), 0);
2792 else if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (vectype)))
2793 {
2794 REAL_VALUE_TYPE r;
2795 long tmp[6];
2796 for (int j = 0; j < 6; ++j)
2797 tmp[j] = 0;
2798 real_from_target (&r, tmp, TYPE_MODE (TREE_TYPE (vectype)));
2799 merge = build_real (TREE_TYPE (vectype), r);
2800 }
2801 else
2802 gcc_unreachable ();
2803 merge = build_vector_from_val (vectype, merge);
2804 return vect_init_vector (vinfo, stmt_info, merge, vectype, NULL);
2805}
2806
2807/* Build a gather load call while vectorizing STMT_INFO. Insert new
2808 instructions before GSI and add them to VEC_STMT. GS_INFO describes
2809 the gather load operation. If the load is conditional, MASK is the
2810 unvectorized condition and MASK_DT is its definition type, otherwise
2811 MASK is null. */
2812
2813static void
2814vect_build_gather_load_calls (vec_info *vinfo, stmt_vec_info stmt_info,
2815 gimple_stmt_iterator *gsi,
2816 gimple **vec_stmt,
2817 gather_scatter_info *gs_info,
2818 tree mask)
2819{
2820 loop_vec_info loop_vinfo = dyn_cast <loop_vec_info> (vinfo);
2821 class loop *loop = LOOP_VINFO_LOOP (loop_vinfo);
2822 tree vectype = STMT_VINFO_VECTYPE (stmt_info);
2823 poly_uint64 nunits = TYPE_VECTOR_SUBPARTS (vectype);
2824 int ncopies = vect_get_num_copies (loop_vinfo, vectype);
2825 edge pe = loop_preheader_edge (loop);
2826 enum { NARROW, NONE, WIDEN } modifier;
2827 poly_uint64 gather_off_nunits
2828 = TYPE_VECTOR_SUBPARTS (gs_info->offset_vectype);
2829
2830 tree arglist = TYPE_ARG_TYPES (TREE_TYPE (gs_info->decl));
2831 tree rettype = TREE_TYPE (TREE_TYPE (gs_info->decl));
2832 tree srctype = TREE_VALUE (arglist); arglist = TREE_CHAIN (arglist);
2833 tree ptrtype = TREE_VALUE (arglist); arglist = TREE_CHAIN (arglist);
2834 tree idxtype = TREE_VALUE (arglist); arglist = TREE_CHAIN (arglist);
2835 tree masktype = TREE_VALUE (arglist); arglist = TREE_CHAIN (arglist);
2836 tree scaletype = TREE_VALUE (arglist);
2837 tree real_masktype = masktype;
2838 gcc_checking_assert (types_compatible_p (srctype, rettype)
2839 && (!mask
2840 || TREE_CODE (masktype) == INTEGER_TYPE
2841 || types_compatible_p (srctype, masktype)));
2842 if (mask)
2843 masktype = truth_type_for (srctype);
2844
2845 tree mask_halftype = masktype;
2846 tree perm_mask = NULL_TREE;
2847 tree mask_perm_mask = NULL_TREE;
2848 if (known_eq (nunits, gather_off_nunits))
2849 modifier = NONE;
2850 else if (known_eq (nunits * 2, gather_off_nunits))
2851 {
2852 modifier = WIDEN;
2853
2854 /* Currently widening gathers and scatters are only supported for
2855 fixed-length vectors. */
2856 int count = gather_off_nunits.to_constant ();
2857 vec_perm_builder sel (count, count, 1);
2858 for (int i = 0; i < count; ++i)
2859 sel.quick_push (i | (count / 2));
2860
2861 vec_perm_indices indices (sel, 1, count);
2862 perm_mask = vect_gen_perm_mask_checked (gs_info->offset_vectype,
2863 indices);
2864 }
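/* Illustrative example (not in the original source): for a WIDEN
   gather with nunits == 4 and gather_off_nunits == 8, the loop above
   pushes i | (count / 2) for i = 0..7, giving the selector
   { 4, 5, 6, 7, 4, 5, 6, 7 }; odd copies use it to move the high half
   of the offset vector into position.  */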
2865 else if (known_eq (nunits, gather_off_nunits * 2))
2866 {
2867 modifier = NARROW;
2868
2869 /* Currently narrowing gathers and scatters are only supported for
2870 fixed-length vectors. */
2871 int count = nunits.to_constant ();
2872 vec_perm_builder sel (count, count, 1);
2873 sel.quick_grow (count);
2874 for (int i = 0; i < count; ++i)
2875 sel[i] = i < count / 2 ? i : i + count / 2;
2876 vec_perm_indices indices (sel, 2, count);
2877 perm_mask = vect_gen_perm_mask_checked (vectype, indices);
2878
2879 ncopies *= 2;
2880
2881 if (mask && VECTOR_TYPE_P (real_masktype))
2882 {
2883 for (int i = 0; i < count; ++i)
2884 sel[i] = i | (count / 2);
2885 indices.new_vector (sel, 2, count);
2886 mask_perm_mask = vect_gen_perm_mask_checked (masktype, indices);
2887 }
2888 else if (mask)
2889 mask_halftype = truth_type_for (gs_info->offset_vectype);
2890 }
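/* Illustrative example (not in the original source): for a NARROW
   gather with nunits == 8, the first selector above is
   { 0, 1, 2, 3, 8, 9, 10, 11 }, concatenating the low halves of two
   half-width gather results, and the mask selector becomes
   { 4, 5, 6, 7, 4, 5, 6, 7 } to extract the upper mask half on odd
   copies.  */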
2891 else
2892 gcc_unreachable ();
2893
2894 tree scalar_dest = gimple_get_lhs (stmt_info->stmt);
2895 tree vec_dest = vect_create_destination_var (scalar_dest, vectype);
2896
2897 tree ptr = fold_convert (ptrtype, gs_info->base);
2898 if (!is_gimple_min_invariant (ptr))
2899 {
2900 gimple_seq seq;
2901 ptr = force_gimple_operand (ptr, &seq, true, NULL_TREE);
2902 basic_block new_bb = gsi_insert_seq_on_edge_immediate (pe, seq);
2903 gcc_assert (!new_bb);
2904 }
2905
2906 tree scale = build_int_cst (scaletype, gs_info->scale);
2907
2908 tree vec_oprnd0 = NULL_TREE;
2909 tree vec_mask = NULL_TREE;
2910 tree src_op = NULL_TREE;
2911 tree mask_op = NULL_TREE;
2912 tree prev_res = NULL_TREE;
2913
2914 if (!mask)
2915 {
2916 src_op = vect_build_zero_merge_argument (vinfo, stmt_info, rettype);
2917 mask_op = vect_build_all_ones_mask (vinfo, stmt_info, masktype);
2918 }
2919
2920 auto_vec<tree> vec_oprnds0;
2921 auto_vec<tree> vec_masks;
2922 vect_get_vec_defs_for_operand (vinfo, stmt_info,
2923 modifier == WIDEN ? ncopies / 2 : ncopies,
2924 gs_info->offset, &vec_oprnds0);
2925 if (mask)
2926 vect_get_vec_defs_for_operand (vinfo, stmt_info,
2927 modifier == NARROW ? ncopies / 2 : ncopies,
2928 mask, &vec_masks, masktype);
2929 for (int j = 0; j < ncopies; ++j)
2930 {
2931 tree op, var;
2932 if (modifier == WIDEN && (j & 1))
2933 op = permute_vec_elements (vinfo, vec_oprnd0, vec_oprnd0,
2934 perm_mask, stmt_info, gsi);
2935 else
2936 op = vec_oprnd0 = vec_oprnds0[modifier == WIDEN ? j / 2 : j];
2937
2938 if (!useless_type_conversion_p (idxtype, TREE_TYPE (op)))
2939 {
2940 gcc_assert (known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (op)),
2941 TYPE_VECTOR_SUBPARTS (idxtype)));
2942 var = vect_get_new_ssa_name (idxtype, vect_simple_var);
2943 op = build1 (VIEW_CONVERT_EXPR, idxtype, op);
2944 gassign *new_stmt = gimple_build_assign (var, VIEW_CONVERT_EXPR, op);
2945 vect_finish_stmt_generation (vinfo, stmt_info, new_stmt, gsi);
2946 op = var;
2947 }
2948
2949 if (mask)
2950 {
2951 if (mask_perm_mask && (j & 1))
2952 mask_op = permute_vec_elements (vinfo, mask_op, mask_op,
2953 mask_perm_mask, stmt_info, gsi);
2954 else
2955 {
2956 if (modifier == NARROW)
2957 {
2958 if ((j & 1) == 0)
2959 vec_mask = vec_masks[j / 2];
2960 }
2961 else
2962 vec_mask = vec_masks[j];
2963
2964 mask_op = vec_mask;
2965 if (!useless_type_conversion_p (masktype, TREE_TYPE (vec_mask)))
2966 {
2967 poly_uint64 sub1 = TYPE_VECTOR_SUBPARTS (TREE_TYPE (mask_op));
2968 poly_uint64 sub2 = TYPE_VECTOR_SUBPARTS (masktype);
2969 gcc_assert (known_eq (sub1, sub2));
2970 var = vect_get_new_ssa_name (masktype, vect_simple_var);
2971 mask_op = build1 (VIEW_CONVERT_EXPR, masktype, mask_op);
2972 gassign *new_stmt
2973 = gimple_build_assign (var, VIEW_CONVERT_EXPR, mask_op);
2974 vect_finish_stmt_generation (vinfo, stmt_info, new_stmt, gsi);
2975 mask_op = var;
2976 }
2977 }
2978 if (modifier == NARROW && !VECTOR_TYPE_P (real_masktype))
2979 {
2980 var = vect_get_new_ssa_name (mask_halftype, vect_simple_var);
2981 gassign *new_stmt
2982 = gimple_build_assign (var, (j & 1) ? VEC_UNPACK_HI_EXPR
2983 : VEC_UNPACK_LO_EXPR,
2984 mask_op);
2985 vect_finish_stmt_generation (vinfo, stmt_info, new_stmt, gsi);
2986 mask_op = var;
2987 }
2988 src_op = mask_op;
2989 }
2990
2991 tree mask_arg = mask_op;
2992 if (masktype != real_masktype)
2993 {
2994 tree utype, optype = TREE_TYPE (mask_op);
2995 if (VECTOR_TYPE_P (real_masktype)
2996 || TYPE_MODE (real_masktype) == TYPE_MODE (optype))
2997 utype = real_masktype;
2998 else
2999 utype = lang_hooks.types.type_for_mode (TYPE_MODE (optype), 1);
3000 var = vect_get_new_ssa_name (utype, vect_scalar_var);
3001 mask_arg = build1 (VIEW_CONVERT_EXPR, utype, mask_op);
3002 gassign *new_stmt
3003 = gimple_build_assign (var, VIEW_CONVERT_EXPR, mask_arg);
3004 vect_finish_stmt_generation (vinfo, stmt_info, new_stmt, gsi);
3005 mask_arg = var;
3006 if (!useless_type_conversion_p (real_masktype, utype))
3007 {
3008 gcc_assert (TYPE_PRECISION (utype)
3009 <= TYPE_PRECISION (real_masktype));
3010 var = vect_get_new_ssa_name (real_masktype, vect_scalar_var);
3011 new_stmt = gimple_build_assign (var, NOP_EXPR, mask_arg);
3012 vect_finish_stmt_generation (vinfo, stmt_info, new_stmt, gsi);
3013 mask_arg = var;
3014 }
3015 src_op = build_zero_cst (srctype);
3016 }
3017 gimple *new_stmt = gimple_build_call (gs_info->decl, 5, src_op, ptr, op,
3018 mask_arg, scale);
3019
3020 if (!useless_type_conversion_p (vectype, rettype))
3021 {
3022 gcc_assert (known_eq (TYPE_VECTOR_SUBPARTS (vectype),
3023 TYPE_VECTOR_SUBPARTS (rettype)));
3024 op = vect_get_new_ssa_name (rettype, vect_simple_var);
3025 gimple_call_set_lhs (new_stmt, op);
3026 vect_finish_stmt_generation (vinfo, stmt_info, new_stmt, gsi);
3027 var = make_ssa_name (vec_dest);
3028 op = build1 (VIEW_CONVERT_EXPR, vectype, op);
3029 new_stmt = gimple_build_assign (var, VIEW_CONVERT_EXPR, op);
3030 vect_finish_stmt_generation (vinfo, stmt_info, new_stmt, gsi);
3031 }
3032 else
3033 {
3034 var = make_ssa_name (vec_dest, new_stmt);
3035 gimple_call_set_lhs (new_stmt, var);
3036 vect_finish_stmt_generation (vinfo, stmt_info, new_stmt, gsi);
3037 }
3038
3039 if (modifier == NARROW)
3040 {
3041 if ((j & 1) == 0)
3042 {
3043 prev_res = var;
3044 continue;
3045 }
3046 var = permute_vec_elements (vinfo, prev_res, var, perm_mask,
3047 stmt_info, gsi);
3048 new_stmt = SSA_NAME_DEF_STMT (var);
3049 }
3050
3051 STMT_VINFO_VEC_STMTS (stmt_info).safe_push (new_stmt);
3052 }
3053 *vec_stmt = STMT_VINFO_VEC_STMTS (stmt_info)[0];
3054}
3055
3056/* Prepare the base and offset in GS_INFO for vectorization.
3057 Set *DATAREF_PTR to the loop-invariant base address and *VEC_OFFSET
3058 to the vectorized offset argument for the first copy of STMT_INFO.
3059 STMT_INFO is the statement described by GS_INFO and LOOP is the
3060 containing loop. */
3061
3062static void
3063vect_get_gather_scatter_ops (loop_vec_info loop_vinfo,
3064 class loop *loop, stmt_vec_info stmt_info,
3065 slp_tree slp_node, gather_scatter_info *gs_info,
3066 tree *dataref_ptr, vec<tree> *vec_offset)
3067{
3068 gimple_seq stmts = NULL;
3069 *dataref_ptr = force_gimple_operand (gs_info->base, &stmts, true, NULL_TREE);
3070 if (stmts != NULL)
3071 {
3072 basic_block new_bb;
3073 edge pe = loop_preheader_edge (loop);
3074 new_bb = gsi_insert_seq_on_edge_immediate (pe, stmts);
3075 gcc_assert (!new_bb);
3076 }
3077 if (slp_node)
3078 vect_get_slp_defs (SLP_TREE_CHILDREN (slp_node)[0], vec_offset);
3079 else
3080 {
3081 unsigned ncopies
3082 = vect_get_num_copies (loop_vinfo, gs_info->offset_vectype);
3083 vect_get_vec_defs_for_operand (loop_vinfo, stmt_info, ncopies,
3084 gs_info->offset, vec_offset,
3085 gs_info->offset_vectype);
3086 }
3087}
3088
3089/* Prepare to implement a grouped or strided load or store using
3090 the gather load or scatter store operation described by GS_INFO.
3091 STMT_INFO is the load or store statement.
3092
3093 Set *DATAREF_BUMP to the amount that should be added to the base
3094 address after each copy of the vectorized statement. Set *VEC_OFFSET
3095 to an invariant offset vector in which element I has the value
3096 I * DR_STEP / SCALE. */
3097
3098static void
3099vect_get_strided_load_store_ops (stmt_vec_info stmt_info,
3100 loop_vec_info loop_vinfo,
3101 gather_scatter_info *gs_info,
3102 tree *dataref_bump, tree *vec_offset)
3103{
3104 struct data_reference *dr = STMT_VINFO_DATA_REF (stmt_info);
3105 tree vectype = STMT_VINFO_VECTYPE (stmt_info);
3106
3107 tree bump = size_binop (MULT_EXPR,
3108 fold_convert (sizetype, unshare_expr (DR_STEP (dr))),
3109 size_int (TYPE_VECTOR_SUBPARTS (vectype)));
3110 *dataref_bump = cse_and_gimplify_to_preheader (loop_vinfo, bump);
3111
3112 /* The offset given in GS_INFO can have pointer type, so use the element
3113 type of the vector instead. */
3114 tree offset_type = TREE_TYPE (gs_info->offset_vectype);
3115
3116 /* Calculate X = DR_STEP / SCALE and convert it to the appropriate type. */
3117 tree step = size_binop (EXACT_DIV_EXPR, unshare_expr (DR_STEP (dr)),
3118 ssize_int (gs_info->scale));
3119 step = fold_convert (offset_type, step);
3120
3121 /* Create {0, X, X*2, X*3, ...}. */
3122 tree offset = fold_build2 (VEC_SERIES_EXPR, gs_info->offset_vectype,
3123 build_zero_cst (offset_type), step);
3124 *vec_offset = cse_and_gimplify_to_preheader (loop_vinfo, offset);
3125}
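/* Worked example (not in the original source): for DR_STEP == 8 bytes
   and gs_info->scale == 4, X = 8 / 4 = 2 and *VEC_OFFSET becomes the
   invariant series { 0, 2, 4, 6, ... }, while *DATAREF_BUMP advances
   the base by 8 * TYPE_VECTOR_SUBPARTS (vectype) bytes per copy.  */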
3126
3127/* Return the amount that should be added to a vector pointer to move
3128 to the next or previous copy of AGGR_TYPE. DR_INFO is the data reference
3129 being vectorized and MEMORY_ACCESS_TYPE describes the type of
3130 vectorization. */
3131
3132static tree
3133vect_get_data_ptr_increment (vec_info *vinfo,
3134 dr_vec_info *dr_info, tree aggr_type,
3135 vect_memory_access_type memory_access_type)
3136{
3137 if (memory_access_type == VMAT_INVARIANT)
3138 return size_zero_node;
3139
3140 tree iv_step = TYPE_SIZE_UNIT (aggr_type);
3141 tree step = vect_dr_behavior (vinfo, dr_info)->step;
3142 if (tree_int_cst_sgn (step) == -1)
3143 iv_step = fold_build1 (NEGATE_EXPR, TREE_TYPE (iv_step), iv_step);
3144 return iv_step;
3145}
3146
3147/* Check and perform vectorization of BUILT_IN_BSWAP{16,32,64,128}. */
3148
3149static bool
3150vectorizable_bswap (vec_info *vinfo,
3151 stmt_vec_info stmt_info, gimple_stmt_iterator *gsi,
3152 gimple **vec_stmt, slp_tree slp_node,
3153 slp_tree *slp_op,
3154 tree vectype_in, stmt_vector_for_cost *cost_vec)
3155{
3156 tree op, vectype;
3157 gcall *stmt = as_a <gcall *> (stmt_info->stmt);
3158 loop_vec_info loop_vinfo = dyn_cast <loop_vec_info> (vinfo);
3159 unsigned ncopies;
3160
3161 op = gimple_call_arg (stmt, 0);
3162 vectype = STMT_VINFO_VECTYPE (stmt_info);
3163 poly_uint64 nunits = TYPE_VECTOR_SUBPARTS (vectype);
3164
3165 /* Multiple types in SLP are handled by creating the appropriate number of
3166 vectorized stmts for each SLP node. Hence, NCOPIES is always 1 in
3167 case of SLP. */
3168 if (slp_node)
3169 ncopies = 1;
3170 else
3171 ncopies = vect_get_num_copies (loop_vinfo, vectype);
3172
3173 gcc_assert (ncopies >= 1);
3174
3175 tree char_vectype = get_same_sized_vectype (char_type_node, vectype_in);
3176 if (! char_vectype)
3177 return false;
3178
3179 poly_uint64 num_bytes = TYPE_VECTOR_SUBPARTS (char_vectype);
3180 unsigned word_bytes;
3181 if (!constant_multiple_p (num_bytes, nunits, &word_bytes))
3182 return false;
3183
3184 /* The encoding uses one stepped pattern for each byte in the word. */
3185 vec_perm_builder elts (num_bytes, word_bytes, 3);
3186 for (unsigned i = 0; i < 3; ++i)
3187 for (unsigned j = 0; j < word_bytes; ++j)
3188 elts.quick_push ((i + 1) * word_bytes - j - 1);
3189
3190 vec_perm_indices indices (elts, 1, num_bytes);
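/* Illustrative example (not in the original source): for
   __builtin_bswap32 on a 16-byte vector, word_bytes == 4 and the three
   stepped patterns above encode the byte-reversing selector
   { 3, 2, 1, 0, 7, 6, 5, 4, 11, 10, 9, 8, 15, 14, 13, 12 }.  */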
3191 machine_mode vmode = TYPE_MODE (char_vectype);
3192 if (!can_vec_perm_const_p (vmode, vmode, indices))
3193 return false;
3194
3195 if (! vec_stmt)
3196 {
3197 if (slp_node
3198 && !vect_maybe_update_slp_op_vectype (slp_op[0], vectype_in))
3199 {
3200 if (dump_enabled_p ())
3201 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
3202 "incompatible vector types for invariants\n");
3203 return false;
3204 }
3205
3206 STMT_VINFO_TYPE (stmt_info) = call_vec_info_type;
3207 DUMP_VECT_SCOPE ("vectorizable_bswap");
3208 record_stmt_cost (cost_vec,
3209 1, vector_stmt, stmt_info, 0, vect_prologue);
3210 record_stmt_cost (cost_vec,
3211 slp_node
3212 ? SLP_TREE_NUMBER_OF_VEC_STMTS (slp_node) : ncopies,
3213 vec_perm, stmt_info, 0, vect_body);
3214 return true;
3215 }
3216
3217 tree bswap_vconst = vec_perm_indices_to_tree (char_vectype, indices);
3218
3219 /* Transform. */
3220 vec<tree> vec_oprnds = vNULL;
3221 vect_get_vec_defs (vinfo, stmt_info, slp_node, ncopies,
3222 op, &vec_oprnds);
3223 /* Arguments are ready.  Create the new vector stmt.  */
3224 unsigned i;
3225 tree vop;
3226 FOR_EACH_VEC_ELT (vec_oprnds, i, vop)
3227 {
3228 gimple *new_stmt;
3229 tree tem = make_ssa_name (char_vectype);
3230 new_stmt = gimple_build_assign (tem, build1 (VIEW_CONVERT_EXPR,
3231 char_vectype, vop));
3232 vect_finish_stmt_generation (vinfo, stmt_info, new_stmt, gsi);
3233 tree tem2 = make_ssa_name (char_vectype);
3234 new_stmt = gimple_build_assign (tem2, VEC_PERM_EXPR,
3235 tem, tem, bswap_vconst);
3236 vect_finish_stmt_generation (vinfo, stmt_info, new_stmt, gsi);
3237 tem = make_ssa_name (vectype);
3238 new_stmt = gimple_build_assign (tem, build1 (VIEW_CONVERT_EXPR,
3239 vectype, tem2));
3240 vect_finish_stmt_generation (vinfo, stmt_info, new_stmt, gsi);
3241 if (slp_node)
3242 SLP_TREE_VEC_STMTS (slp_node).quick_push (new_stmt);
3243 else
3244 STMT_VINFO_VEC_STMTS (stmt_info).safe_push (new_stmt);
3245 }
3246
3247 if (!slp_node)
3248 *vec_stmt = STMT_VINFO_VEC_STMTS (stmt_info)[0];
3249
3250 vec_oprnds.release ();
3251 return true;
3252}
3253
3254/* Return true if vector types VECTYPE_IN and VECTYPE_OUT have
3255 integer elements and if we can narrow VECTYPE_IN to VECTYPE_OUT
3256 in a single step. On success, store the binary pack code in
3257 *CONVERT_CODE. */
3258
3259static bool
3260simple_integer_narrowing (tree vectype_out, tree vectype_in,
3261 tree_code *convert_code)
3262{
3263 if (!INTEGRAL_TYPE_P (TREE_TYPE (vectype_out))
3264 || !INTEGRAL_TYPE_P (TREE_TYPE (vectype_in)))
3265 return false;
3266
3267 tree_code code;
3268 int multi_step_cvt = 0;
3269 auto_vec <tree, 8> interm_types;
3270 if (!supportable_narrowing_operation (NOP_EXPR, vectype_out, vectype_in,
3271 &code, &multi_step_cvt, &interm_types)
3272 || multi_step_cvt)
3273 return false;
3274
3275 *convert_code = code;
3276 return true;
3277}
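/* Illustrative example (not in the original source): narrowing V4DI
   inputs to a V8SI output passes this check when the target can pack
   the two double-width vectors in one step; the pack code stored in
   *CONVERT_CODE is typically VEC_PACK_TRUNC_EXPR.  */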
3278
3279/* Function vectorizable_call.
3280
3281 Check if STMT_INFO performs a function call that can be vectorized.
3282 If VEC_STMT is also passed, vectorize STMT_INFO: create a vectorized
3283 stmt to replace it, put it in VEC_STMT, and insert it at GSI.
3284 Return true if STMT_INFO is vectorizable in this way. */
3285
3286static bool
3287vectorizable_call (vec_info *vinfo,
3288 stmt_vec_info stmt_info, gimple_stmt_iterator *gsi,
3289 gimple **vec_stmt, slp_tree slp_node,
3290 stmt_vector_for_cost *cost_vec)
3291{
3292 gcall *stmt;
3293 tree vec_dest;
3294 tree scalar_dest;
3295 tree op;
3296 tree vec_oprnd0 = NULL_TREE, vec_oprnd1 = NULL_TREE;
3297 tree vectype_out, vectype_in;
3298 poly_uint64 nunits_in;
3299 poly_uint64 nunits_out;
3300 loop_vec_info loop_vinfo = dyn_cast <loop_vec_info> (vinfo);
3301 bb_vec_info bb_vinfo = dyn_cast <bb_vec_info> (vinfo);
3302 tree fndecl, new_temp, rhs_type;
3303 enum vect_def_type dt[4]
3304 = { vect_unknown_def_type, vect_unknown_def_type, vect_unknown_def_type,
3305 vect_unknown_def_type };
3306 tree vectypes[ARRAY_SIZE (dt)] = {};
3307 slp_tree slp_op[ARRAY_SIZE (dt)] = {};
3308 int ndts = ARRAY_SIZE (dt);
3309 int ncopies, j;
3310 auto_vec<tree, 8> vargs;
3311 enum { NARROW, NONE, WIDEN } modifier;
3312 size_t i, nargs;
3313 tree lhs;
3314
3315 if (!STMT_VINFO_RELEVANT_P (stmt_info) && !bb_vinfo)
3316 return false;
3317
3318 if (STMT_VINFO_DEF_TYPE (stmt_info) != vect_internal_def
3319 && ! vec_stmt)
3320 return false;
3321
3322 /* Is STMT_INFO a vectorizable call? */
3323 stmt = dyn_cast <gcall *> (stmt_info->stmt);
3324 if (!stmt)
3325 return false;
3326
3327 if (gimple_call_internal_p (stmt)
3328 && (internal_load_fn_p (gimple_call_internal_fn (stmt))
3329 || internal_store_fn_p (gimple_call_internal_fn (stmt))))
3330 /* Handled by vectorizable_load and vectorizable_store. */
3331 return false;
3332
3333 if (gimple_call_lhs (stmt) == NULL_TREE
3334 || TREE_CODE (gimple_call_lhs (stmt)) != SSA_NAME)
3335 return false;
3336
3337 gcc_checking_assert (!stmt_can_throw_internal (cfun, stmt));
3338
3339 vectype_out = STMT_VINFO_VECTYPE (stmt_info);
3340
3341 /* Process function arguments. */
3342 rhs_type = NULL_TREE;
3343 vectype_in = NULL_TREE;
3344 nargs = gimple_call_num_args (stmt);
3345
3346 /* Bail out if the function has more than four arguments; we do not have
3347 interesting builtin functions to vectorize with more than two arguments
3348 except for fma.  No arguments is also not good.  */
3349 if (nargs == 0 || nargs > 4)
3350 return false;
3351
3352 /* Ignore the arguments of IFN_GOMP_SIMD_LANE, they are magic. */
3353 combined_fn cfn = gimple_call_combined_fn (stmt);
3354 if (cfn == CFN_GOMP_SIMD_LANE)
3355 {
3356 nargs = 0;
3357 rhs_type = unsigned_type_node;
3358 }
3359
3360 int mask_opno = -1;
3361 if (internal_fn_p (cfn))
3362 mask_opno = internal_fn_mask_index (as_internal_fn (cfn));
3363
3364 for (i = 0; i < nargs; i++)
3365 {
3366 if ((int) i == mask_opno)
3367 {
3368 if (!vect_check_scalar_mask (vinfo, stmt_info, slp_node, mask_opno,
3369 &op, &slp_op[i], &dt[i], &vectypes[i]))
3370 return false;
3371 continue;
3372 }
3373
3374 if (!vect_is_simple_use (vinfo, stmt_info, slp_node,
3375 i, &op, &slp_op[i], &dt[i], &vectypes[i]))
3376 {
3377 if (dump_enabled_p ())
3378 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
3379 "use not simple.\n");
3380 return false;
3381 }
3382
3383 /* We can only handle calls with arguments of the same type. */
3384 if (rhs_type
3385 && !types_compatible_p (rhs_type, TREE_TYPE (op)))
3386 {
3387 if (dump_enabled_p ())
3388 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
3389 "argument types differ.\n");
3390 return false;
3391 }
3392 if (!rhs_type)
3393 rhs_type = TREE_TYPE (op);
3394
3395 if (!vectype_in)
3396 vectype_in = vectypes[i];
3397 else if (vectypes[i]
3398 && !types_compatible_p (vectypes[i], vectype_in))
3399 {
3400 if (dump_enabled_p ())
3401 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
3402 "argument vector types differ.\n");
3403 return false;
3404 }
3405 }
3406 /* If all arguments are external or constant defs, infer the vector type
3407 from the scalar type. */
3408 if (!vectype_in)
3409 vectype_in = get_vectype_for_scalar_type (vinfo, rhs_type, slp_node);
3410 if (vec_stmt)
3411 gcc_assert (vectype_in);
3412 if (!vectype_in)
3413 {
3414 if (dump_enabled_p ())
3415 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
3416 "no vectype for scalar type %T\n", rhs_type);
3417
3418 return false;
3419 }
3420 /* FORNOW: we don't yet support mixtures of vector sizes for calls,
3421 just mixtures of nunits. E.g. DI->SI versions of __builtin_ctz*
3422 are traditionally vectorized as two VnDI->VnDI IFN_CTZs followed
3423 by a pack of the two vectors into an SI vector. We would need
3424 separate code to handle direct VnDI->VnSI IFN_CTZs. */
3425 if (TYPE_SIZE (vectype_in) != TYPE_SIZE (vectype_out))
3426 {
3427 if (dump_enabled_p ())
3428 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
3429 "mismatched vector sizes %T and %T\n",
3430 vectype_in, vectype_out);
3431 return false;
3432 }
3433
3434 if (VECTOR_BOOLEAN_TYPE_P (vectype_out)
3435 != VECTOR_BOOLEAN_TYPE_P (vectype_in))
3436 {
3437 if (dump_enabled_p ())
3438 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
3439 "mixed mask and nonmask vector types\n");
3440 return false;
3441 }
3442
3443 if (vect_emulated_vector_p (vectype_in) || vect_emulated_vector_p (vectype_out))
3444 {
3445 if (dump_enabled_p ())
3446 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
3447 "use emulated vector type for call\n");
3448 return false;
3449 }
3450
3451 /* FORNOW */
3452 nunits_in = TYPE_VECTOR_SUBPARTS (vectype_in);
3453 nunits_out = TYPE_VECTOR_SUBPARTS (vectype_out);
3454 if (known_eq (nunits_in * 2, nunits_out))
3455 modifier = NARROW;
3456 else if (known_eq (nunits_out, nunits_in))
3457 modifier = NONE;
3458 else if (known_eq (nunits_out * 2, nunits_in))
3459 modifier = WIDEN;
3460 else
3461 return false;
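/* Illustrative example (not in the original source): V4DI arguments
   with a V8SI result (nunits_in == 4, nunits_out == 8) select NARROW,
   matching element counts select NONE, and V8SI arguments with a V4DI
   result select WIDEN.  */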
3462
3463 /* We only handle functions that do not read or clobber memory. */
3464 if (gimple_vuse (stmt))
3465 {
3466 if (dump_enabled_p ())
3467 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
3468 "function reads from or writes to memory.\n");
3469 return false;
3470 }
3471
3472 /* For now, we only vectorize functions if a target specific builtin
3473 is available. TODO -- in some cases, it might be profitable to
3474 insert the calls for pieces of the vector, in order to be able
3475 to vectorize other operations in the loop. */
3476 fndecl = NULL_TREE;
3477 internal_fn ifn = IFN_LAST;
3478 tree callee = gimple_call_fndecl (stmt);
3479
3480 /* First try using an internal function. */
3481 tree_code convert_code = ERROR_MARK;
3482 if (cfn != CFN_LAST
3483 && (modifier == NONE
3484 || (modifier == NARROW
3485 && simple_integer_narrowing (vectype_out, vectype_in,
3486 &convert_code))))
3487 ifn = vectorizable_internal_function (cfn, callee, vectype_out,
3488 vectype_in);
3489
3490 /* If that fails, try asking for a target-specific built-in function. */
3491 if (ifn == IFN_LAST)
3492 {
3493 if (cfn != CFN_LAST)
3494 fndecl = targetm.vectorize.builtin_vectorized_function
3495 (cfn, vectype_out, vectype_in);
3496 else if (callee && fndecl_built_in_p (callee, BUILT_IN_MD))
3497 fndecl = targetm.vectorize.builtin_md_vectorized_function
3498 (callee, vectype_out, vectype_in);
3499 }
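/* For example, a sqrt call can be vectorized through IFN_SQRT whenever
   the target implements the corresponding optab for the chosen vector
   mode; only when no internal function is available do the two target
   hooks above get a chance to supply a builtin decl instead.  */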
3500
3501 if (ifn == IFN_LAST && !fndecl)
3502 {
3503 if (cfn == CFN_GOMP_SIMD_LANE
3504 && !slp_node
3505 && loop_vinfo
3506 && LOOP_VINFO_LOOP (loop_vinfo)->simduid
3507 && TREE_CODE (gimple_call_arg (stmt, 0)) == SSA_NAME
3508 && LOOP_VINFO_LOOP (loop_vinfo)->simduid
3509 == SSA_NAME_VAR (gimple_call_arg (stmt, 0)))
3510 {
3511 /* We can handle IFN_GOMP_SIMD_LANE by returning a
3512 { 0, 1, 2, ... vf - 1 } vector. */
3513 gcc_assert (nargs == 0);
3514 }
3515 else if (modifier == NONE
3516 && (gimple_call_builtin_p (stmt, BUILT_IN_BSWAP16)
3517 || gimple_call_builtin_p (stmt, BUILT_IN_BSWAP32)
3518 || gimple_call_builtin_p (stmt, BUILT_IN_BSWAP64)
3519 || gimple_call_builtin_p (stmt, BUILT_IN_BSWAP128)))
3520 return vectorizable_bswap (vinfo, stmt_info, gsi, vec_stmt, slp_node,
3521 slp_op, vectype_in, cost_vec);
3522 else
3523 {
3524 if (dump_enabled_p ())
3525 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
3526 "function is not vectorizable.\n");
3527 return false;
3528 }
3529 }
3530
3531 if (slp_node)
3532 ncopies = 1;
3533 else if (modifier == NARROW && ifn == IFN_LAST)
3534 ncopies = vect_get_num_copies (loop_vinfo, vectype_out);
3535 else
3536 ncopies = vect_get_num_copies (loop_vinfo, vectype_in);
3537
3538 /* Sanity check: make sure that at least one copy of the vectorized stmt
3539 needs to be generated. */
3540 gcc_assert (ncopies >= 1);
3541
3542 int reduc_idx = STMT_VINFO_REDUC_IDX (stmt_info);
3543 internal_fn cond_fn = get_conditional_internal_fn (ifn);
3544 vec_loop_masks *masks = (loop_vinfo ? &LOOP_VINFO_MASKS (loop_vinfo) : NULL);
3545 if (!vec_stmt) /* transformation not required. */
3546 {
3547 if (slp_node)
3548 for (i = 0; i < nargs; ++i)
3549 if (!vect_maybe_update_slp_op_vectype (slp_op[i],
3550 vectypes[i]
3551 ? vectypes[i] : vectype_in))
3552 {
3553 if (dump_enabled_p ())
3554 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
3555 "incompatible vector types for invariants\n");
3556 return false;
3557 }
3558 STMT_VINFO_TYPE (stmt_info) = call_vec_info_type;
3559 DUMP_VECT_SCOPE ("vectorizable_call");
3560 vect_model_simple_cost (vinfo, stmt_info,
3561 ncopies, dt, ndts, slp_node, cost_vec);
3562 if (ifn != IFN_LAST && modifier == NARROW && !slp_node)
3563 record_stmt_cost (cost_vec, ncopies / 2,
3564 vec_promote_demote, stmt_info, 0, vect_body);
3565
3566 if (loop_vinfo
3567 && LOOP_VINFO_CAN_USE_PARTIAL_VECTORS_P (loop_vinfo)
3568 && (reduc_idx >= 0 || mask_opno >= 0))
3569 {
3570 if (reduc_idx >= 0
3571 && (cond_fn == IFN_LAST
3572 || !direct_internal_fn_supported_p (cond_fn, vectype_out,
3573 OPTIMIZE_FOR_SPEED)))
3574 {
3575 if (dump_enabled_p ())
3576 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
3577 "can't use a fully-masked loop because no"
3578 " conditional operation is available.\n");
3579 LOOP_VINFO_CAN_USE_PARTIAL_VECTORS_P (loop_vinfo) = false;
3580 }
3581 else
3582 {
3583 unsigned int nvectors
3584 = (slp_node
3585 ? SLP_TREE_NUMBER_OF_VEC_STMTS (slp_node)
3586 : ncopies);
3587 tree scalar_mask = NULL_TREE;
3588 if (mask_opno >= 0)
3589 scalar_mask = gimple_call_arg (stmt_info->stmt, mask_opno);
3590 vect_record_loop_mask (loop_vinfo, masks, nvectors,
3591 vectype_out, scalar_mask);
3592 }
3593 }
3594 return true;
3595 }
3596
3597 /* Transform. */
3598
3599 if (dump_enabled_p ())
3600 dump_printf_loc (MSG_NOTE, vect_location, "transform call.\n");
3601
3602 /* Handle def. */
3603 scalar_dest = gimple_call_lhs (stmt);
3604 vec_dest = vect_create_destination_var (scalar_dest, vectype_out);
3605
3606 bool masked_loop_p = loop_vinfo && LOOP_VINFO_FULLY_MASKED_P (loop_vinfo);
3607 unsigned int vect_nargs = nargs;
3608 if (masked_loop_p && reduc_idx >= 0)
3609 {
3610 ifn = cond_fn;
3611 vect_nargs += 2;
3612 }
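/* The two extra slots hold the loop mask, which becomes the leading
   argument of the conditional internal function, and the "else" value,
   a copy of the reduction input appended after the original arguments
   (see the vargs[reduc_idx + 1] pushes below).  */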
3613
3614 if (modifier == NONE || ifn != IFN_LAST)
3615 {
3616 tree prev_res = NULL_TREE;
3617 vargs.safe_grow (vect_nargs, true);
3618 auto_vec<vec<tree> > vec_defs (nargs);
3619 for (j = 0; j < ncopies; ++j)
3620 {
3621 /* Build argument list for the vectorized call. */
3622 if (slp_node)
3623 {
3624 vec<tree> vec_oprnds0;
3625
3626 vect_get_slp_defs (vinfo, slp_node, &vec_defs);
3627 vec_oprnds0 = vec_defs[0];
3628
3629 /* Arguments are ready. Create the new vector stmt. */
3630 FOR_EACH_VEC_ELT (vec_oprnds0, i, vec_oprnd0)
3631 {
3632 int varg = 0;
3633 if (masked_loop_p && reduc_idx >= 0)
3634 {
3635 unsigned int vec_num = vec_oprnds0.length ();
3636 /* Always true for SLP. */
3637 gcc_assert (ncopies == 1);
3638 vargs[varg++] = vect_get_loop_mask (gsi, masks, vec_num,
3639 vectype_out, i);
3640 }
3641 size_t k;
3642 for (k = 0; k < nargs; k++)
3643 {
3644 vec<tree> vec_oprndsk = vec_defs[k];
3645 vargs[varg++] = vec_oprndsk[i];
3646 }
3647 if (masked_loop_p && reduc_idx >= 0)
3648 vargs[varg++] = vargs[reduc_idx + 1];
3649 gimple *new_stmt;
3650 if (modifier == NARROW)
3651 {
3652 /* We don't define any narrowing conditional functions
3653 at present. */
3654 gcc_assert (mask_opno < 0);
3655 tree half_res = make_ssa_name (vectype_in);
3656 gcall *call
3657 = gimple_build_call_internal_vec (ifn, vargs);
3658 gimple_call_set_lhs (call, half_res);
3659 gimple_call_set_nothrow (call, true);
3660 vect_finish_stmt_generation (vinfo, stmt_info, call, gsi);
3661 if ((i & 1) == 0)
3662 {
3663 prev_res = half_res;
3664 continue;
3665 }
3666 new_temp = make_ssa_name (vec_dest);
3667 new_stmt = gimple_build_assign (new_temp, convert_code,
3668 prev_res, half_res);
3669 vect_finish_stmt_generation (vinfo, stmt_info,
3670 new_stmt, gsi);
3671 }
3672 else
3673 {
3674 if (mask_opno >= 0 && masked_loop_p)
3675 {
3676 unsigned int vec_num = vec_oprnds0.length ();
3677 /* Always true for SLP. */
3678 gcc_assert (ncopies == 1)((void)(!(ncopies == 1) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.cc"
, 3678, __FUNCTION__), 0 : 0))
;
3679 tree mask = vect_get_loop_mask (gsi, masks, vec_num,
3680 vectype_out, i);
3681 vargs[mask_opno] = prepare_vec_mask
3682 (loop_vinfo, TREE_TYPE (mask), mask,
3683 vargs[mask_opno], gsi);
3684 }
3685
3686 gcall *call;
3687 if (ifn != IFN_LAST)
3688 call = gimple_build_call_internal_vec (ifn, vargs);
3689 else
3690 call = gimple_build_call_vec (fndecl, vargs);
3691 new_temp = make_ssa_name (vec_dest, call);
3692 gimple_call_set_lhs (call, new_temp);
3693 gimple_call_set_nothrow (call, true);
3694 vect_finish_stmt_generation (vinfo, stmt_info, call, gsi);
3695 new_stmt = call;
3696 }
3697 SLP_TREE_VEC_STMTS (slp_node).quick_push (new_stmt);
3698 }
3699 continue;
3700 }
3701
3702 int varg = 0;
3703 if (masked_loop_p && reduc_idx >= 0)
3704 vargs[varg++] = vect_get_loop_mask (gsi, masks, ncopies,
3705 vectype_out, j);
3706 for (i = 0; i < nargs; i++)
3707 {
3708 op = gimple_call_arg (stmt, i);
3709 if (j == 0)
3710 {
3711 vec_defs.quick_push (vNULL);
3712 vect_get_vec_defs_for_operand (vinfo, stmt_info, ncopies,
3713 op, &vec_defs[i],
3714 vectypes[i]);
3715 }
3716 vargs[varg++] = vec_defs[i][j];
3717 }
3718 if (masked_loop_p && reduc_idx >= 0)
3719 vargs[varg++] = vargs[reduc_idx + 1];
3720
3721 if (mask_opno >= 0 && masked_loop_p)
3722 {
3723 tree mask = vect_get_loop_mask (gsi, masks, ncopies,
3724 vectype_out, j);
3725 vargs[mask_opno]
3726 = prepare_vec_mask (loop_vinfo, TREE_TYPE (mask), mask,
3727 vargs[mask_opno], gsi);
3728 }
3729
3730 gimple *new_stmt;
3731 if (cfn == CFN_GOMP_SIMD_LANE)
3732 {
3733 tree cst = build_index_vector (vectype_out, j * nunits_out, 1);
3734 tree new_var
3735 = vect_get_new_ssa_name (vectype_out, vect_simple_var, "cst_");
3736 gimple *init_stmt = gimple_build_assign (new_var, cst);
3737 vect_init_vector_1 (vinfo, stmt_info, init_stmt, NULL);
3738 new_temp = make_ssa_name (vec_dest);
3739 new_stmt = gimple_build_assign (new_temp, new_var);
3740 vect_finish_stmt_generation (vinfo, stmt_info, new_stmt, gsi);
3741 }
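/* E.g. with nunits_out == 4 and ncopies == 2 (vf == 8), the two copies
   materialize the constant vectors { 0, 1, 2, 3 } and { 4, 5, 6, 7 }:
   copy j starts its lane-index sequence at j * nunits_out.  */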
3742 else if (modifier == NARROW)
3743 {
3744 /* We don't define any narrowing conditional functions at
3745 present. */
3746 gcc_assert (mask_opno < 0);
3747 tree half_res = make_ssa_name (vectype_in);
3748 gcall *call = gimple_build_call_internal_vec (ifn, vargs);
3749 gimple_call_set_lhs (call, half_res);
3750 gimple_call_set_nothrow (call, true);
3751 vect_finish_stmt_generation (vinfo, stmt_info, call, gsi);
3752 if ((j & 1) == 0)
3753 {
3754 prev_res = half_res;
3755 continue;
3756 }
3757 new_temp = make_ssa_name (vec_dest);
3758 new_stmt = gimple_build_assign (new_temp, convert_code,
3759 prev_res, half_res);
3760 vect_finish_stmt_generation (vinfo, stmt_info, new_stmt, gsi);
3761 }
3762 else
3763 {
3764 gcall *call;
3765 if (ifn != IFN_LAST)
3766 call = gimple_build_call_internal_vec (ifn, vargs);
3767 else
3768 call = gimple_build_call_vec (fndecl, vargs);
3769 new_temp = make_ssa_name (vec_dest, call);
3770 gimple_call_set_lhs (call, new_temp);
3771 gimple_call_set_nothrow (call, true);
3772 vect_finish_stmt_generation (vinfo, stmt_info, call, gsi);
3773 new_stmt = call;
3774 }
3775
3776 if (j == (modifier == NARROW ? 1 : 0))
3777 *vec_stmt = new_stmt;
3778 STMT_VINFO_VEC_STMTS (stmt_info).safe_push (new_stmt);
3779 }
3780 for (i = 0; i < nargs; i++)
3781 {
3782 vec<tree> vec_oprndsi = vec_defs[i];
3783 vec_oprndsi.release ();
3784 }
3785 }
3786 else if (modifier == NARROW)
3787 {
3788 auto_vec<vec<tree> > vec_defs (nargs);
3789 /* We don't define any narrowing conditional functions at present. */
3790 gcc_assert (mask_opno < 0);
3791 for (j = 0; j < ncopies; ++j)
3792 {
3793 /* Build argument list for the vectorized call. */
3794 if (j == 0)
3795 vargs.create (nargs * 2);
3796 else
3797 vargs.truncate (0);
3798
3799 if (slp_node)
3800 {
3801 vec<tree> vec_oprnds0;
3802
3803 vect_get_slp_defs (vinfo, slp_node, &vec_defs);
3804 vec_oprnds0 = vec_defs[0];
3805
3806 /* Arguments are ready. Create the new vector stmt. */
3807 for (i = 0; vec_oprnds0.iterate (i, &vec_oprnd0); i += 2)
3808 {
3809 size_t k;
3810 vargs.truncate (0);
3811 for (k = 0; k < nargs; k++)
3812 {
3813 vec<tree> vec_oprndsk = vec_defs[k];
3814 vargs.quick_push (vec_oprndsk[i]);
3815 vargs.quick_push (vec_oprndsk[i + 1]);
3816 }
3817 gcall *call;
3818 if (ifn != IFN_LAST)
3819 call = gimple_build_call_internal_vec (ifn, vargs);
3820 else
3821 call = gimple_build_call_vec (fndecl, vargs);
3822 new_temp = make_ssa_name (vec_dest, call);
3823 gimple_call_set_lhs (call, new_temp);
3824 gimple_call_set_nothrow (call, true);
3825 vect_finish_stmt_generation (vinfo, stmt_info, call, gsi);
3826 SLP_TREE_VEC_STMTS (slp_node).quick_push (call);
3827 }
3828 continue;
3829 }
3830
3831 for (i = 0; i < nargs; i++)
3832 {
3833 op = gimple_call_arg (stmt, i);
3834 if (j == 0)
3835 {
3836 vec_defs.quick_push (vNULL);
3837 vect_get_vec_defs_for_operand (vinfo, stmt_info, 2 * ncopies,
3838 op, &vec_defs[i], vectypes[i]);
3839 }
3840 vec_oprnd0 = vec_defs[i][2*j];
3841 vec_oprnd1 = vec_defs[i][2*j+1];
3842
3843 vargs.quick_push (vec_oprnd0);
3844 vargs.quick_push (vec_oprnd1);
3845 }
3846
3847 gcall *new_stmt = gimple_build_call_vec (fndecl, vargs);
3848 new_temp = make_ssa_name (vec_dest, new_stmt);
3849 gimple_call_set_lhs (new_stmt, new_temp);
3850 vect_finish_stmt_generation (vinfo, stmt_info, new_stmt, gsi);
3851
3852 STMT_VINFO_VEC_STMTS (stmt_info).safe_push (new_stmt);
3853 }
3854
3855 if (!slp_node)
3856 *vec_stmt = STMT_VINFO_VEC_STMTS (stmt_info)[0];
3857
3858 for (i = 0; i < nargs; i++)
3859 {
3860 vec<tree> vec_oprndsi = vec_defs[i];
3861 vec_oprndsi.release ();
3862 }
3863 }
3864 else
3865 /* No current target implements this case. */
3866 return false;
3867
3868 vargs.release ();
3869
3870 /* The call in STMT might prevent it from being removed in dce.
3871 However, we cannot remove it here, due to the way the SSA name
3872 it defines is mapped to the new definition. So just replace
3873 rhs of the statement with something harmless. */
3874
3875 if (slp_node)
3876 return true;
3877
3878 stmt_info = vect_orig_stmt (stmt_info);
3879 lhs = gimple_get_lhs (stmt_info->stmt);
3880
3881 gassign *new_stmt
3882 = gimple_build_assign (lhs, build_zero_cst (TREE_TYPE (lhs)));
3883 vinfo->replace_stmt (gsi, stmt_info, new_stmt);
3884
3885 return true;
3886}
3887
3888
3889struct simd_call_arg_info
3890{
3891 tree vectype;
3892 tree op;
3893 HOST_WIDE_INT linear_step;
3894 enum vect_def_type dt;
3895 unsigned int align;
3896 bool simd_lane_linear;
3897};
3898
3899/* Helper function of vectorizable_simd_clone_call. If OP, an SSA_NAME,
3900 is linear within simd lane (but not within whole loop), note it in
3901 *ARGINFO. */
3902
3903static void
3904vect_simd_lane_linear (tree op, class loop *loop,
3905 struct simd_call_arg_info *arginfo)
3906{
3907 gimple *def_stmt = SSA_NAME_DEF_STMT (op);
3908
3909 if (!is_gimple_assign (def_stmt)
3910 || gimple_assign_rhs_code (def_stmt) != POINTER_PLUS_EXPR
3911 || !is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
3912 return;
3913
3914 tree base = gimple_assign_rhs1 (def_stmt);
3915 HOST_WIDE_INT linear_step = 0;
3916 tree v = gimple_assign_rhs2 (def_stmt);
3917 while (TREE_CODE (v) == SSA_NAME)
3918 {
3919 tree t;
3920 def_stmt = SSA_NAME_DEF_STMT (v);
3921 if (is_gimple_assign (def_stmt))
3922 switch (gimple_assign_rhs_code (def_stmt))
3923 {
3924 case PLUS_EXPR:
3925 t = gimple_assign_rhs2 (def_stmt);
3926 if (linear_step || TREE_CODE (t) != INTEGER_CST)
3927 return;
3928 base = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (base), base, t);
3929 v = gimple_assign_rhs1 (def_stmt);
3930 continue;
3931 case MULT_EXPR:
3932 t = gimple_assign_rhs2 (def_stmt);
3933 if (linear_step || !tree_fits_shwi_p (t) || integer_zerop (t))
3934 return;
3935 linear_step = tree_to_shwi (t);
3936 v = gimple_assign_rhs1 (def_stmt);
3937 continue;
3938 CASE_CONVERT:
3939 t = gimple_assign_rhs1 (def_stmt);
3940 if (TREE_CODE (TREE_TYPE (t)) != INTEGER_TYPE
3941 || (TYPE_PRECISION (TREE_TYPE (v))
3942 < TYPE_PRECISION (TREE_TYPE (t))))
3943 return;
3944 if (!linear_step)
3945 linear_step = 1;
3946 v = t;
3947 continue;
3948 default:
3949 return;
3950 }
3951 else if (gimple_call_internal_p (def_stmt, IFN_GOMP_SIMD_LANE)
3952 && loop->simduid
3953 && TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME
3954 && (SSA_NAME_VAR (gimple_call_arg (def_stmt, 0))
3955 == loop->simduid))
3956 {
3957 if (!linear_step)
3958 linear_step = 1;
3959 arginfo->linear_step = linear_step;
3960 arginfo->op = base;
3961 arginfo->simd_lane_linear = true;
3962 return;
3963 }
3964 }
3965}
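/* As an illustration, for a definition chain like

     _lane = GOMP_SIMD_LANE (simduid);
     _t = (sizetype) _lane;
     _off = _t * 8;
     op = &array + _off;

   the walk above ends at the IFN_GOMP_SIMD_LANE call and records
   base == &array and linear_step == 8 in *ARGINFO, i.e. OP advances
   by 8 bytes per SIMD lane.  */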
3966
3967/* Return the number of elements in vector type VECTYPE, which is associated
3968 with a SIMD clone. At present these vectors always have a constant
3969 length. */
3970
3971 static unsigned HOST_WIDE_INT
3972simd_clone_subparts (tree vectype)
3973{
3974 return TYPE_VECTOR_SUBPARTS (vectype).to_constant ();
3975}
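/* The unconditional to_constant () is safe for the current callers:
   vectorizable_simd_clone_call below bails out early on variable-length
   vectors (the !vf.is_constant () check).  */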
3976
3977/* Function vectorizable_simd_clone_call.
3978
3979 Check if STMT_INFO performs a function call that can be vectorized
3980 by calling a simd clone of the function.
3981 If VEC_STMT is also passed, vectorize STMT_INFO: create a vectorized
3982 stmt to replace it, put it in VEC_STMT, and insert it at GSI.
3983 Return true if STMT_INFO is vectorizable in this way. */
3984
3985static bool
3986vectorizable_simd_clone_call (vec_info *vinfo, stmt_vec_info stmt_info,
3987 gimple_stmt_iterator *gsi,
3988 gimple **vec_stmt, slp_tree slp_node,
3989 stmt_vector_for_cost *)
3990{
3991 tree vec_dest;
3992 tree scalar_dest;
3993 tree op, type;
3994 tree vec_oprnd0 = NULL_TREE;
3995 tree vectype;
3996 poly_uint64 nunits;
3997 loop_vec_info loop_vinfo = dyn_cast <loop_vec_info> (vinfo);
3998 bb_vec_info bb_vinfo = dyn_cast <bb_vec_info> (vinfo);
3999 class loop *loop = loop_vinfo ? LOOP_VINFO_LOOP (loop_vinfo) : NULL;
4000 tree fndecl, new_temp;
4001 int ncopies, j;
4002 auto_vec<simd_call_arg_info> arginfo;
4003 vec<tree> vargs = vNULL;
4004 size_t i, nargs;
4005 tree lhs, rtype, ratype;
4006 vec<constructor_elt, va_gc> *ret_ctor_elts = NULL;
4007 int arg_offset = 0;
4008
4009 /* Is STMT a vectorizable call? */
4010 gcall *stmt = dyn_cast <gcall *> (stmt_info->stmt);
4011 if (!stmt)
4012 return false;
4013
4014 fndecl = gimple_call_fndecl (stmt);
4015 if (fndecl == NULL_TREE
4016 && gimple_call_internal_p (stmt, IFN_MASK_CALL))
4017 {
4018 fndecl = gimple_call_arg (stmt, 0);
4019 gcc_checking_assert (TREE_CODE (fndecl) == ADDR_EXPR);
4020 fndecl = TREE_OPERAND (fndecl, 0);
4021 gcc_checking_assert (TREE_CODE (fndecl) == FUNCTION_DECL);
4022 arg_offset = 1;
4023 }
4024 if (fndecl == NULL_TREE)
4025 return false;
4026
4027 struct cgraph_node *node = cgraph_node::get (fndecl);
4028 if (node == NULL || node->simd_clones == NULL)
4029 return false;
4030
4031 if (!STMT_VINFO_RELEVANT_P (stmt_info) && !bb_vinfo)
4032 return false;
4033
4034 if (STMT_VINFO_DEF_TYPE (stmt_info) != vect_internal_def
4035 && ! vec_stmt)
4036 return false;
4037
4038 if (gimple_call_lhs (stmt)
4039 && TREE_CODE (gimple_call_lhs (stmt)) != SSA_NAME)
4040 return false;
4041
4042 gcc_checking_assert (!stmt_can_throw_internal (cfun, stmt));
4043
4044 vectype = STMT_VINFO_VECTYPE (stmt_info);
4045
4046 if (loop_vinfo && nested_in_vect_loop_p (loop, stmt_info))
4047 return false;
4048
4049 /* FORNOW */
4050 if (slp_node)
4051 return false;
4052
4053 /* Process function arguments. */
4054 nargs = gimple_call_num_args (stmt) - arg_offset;
4055
4056 /* Bail out if the function has zero arguments. */
4057 if (nargs == 0)
4058 return false;
4059
4060 arginfo.reserve (nargs, true);
4061
4062 for (i = 0; i < nargs; i++)
4063 {
4064 simd_call_arg_info thisarginfo;
4065 affine_iv iv;
4066
4067 thisarginfo.linear_step = 0;
4068 thisarginfo.align = 0;
4069 thisarginfo.op = NULL_TREE;
4070 thisarginfo.simd_lane_linear = false;
4071
4072 op = gimple_call_arg (stmt, i + arg_offset);
4073 if (!vect_is_simple_use (op, vinfo, &thisarginfo.dt,
4074 &thisarginfo.vectype)
4075 || thisarginfo.dt == vect_uninitialized_def)
4076 {
4077 if (dump_enabled_p ())
4078 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
4079 "use not simple.\n");
4080 return false;
4081 }
4082
4083 if (thisarginfo.dt == vect_constant_def
4084 || thisarginfo.dt == vect_external_def)
4085 gcc_assert (thisarginfo.vectype == NULL_TREE);
4086 else
4087 gcc_assert (thisarginfo.vectype != NULL_TREE);
4088
4089 /* For linear arguments, the analyze phase should have saved
4090 the base and step in STMT_VINFO_SIMD_CLONE_INFO. */
4091 if (i * 3 + 4 <= STMT_VINFO_SIMD_CLONE_INFO (stmt_info).length ()
4092 && STMT_VINFO_SIMD_CLONE_INFO (stmt_info)[i * 3 + 2])
4093 {
4094 gcc_assert (vec_stmt);
4095 thisarginfo.linear_step
4096 = tree_to_shwi (STMT_VINFO_SIMD_CLONE_INFO (stmt_info)[i * 3 + 2]);
4097 thisarginfo.op
4098 = STMT_VINFO_SIMD_CLONE_INFO (stmt_info)[i * 3 + 1];
4099 thisarginfo.simd_lane_linear
4100 = (STMT_VINFO_SIMD_CLONE_INFO (stmt_info)[i * 3 + 3]
4101 == boolean_true_node);
4102 /* If loop has been peeled for alignment, we need to adjust it. */
4103 tree n1 = LOOP_VINFO_NITERS_UNCHANGED (loop_vinfo);
4104 tree n2 = LOOP_VINFO_NITERS (loop_vinfo);
4105 if (n1 != n2 && !thisarginfo.simd_lane_linear)
4106 {
4107 tree bias = fold_build2 (MINUS_EXPR, TREE_TYPE (n1), n1, n2);
4108 tree step = STMT_VINFO_SIMD_CLONE_INFO (stmt_info)[i * 3 + 2];
4109 tree opt = TREE_TYPE (thisarginfo.op);
4110 bias = fold_convert (TREE_TYPE (step), bias);
4111 bias = fold_build2 (MULT_EXPR, TREE_TYPE (step), bias, step);
4112 thisarginfo.op
4113 = fold_build2 (POINTER_TYPE_P (opt)
4114 ? POINTER_PLUS_EXPR : PLUS_EXPR, opt,
4115 thisarginfo.op, bias);
4116 }
4117 }
4118 else if (!vec_stmt
4119 && thisarginfo.dt != vect_constant_def
4120 && thisarginfo.dt != vect_external_def
4121 && loop_vinfo
4122 && TREE_CODE (op) == SSA_NAME
4123 && simple_iv (loop, loop_containing_stmt (stmt), op,
4124 &iv, false)
4125 && tree_fits_shwi_p (iv.step))
4126 {
4127 thisarginfo.linear_step = tree_to_shwi (iv.step);
4128 thisarginfo.op = iv.base;
4129 }
4130 else if ((thisarginfo.dt == vect_constant_def
4131 || thisarginfo.dt == vect_external_def)
4132 && POINTER_TYPE_P (TREE_TYPE (op)))
4133 thisarginfo.align = get_pointer_alignment (op) / BITS_PER_UNIT;
4134 /* Addresses of array elements indexed by GOMP_SIMD_LANE are
4135 linear too. */
4136 if (POINTER_TYPE_P (TREE_TYPE (op))
4137 && !thisarginfo.linear_step
4138 && !vec_stmt
4139 && thisarginfo.dt != vect_constant_def
4140 && thisarginfo.dt != vect_external_def
4141 && loop_vinfo
4142 && !slp_node
4143 && TREE_CODE (op) == SSA_NAME)
4144 vect_simd_lane_linear (op, loop, &thisarginfo);
4145
4146 arginfo.quick_push (thisarginfo);
4147 }
4148
4149 poly_uint64 vf = LOOP_VINFO_VECT_FACTOR (loop_vinfo);
4150 if (!vf.is_constant ())
4151 {
4152 if (dump_enabled_p ())
4153 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
4154 "not considering SIMD clones; not yet supported"
4155 " for variable-width vectors.\n");
4156 return false;
4157 }
4158
4159 unsigned int badness = 0;
4160 struct cgraph_node *bestn = NULL;
4161 if (STMT_VINFO_SIMD_CLONE_INFO (stmt_info).exists ())
4162 bestn = cgraph_node::get (STMT_VINFO_SIMD_CLONE_INFO (stmt_info)[0]);
4163 else
4164 for (struct cgraph_node *n = node->simd_clones; n != NULL;
4165 n = n->simdclone->next_clone)
4166 {
4167 unsigned int this_badness = 0;
4168 unsigned int num_calls;
4169 if (!constant_multiple_p (vf, n->simdclone->simdlen, &num_calls)
4170 || n->simdclone->nargs != nargs)
4171 continue;
4172 if (num_calls != 1)
4173 this_badness += exact_log2 (num_calls) * 4096;
4174 if (n->simdclone->inbranch)
4175 this_badness += 8192;
4176 int target_badness = targetm.simd_clone.usable (n);
4177 if (target_badness < 0)
4178 continue;
4179 this_badness += target_badness * 512;
4180 for (i = 0; i < nargs; i++)
4181 {
4182 switch (n->simdclone->args[i].arg_type)
4183 {
4184 case SIMD_CLONE_ARG_TYPE_VECTOR:
4185 if (!useless_type_conversion_p
4186 (n->simdclone->args[i].orig_type,
4187 TREE_TYPE (gimple_call_arg (stmt, i + arg_offset))))
4188 i = -1;
4189 else if (arginfo[i].dt == vect_constant_def
4190 || arginfo[i].dt == vect_external_def
4191 || arginfo[i].linear_step)
4192 this_badness += 64;
4193 break;
4194 case SIMD_CLONE_ARG_TYPE_UNIFORM:
4195 if (arginfo[i].dt != vect_constant_def
4196 && arginfo[i].dt != vect_external_def)
4197 i = -1;
4198 break;
4199 case SIMD_CLONE_ARG_TYPE_LINEAR_CONSTANT_STEP:
4200 case SIMD_CLONE_ARG_TYPE_LINEAR_REF_CONSTANT_STEP:
4201 if (arginfo[i].dt == vect_constant_def
4202 || arginfo[i].dt == vect_external_def
4203 || (arginfo[i].linear_step
4204 != n->simdclone->args[i].linear_step))
4205 i = -1;
4206 break;
4207 case SIMD_CLONE_ARG_TYPE_LINEAR_VARIABLE_STEP:
4208 case SIMD_CLONE_ARG_TYPE_LINEAR_VAL_CONSTANT_STEP:
4209 case SIMD_CLONE_ARG_TYPE_LINEAR_UVAL_CONSTANT_STEP:
4210 case SIMD_CLONE_ARG_TYPE_LINEAR_REF_VARIABLE_STEP:
4211 case SIMD_CLONE_ARG_TYPE_LINEAR_VAL_VARIABLE_STEP:
4212 case SIMD_CLONE_ARG_TYPE_LINEAR_UVAL_VARIABLE_STEP:
4213 /* FORNOW */
4214 i = -1;
4215 break;
4216 case SIMD_CLONE_ARG_TYPE_MASK:
4217 break;
4218 }
4219 if (i == (size_t) -1)
4220 break;
4221 if (n->simdclone->args[i].alignment > arginfo[i].align)
4222 {
4223 i = -1;
4224 break;
4225 }
4226 if (arginfo[i].align)
4227 this_badness += (exact_log2 (arginfo[i].align)
4228 - exact_log2 (n->simdclone->args[i].alignment));
4229 }
4230 if (i == (size_t) -1)
4231 continue;
4232 if (bestn == NULL || this_badness < badness)
4233 {
4234 bestn = n;
4235 badness = this_badness;
4236 }
4237 }
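/* The badness weights above order the preferences: each doubling of the
   number of calls needed to cover VF costs 4096, an in-branch (masked)
   clone costs a flat 8192, target-reported badness is scaled by 512,
   and a vector argument that must be rebuilt from constants, externals
   or linear steps costs 64.  So, all else being equal, a clone that
   needs two calls (badness 4096) still beats an in-branch clone
   (badness 8192).  */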
4238
4239 if (bestn == NULL)
4240 return false;
4241
4242 for (i = 0; i < nargs; i++)
4243 {
4244 if ((arginfo[i].dt == vect_constant_def
4245 || arginfo[i].dt == vect_external_def)
4246 && bestn->simdclone->args[i].arg_type == SIMD_CLONE_ARG_TYPE_VECTOR)
4247 {
4248 tree arg_type = TREE_TYPE (gimple_call_arg (stmt, i + arg_offset));
4249 arginfo[i].vectype = get_vectype_for_scalar_type (vinfo, arg_type,
4250 slp_node);
4251 if (arginfo[i].vectype == NULL
4252 || !constant_multiple_p (bestn->simdclone->simdlen,
4253 simd_clone_subparts (arginfo[i].vectype)))
4254 return false;
4255 }
4256
4257 if (bestn->simdclone->args[i].arg_type == SIMD_CLONE_ARG_TYPE_VECTOR
4258 && VECTOR_BOOLEAN_TYPE_P (bestn->simdclone->args[i].vector_type))
4259 {
4260 if (dump_enabled_p ())
4261 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
4262 "vector mask arguments are not supported.\n");
4263 return false;
4264 }
4265
4266 if (bestn->simdclone->args[i].arg_type == SIMD_CLONE_ARG_TYPE_MASK
4267 && bestn->simdclone->mask_mode == VOIDmode
4268 && (simd_clone_subparts (bestn->simdclone->args[i].vector_type)
4269 != simd_clone_subparts (arginfo[i].vectype)))
4270 {
4271 /* FORNOW we only have partial support for vector-type masks that
4272 can't hold all of simdlen. */
4273 if (dump_enabled_p ())
4274 dump_printf_loc (MSG_MISSED_OPTIMIZATION,
4275 vect_location,
4276 "in-branch vector clones are not yet"
4277 " supported for mismatched vector sizes.\n");
4278 return false;
4279 }
4280 if (bestn->simdclone->args[i].arg_type == SIMD_CLONE_ARG_TYPE_MASK
4281 && bestn->simdclone->mask_mode != VOIDmode)
4282 {
4283 /* FORNOW don't support integer-type masks. */
4284 if (dump_enabled_p ())
4285 dump_printf_loc (MSG_MISSED_OPTIMIZATION,
4286 vect_location,
4287 "in-branch vector clones are not yet"
4288 " supported for integer mask modes.\n");
4289 return false;
4290 }
4291 }
4292
4293 fndecl = bestn->decl;
4294 nunits = bestn->simdclone->simdlen;
4295 ncopies = vector_unroll_factor (vf, nunits);
4296
4297 /* If the function isn't const, only allow it in simd loops where the
4298 user has asserted that at least nunits consecutive iterations can be
4299 performed using SIMD instructions. */
4300 if ((loop == NULL || maybe_lt ((unsigned) loop->safelen, nunits))
4301 && gimple_vuse (stmt))
4302 return false;
4303
4304 /* Sanity check: make sure that at least one copy of the vectorized stmt
4305 needs to be generated. */
4306 gcc_assert (ncopies >= 1);
4307
4308 if (!vec_stmt) /* transformation not required. */
4309 {
4310 /* When the original call is pure or const but the SIMD ABI dictates
4311 an aggregate return we will have to use a virtual definition and
4312 in a loop eventually even need to add a virtual PHI. That's
4313 not straightforward, so allow it to be fixed up via renaming. */
4314 if (gimple_call_lhs (stmt)
4315 && !gimple_vdef (stmt)
4316 && TREE_CODE (TREE_TYPE (TREE_TYPE (bestn->decl))) == ARRAY_TYPE)
4317 vinfo->any_known_not_updated_vssa = true;
4318 STMT_VINFO_SIMD_CLONE_INFO (stmt_info).safe_push (bestn->decl);
4319 for (i = 0; i < nargs; i++)
4320 if ((bestn->simdclone->args[i].arg_type
4321 == SIMD_CLONE_ARG_TYPE_LINEAR_CONSTANT_STEP)
4322 || (bestn->simdclone->args[i].arg_type
4323 == SIMD_CLONE_ARG_TYPE_LINEAR_REF_CONSTANT_STEP))
4324 {
4325 STMT_VINFO_SIMD_CLONE_INFO (stmt_info).safe_grow_cleared (i * 3
4326 + 1,
4327 true);
4328 STMT_VINFO_SIMD_CLONE_INFO (stmt_info).safe_push (arginfo[i].op);
4329 tree lst = POINTER_TYPE_P (TREE_TYPE (arginfo[i].op))
4330 ? size_type_node : TREE_TYPE (arginfo[i].op);
4331 tree ls = build_int_cst (lst, arginfo[i].linear_step);
4332 STMT_VINFO_SIMD_CLONE_INFO (stmt_info).safe_push (ls);
4333 tree sll = arginfo[i].simd_lane_linear
4334 ? boolean_true_node : boolean_false_node;
4335 STMT_VINFO_SIMD_CLONE_INFO (stmt_info).safe_push (sll);
4336 }
4337 STMT_VINFO_TYPE (stmt_info) = call_simd_clone_vec_info_type;
4338 DUMP_VECT_SCOPE ("vectorizable_simd_clone_call");
4339/* vect_model_simple_cost (vinfo, stmt_info, ncopies,
4340 dt, slp_node, cost_vec); */
4341 return true;
4342 }
4343
4344 /* Transform. */
4345
4346 if (dump_enabled_p ())
4347 dump_printf_loc (MSG_NOTE, vect_location, "transform call.\n");
4348
4349 /* Handle def. */
4350 scalar_dest = gimple_call_lhs (stmt);
4351 vec_dest = NULL_TREE;
4352 rtype = NULL_TREE;
4353 ratype = NULL_TREE;
4354 if (scalar_dest)
4355 {
4356 vec_dest = vect_create_destination_var (scalar_dest, vectype);
4357 rtype = TREE_TYPE (TREE_TYPE (fndecl));
4358 if (TREE_CODE (rtype) == ARRAY_TYPE)
4359 {
4360 ratype = rtype;
4361 rtype = TREE_TYPE (ratype);
4362 }
4363 }
4364
4365 auto_vec<vec<tree> > vec_oprnds;
4366 auto_vec<unsigned> vec_oprnds_i;
4367 vec_oprnds.safe_grow_cleared (nargs, true);
4368 vec_oprnds_i.safe_grow_cleared (nargs, true);
4369 for (j = 0; j < ncopies; ++j)
4370 {
4371 /* Build argument list for the vectorized call. */
4372 if (j == 0)
4373 vargs.create (nargs);
4374 else
4375 vargs.truncate (0);
4376
4377 for (i = 0; i < nargs; i++)
4378 {
4379 unsigned int k, l, m, o;
4380 tree atype;
4381 op = gimple_call_arg (stmt, i + arg_offset);
4382 switch (bestn->simdclone->args[i].arg_type)
4383 {
4384 case SIMD_CLONE_ARG_TYPE_VECTOR:
4385 atype = bestn->simdclone->args[i].vector_type;
4386 o = vector_unroll_factor (nunits,
4387 simd_clone_subparts (atype));
4388 for (m = j * o; m < (j + 1) * o; m++)
4389 {
4390 if (simd_clone_subparts (atype)
4391 < simd_clone_subparts (arginfo[i].vectype))
4392 {
4393 poly_uint64 prec = GET_MODE_BITSIZE (TYPE_MODE (atype));
4394 k = (simd_clone_subparts (arginfo[i].vectype)
4395 / simd_clone_subparts (atype));
4396 gcc_assert ((k & (k - 1)) == 0);
4397 if (m == 0)
4398 {
4399 vect_get_vec_defs_for_operand (vinfo, stmt_info,
4400 ncopies * o / k, op,
4401 &vec_oprnds[i]);
4402 vec_oprnds_i[i] = 0;
4403 vec_oprnd0 = vec_oprnds[i][vec_oprnds_i[i]++];
4404 }
4405 else
4406 {
4407 vec_oprnd0 = arginfo[i].op;
4408 if ((m & (k - 1)) == 0)
4409 vec_oprnd0 = vec_oprnds[i][vec_oprnds_i[i]++];
4410 }
4411 arginfo[i].op = vec_oprnd0;
4412 vec_oprnd0
4413 = build3 (BIT_FIELD_REF, atype, vec_oprnd0,
4414 bitsize_int (prec),
4415 bitsize_int ((m & (k - 1)) * prec));
4416 gassign *new_stmt
4417 = gimple_build_assign (make_ssa_name (atype),
4418 vec_oprnd0);
4419 vect_finish_stmt_generation (vinfo, stmt_info,
4420 new_stmt, gsi);
4421 vargs.safe_push (gimple_assign_lhs (new_stmt));
4422 }
4423 else
4424 {
4425 k = (simd_clone_subparts (atype)
4426 / simd_clone_subparts (arginfo[i].vectype));
4427 gcc_assert ((k & (k - 1)) == 0);
4428 vec<constructor_elt, va_gc> *ctor_elts;
4429 if (k != 1)
4430 vec_alloc (ctor_elts, k);
4431 else
4432 ctor_elts = NULL;
4433 for (l = 0; l < k; l++)
4434 {
4435 if (m == 0 && l == 0)
4436 {
4437 vect_get_vec_defs_for_operand (vinfo, stmt_info,
4438 k * o * ncopies,
4439 op,
4440 &vec_oprnds[i]);
4441 vec_oprnds_i[i] = 0;
4442 vec_oprnd0 = vec_oprnds[i][vec_oprnds_i[i]++];
4443 }
4444 else
4445 vec_oprnd0 = vec_oprnds[i][vec_oprnds_i[i]++];
4446 arginfo[i].op = vec_oprnd0;
4447 if (k == 1)
4448 break;
4449 CONSTRUCTOR_APPEND_ELT (ctor_elts, NULL_TREE,
4450 vec_oprnd0);
4451 }
4452 if (k == 1)
4453 if (!useless_type_conversion_p (TREE_TYPE (vec_oprnd0),
4454 atype))
4455 {
4456 vec_oprnd0
4457 = build1 (VIEW_CONVERT_EXPR, atype, vec_oprnd0);
4458 gassign *new_stmt
4459 = gimple_build_assign (make_ssa_name (atype),
4460 vec_oprnd0);
4461 vect_finish_stmt_generation (vinfo, stmt_info,
4462 new_stmt, gsi);
4463 vargs.safe_push (gimple_assign_lhs (new_stmt));
4464 }
4465 else
4466 vargs.safe_push (vec_oprnd0);
4467 else
4468 {
4469 vec_oprnd0 = build_constructor (atype, ctor_elts);
4470 gassign *new_stmt
4471 = gimple_build_assign (make_ssa_name (atype),
4472 vec_oprnd0);
4473 vect_finish_stmt_generation (vinfo, stmt_info,
4474 new_stmt, gsi);
4475 vargs.safe_push (gimple_assign_lhs (new_stmt));
4476 }
4477 }
4478 }
4479 break;
4480 case SIMD_CLONE_ARG_TYPE_MASK:
4481 atype = bestn->simdclone->args[i].vector_type;
4482 if (bestn->simdclone->mask_mode != VOIDmode)
4483 {
4484 /* FORNOW: this is disabled above. */
4485 gcc_unreachable ();
4486 }
4487 else
4488 {
4489 tree elt_type = TREE_TYPE (atype);
4490 tree one = fold_convert (elt_type, integer_one_node);
4491 tree zero = fold_convert (elt_type, integer_zero_node);
4492 o = vector_unroll_factor (nunits,
4493 simd_clone_subparts (atype));
4494 for (m = j * o; m < (j + 1) * o; m++)
4495 {
4496 if (simd_clone_subparts (atype)
4497 < simd_clone_subparts (arginfo[i].vectype))
4498 {
4499 /* The mask type has fewer elements than simdlen. */
4500
4501 /* FORNOW */
4502 gcc_unreachable ();
4503 }
4504 else if (simd_clone_subparts (atype)
4505 == simd_clone_subparts (arginfo[i].vectype))
4506 {
4507 /* The SIMD clone function has the same number of
4508 elements as the current function. */
4509 if (m == 0)
4510 {
4511 vect_get_vec_defs_for_operand (vinfo, stmt_info,
4512 o * ncopies,
4513 op,
4514 &vec_oprnds[i]);
4515 vec_oprnds_i[i] = 0;
4516 }
4517 vec_oprnd0 = vec_oprnds[i][vec_oprnds_i[i]++];
4518 vec_oprnd0
4519 = build3 (VEC_COND_EXPR, atype, vec_oprnd0,
4520 build_vector_from_val (atype, one),
4521 build_vector_from_val (atype, zero));
4522 gassign *new_stmt
4523 = gimple_build_assign (make_ssa_name (atype),
4524 vec_oprnd0);
4525 vect_finish_stmt_generation (vinfo, stmt_info,
4526 new_stmt, gsi);
4527 vargs.safe_push (gimple_assign_lhs (new_stmt));
4528 }
4529 else
4530 {
4531 /* The mask type has more elements than simdlen. */
4532
4533 /* FORNOW */
4534 gcc_unreachable ();
4535 }
4536 }
4537 }
4538 break;
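/* In the VOIDmode mask ABI handled above, the boolean mask vector is
   materialized as integers: VEC_COND_EXPR <mask, { 1, ... }, { 0, ... }>
   produces a vector whose lanes are 1 where active and 0 where not,
   which is what the clone's integer-vector mask parameter expects.  */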
4539 case SIMD_CLONE_ARG_TYPE_UNIFORM:
4540 vargs.safe_push (op);
4541 break;
4542 case SIMD_CLONE_ARG_TYPE_LINEAR_CONSTANT_STEP:
4543 case SIMD_CLONE_ARG_TYPE_LINEAR_REF_CONSTANT_STEP:
4544 if (j == 0)
4545 {
4546 gimple_seq stmts;
4547 arginfo[i].op
4548 = force_gimple_operand (unshare_expr (arginfo[i].op),
4549 &stmts, true, NULL_TREE);
4550 if (stmts != NULL)
4551 {
4552 basic_block new_bb;
4553 edge pe = loop_preheader_edge (loop);
4554 new_bb = gsi_insert_seq_on_edge_immediate (pe, stmts);
4555 gcc_assert (!new_bb);
4556 }
4557 if (arginfo[i].simd_lane_linear)
4558 {
4559 vargs.safe_push (arginfo[i].op);
4560 break;
4561 }
4562 tree phi_res = copy_ssa_name (op);
4563 gphi *new_phi = create_phi_node (phi_res, loop->header);
4564 add_phi_arg (new_phi, arginfo[i].op,
4565 loop_preheader_edge (loop), UNKNOWN_LOCATION);
4566 enum tree_code code
4567 = POINTER_TYPE_P (TREE_TYPE (op))
4568 ? POINTER_PLUS_EXPR : PLUS_EXPR;
4569 tree type = POINTER_TYPE_P (TREE_TYPE (op))
4570 ? sizetype : TREE_TYPE (op);
4571 poly_widest_int cst
4572 = wi::mul (bestn->simdclone->args[i].linear_step,
4573 ncopies * nunits);
4574 tree tcst = wide_int_to_tree (type, cst);
4575 tree phi_arg = copy_ssa_name (op);
4576 gassign *new_stmt
4577 = gimple_build_assign (phi_arg, code, phi_res, tcst);
4578 gimple_stmt_iterator si = gsi_after_labels (loop->header);
4579 gsi_insert_after (&si, new_stmt, GSI_NEW_STMT);
4580 add_phi_arg (new_phi, phi_arg, loop_latch_edge (loop),
4581 UNKNOWN_LOCATION);
4582 arginfo[i].op = phi_res;
4583 vargs.safe_push (phi_res);
4584 }
4585 else
4586 {
4587 enum tree_code code
4588 = POINTER_TYPE_P (TREE_TYPE (op))
4589 ? POINTER_PLUS_EXPR : PLUS_EXPR;
4590 tree type = POINTER_TYPE_P (TREE_TYPE (op))
4591 ? sizetype : TREE_TYPE (op);
4592 poly_widest_int cst
4593 = wi::mul (bestn->simdclone->args[i].linear_step,
4594 j * nunits);
4595 tree tcst = wide_int_to_tree (type, cst);
4596 new_temp = make_ssa_name (TREE_TYPE (op));
4597 gassign *new_stmt
4598 = gimple_build_assign (new_temp, code,
4599 arginfo[i].op, tcst);
4600 vect_finish_stmt_generation (vinfo, stmt_info, new_stmt, gsi);
4601 vargs.safe_push (new_temp);
4602 }
4603 break;
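/* So for a linear argument with constant step S, copy 0 passes a header
   PHI that is advanced by S * ncopies * nunits once per loop iteration,
   while copy J (J > 0) passes PHI + S * J * nunits; each clone
   invocation thus receives the linear value of its first lane.  */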
4604 case SIMD_CLONE_ARG_TYPE_LINEAR_VAL_CONSTANT_STEP:
4605 case SIMD_CLONE_ARG_TYPE_LINEAR_UVAL_CONSTANT_STEP:
4606 case SIMD_CLONE_ARG_TYPE_LINEAR_VARIABLE_STEP:
4607 case SIMD_CLONE_ARG_TYPE_LINEAR_REF_VARIABLE_STEP:
4608 case SIMD_CLONE_ARG_TYPE_LINEAR_VAL_VARIABLE_STEP:
4609 case SIMD_CLONE_ARG_TYPE_LINEAR_UVAL_VARIABLE_STEP:
4610 default:
4611 gcc_unreachable ();
4612 }
4613 }
4614
4615 gcall *new_call = gimple_build_call_vec (fndecl, vargs);
4616 if (vec_dest)
4617 {
4618 gcc_assert (ratype
4619 || known_eq (simd_clone_subparts (rtype), nunits));
4620 if (ratype)
4621 new_temp = create_tmp_var (ratype);
4622 else if (useless_type_conversion_p (vectype, rtype))
4623 new_temp = make_ssa_name (vec_dest, new_call);
4624 else
4625 new_temp = make_ssa_name (rtype, new_call);
4626 gimple_call_set_lhs (new_call, new_temp);
4627 }
4628 vect_finish_stmt_generation (vinfo, stmt_info, new_call, gsi);
4629 gimple *new_stmt = new_call;
4630
4631 if (vec_dest)
4632 {
4633 if (!multiple_p (simd_clone_subparts (vectype), nunits))
4634 {
4635 unsigned int k, l;
4636 poly_uint64 prec = GET_MODE_BITSIZE (TYPE_MODE (vectype));
4637 poly_uint64 bytes = GET_MODE_SIZE (TYPE_MODE (vectype));
4638 k = vector_unroll_factor (nunits,
4639 simd_clone_subparts (vectype));
4640 gcc_assert ((k & (k - 1)) == 0);
4641 for (l = 0; l < k; l++)
4642 {
4643 tree t;
4644 if (ratype)
4645 {
4646 t = build_fold_addr_expr (new_temp);
4647 t = build2 (MEM_REF, vectype, t,
4648 build_int_cst (TREE_TYPE (t), l * bytes));
4649 }
4650 else
4651 t = build3 (BIT_FIELD_REF, vectype, new_temp,
4652 bitsize_int (prec), bitsize_int (l * prec));
4653 new_stmt = gimple_build_assign (make_ssa_name (vectype), t);
4654 vect_finish_stmt_generation (vinfo, stmt_info, new_stmt, gsi);
4655
4656 if (j == 0 && l == 0)
4657 *vec_stmt = new_stmt;
4658 STMT_VINFO_VEC_STMTS (stmt_info).safe_push (new_stmt);
4659 }
4660
4661 if (ratype)
4662 vect_clobber_variable (vinfo, stmt_info, gsi, new_temp);
4663 continue;
4664 }
4665 else if (!multiple_p (nunits, simd_clone_subparts (vectype)))
4666 {
4667 unsigned int k = (simd_clone_subparts (vectype)
4668 / simd_clone_subparts (rtype));
4669 gcc_assert ((k & (k - 1)) == 0);
4670 if ((j & (k - 1)) == 0)
4671 vec_alloc (ret_ctor_elts, k);
4672 if (ratype)
4673 {
4674 unsigned int m, o;
4675 o = vector_unroll_factor (nunits,
4676 simd_clone_subparts (rtype));
4677 for (m = 0; m < o; m++)
4678 {
4679 tree tem = build4 (ARRAY_REF, rtype, new_temp,
4680 size_int (m), NULL_TREE, NULL_TREE);
4681 new_stmt = gimple_build_assign (make_ssa_name (rtype),
4682 tem);
4683 vect_finish_stmt_generation (vinfo, stmt_info,
4684 new_stmt, gsi);
4685 CONSTRUCTOR_APPEND_ELT (ret_ctor_elts, NULL_TREE,
4686 gimple_assign_lhs (new_stmt));
4688 vect_clobber_variable (vinfo, stmt_info, gsi, new_temp);
4689 }
4690 else
4691 CONSTRUCTOR_APPEND_ELT (ret_ctor_elts, NULL_TREE, new_temp);
4692 if ((j & (k - 1)) != k - 1)
4693 continue;
4694 vec_oprnd0 = build_constructor (vectype, ret_ctor_elts);
4695 new_stmt
4696 = gimple_build_assign (make_ssa_name (vec_dest), vec_oprnd0);
4697 vect_finish_stmt_generation (vinfo, stmt_info, new_stmt, gsi);
4698
4699 if ((unsigned) j == k - 1)
4700 *vec_stmt = new_stmt;
4701 STMT_VINFO_VEC_STMTS (stmt_info).safe_push (new_stmt);
4702 continue;
4703 }
4704 else if (ratype)
4705 {
4706 tree t = build_fold_addr_expr (new_temp);
4707 t = build2 (MEM_REF, vectype, t,
4708 build_int_cst (TREE_TYPE (t), 0));
4709 new_stmt = gimple_build_assign (make_ssa_name (vec_dest), t);
4710 vect_finish_stmt_generation (vinfo, stmt_info, new_stmt, gsi);
4711 vect_clobber_variable (vinfo, stmt_info, gsi, new_temp);
4712 }
4713 else if (!useless_type_conversion_p (vectype, rtype))
4714 {
4715 vec_oprnd0 = build1 (VIEW_CONVERT_EXPR, vectype, new_temp);
4716 new_stmt
4717 = gimple_build_assign (make_ssa_name (vec_dest), vec_oprnd0);
4718 vect_finish_stmt_generation (vinfo, stmt_info, new_stmt, gsi);
4719 }
4720 }
4721
4722 if (j == 0)
4723 *vec_stmt = new_stmt;
4724      STMT_VINFO_VEC_STMTS (stmt_info).safe_push (new_stmt);
4725 }
4726
4727 for (i = 0; i < nargs; ++i)
4728 {
4729 vec<tree> oprndsi = vec_oprnds[i];
4730 oprndsi.release ();
4731 }
4732 vargs.release ();
4733
4734 /* Mark the clone as no longer being a candidate for GC. */
4735 bestn->gc_candidate = false;
4736
4737 /* The call in STMT might prevent it from being removed in dce.
4738 We however cannot remove it here, due to the way the ssa name
4739 it defines is mapped to the new definition. So just replace
4740 rhs of the statement with something harmless. */
4741
4742 if (slp_node)
4743 return true;
4744
4745 gimple *new_stmt;
4746 if (scalar_dest)
4747 {
4748      type = TREE_TYPE (scalar_dest);
4749 lhs = gimple_call_lhs (vect_orig_stmt (stmt_info)->stmt);
4750 new_stmt = gimple_build_assign (lhs, build_zero_cst (type));
4751 }
4752 else
4753 new_stmt = gimple_build_nop ();
4754 vinfo->replace_stmt (gsi, vect_orig_stmt (stmt_info), new_stmt);
4755 unlink_stmt_vdef (stmt);
4756
4757 return true;
4758}
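
/* [Editor's note: illustrative stand-alone sketch, not part of
   tree-vect-stmts.cc.]  The transform above carves K narrow vectors out
   of one wide SIMD-clone result; the gcc_asserts rely on K being a
   power of two.  The lane counts below are hypothetical.  */

#include <cassert>
#include <cstdio>

static bool pow2_p (unsigned k) { return k != 0 && (k & (k - 1)) == 0; }

int main ()
{
  unsigned wide_units = 8, narrow_units = 2;  /* hypothetical lane counts */
  unsigned k = wide_units / narrow_units;     /* pieces per clone result */
  assert (pow2_p (k));                        /* same invariant as the gcc_assert */
  for (unsigned l = 0; l < k; l++)            /* mirrors the BIT_FIELD_REF offsets */
    printf ("piece %u covers lanes [%u, %u)\n",
            l, l * narrow_units, (l + 1) * narrow_units);
  return 0;
}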
4759
4760
4761 /* Function vect_gen_widened_results_half
4762
4763    Create a vector stmt whose code, operand count, and result variable
4764    are CODE, OP_TYPE, and VEC_DEST, and whose operands are VEC_OPRND0
4765    and VEC_OPRND1 (VEC_OPRND1 is used only when OP_TYPE is binary_op).
4766    The new vector stmt is to be inserted at GSI.
4767
4768    STMT_INFO is the original scalar stmt that we are vectorizing.  */
4769
4770static gimple *
4771vect_gen_widened_results_half (vec_info *vinfo, enum tree_code code,
4772 tree vec_oprnd0, tree vec_oprnd1, int op_type,
4773 tree vec_dest, gimple_stmt_iterator *gsi,
4774 stmt_vec_info stmt_info)
4775{
4776 gimple *new_stmt;
4777 tree new_temp;
4778
4779 /* Generate half of the widened result: */
4780   gcc_assert (op_type == TREE_CODE_LENGTH (code));
4781   if (op_type != binary_op)
4782     vec_oprnd1 = NULL;
4783 new_stmt = gimple_build_assign (vec_dest, code, vec_oprnd0, vec_oprnd1);
4784 new_temp = make_ssa_name (vec_dest, new_stmt);
4785 gimple_assign_set_lhs (new_stmt, new_temp);
4786 vect_finish_stmt_generation (vinfo, stmt_info, new_stmt, gsi);
4787
4788 return new_stmt;
4789}
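
/* [Editor's note: illustrative sketch, not GCC code.]  Widening doubles
   the element size, so one input vector of N narrow lanes fills two
   output vectors of N/2 wide lanes each; the helper above emits one such
   half ("lo" or "hi") per call.  A scalar model of the two halves:  */

#include <array>
#include <cstdint>
#include <cstdio>

int main ()
{
  std::array<int16_t, 8> in = {1, 2, 3, 4, 5, 6, 7, 8};
  std::array<int32_t, 4> lo, hi;      /* the two halves of the widened result */
  for (int i = 0; i < 4; i++)
    {
      lo[i] = in[i];                  /* VEC_UNPACK_LO-style half */
      hi[i] = in[i + 4];              /* VEC_UNPACK_HI-style half */
    }
  printf ("lo[0]=%d hi[3]=%d\n", (int) lo[0], (int) hi[3]);
  return 0;
}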
4790
4791
4792/* Create vectorized demotion statements for vector operands from VEC_OPRNDS.
4793 For multi-step conversions store the resulting vectors and call the function
4794 recursively. */
4795
4796static void
4797vect_create_vectorized_demotion_stmts (vec_info *vinfo, vec<tree> *vec_oprnds,
4798 int multi_step_cvt,
4799 stmt_vec_info stmt_info,
4800 vec<tree> &vec_dsts,
4801 gimple_stmt_iterator *gsi,
4802 slp_tree slp_node, enum tree_code code)
4803{
4804 unsigned int i;
4805 tree vop0, vop1, new_tmp, vec_dest;
4806
4807 vec_dest = vec_dsts.pop ();
4808
4809 for (i = 0; i < vec_oprnds->length (); i += 2)
4810 {
4811 /* Create demotion operation. */
4812 vop0 = (*vec_oprnds)[i];
4813 vop1 = (*vec_oprnds)[i + 1];
4814 gassign *new_stmt = gimple_build_assign (vec_dest, code, vop0, vop1);
4815 new_tmp = make_ssa_name (vec_dest, new_stmt);
4816 gimple_assign_set_lhs (new_stmt, new_tmp);
4817 vect_finish_stmt_generation (vinfo, stmt_info, new_stmt, gsi);
4818
4819 if (multi_step_cvt)
4820 /* Store the resulting vector for next recursive call. */
4821 (*vec_oprnds)[i/2] = new_tmp;
4822 else
4823 {
4824 /* This is the last step of the conversion sequence. Store the
4825 vectors in SLP_NODE or in vector info of the scalar statement
4826 (or in STMT_VINFO_RELATED_STMT chain). */
4827 if (slp_node)
4828	    SLP_TREE_VEC_STMTS (slp_node).quick_push (new_stmt);
4829	  else
4830	    STMT_VINFO_VEC_STMTS (stmt_info).safe_push (new_stmt);
4831 }
4832 }
4833
4834  /* For multi-step demotion operations we first generate demotion operations
4835     from the source type to the intermediate types, and then combine the
4836     results (stored in VEC_OPRNDS) in a demotion operation to the
4837     destination type.  */
4838 if (multi_step_cvt)
4839 {
4840 /* At each level of recursion we have half of the operands we had at the
4841 previous level. */
4842 vec_oprnds->truncate ((i+1)/2);
4843 vect_create_vectorized_demotion_stmts (vinfo, vec_oprnds,
4844 multi_step_cvt - 1,
4845 stmt_info, vec_dsts, gsi,
4846 slp_node, VEC_PACK_TRUNC_EXPR);
4847 }
4848
4849 vec_dsts.quick_push (vec_dest);
4850}
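
/* [Editor's note: illustrative sketch, not GCC code.]  The recursion
   above halves the operand list at each level: a VEC_PACK_TRUNC-style
   step combines operands i and i+1 into slot i/2, so S steps turn 2^S
   input vectors into one result.  Integers stand in for vectors:  */

#include <cstdio>
#include <vector>

static void demote_level (std::vector<int> &ops, int steps_left)
{
  for (size_t i = 0; i + 1 < ops.size (); i += 2)
    ops[i / 2] = ops[i] + ops[i + 1];  /* stand-in for one pack stmt */
  ops.resize ((ops.size () + 1) / 2);  /* half as many operands per level */
  if (steps_left)
    demote_level (ops, steps_left - 1);
}

int main ()
{
  std::vector<int> ops = {1, 2, 3, 4}; /* four intermediate vectors */
  demote_level (ops, 1);               /* two levels: 4 -> 2 -> 1 */
  printf ("%zu operand(s) left, value %d\n", ops.size (), ops[0]);
  return 0;
}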
4851
4852
4853/* Create vectorized promotion statements for vector operands from VEC_OPRNDS0
4854 and VEC_OPRNDS1, for a binary operation associated with scalar statement
4855 STMT_INFO. For multi-step conversions store the resulting vectors and
4856 call the function recursively. */
4857
4858static void
4859vect_create_vectorized_promotion_stmts (vec_info *vinfo,
4860 vec<tree> *vec_oprnds0,
4861 vec<tree> *vec_oprnds1,
4862 stmt_vec_info stmt_info, tree vec_dest,
4863 gimple_stmt_iterator *gsi,
4864 enum tree_code code1,
4865 enum tree_code code2, int op_type)
4866{
4867 int i;
4868 tree vop0, vop1, new_tmp1, new_tmp2;
4869 gimple *new_stmt1, *new_stmt2;
4870 vec<tree> vec_tmp = vNULL;
4871
4872 vec_tmp.create (vec_oprnds0->length () * 2);
4873  FOR_EACH_VEC_ELT (*vec_oprnds0, i, vop0)
4874    {
4875      if (op_type == binary_op)
4876	vop1 = (*vec_oprnds1)[i];
4877      else
4878	vop1 = NULL_TREE;
4879
4880 /* Generate the two halves of promotion operation. */
4881 new_stmt1 = vect_gen_widened_results_half (vinfo, code1, vop0, vop1,
4882 op_type, vec_dest, gsi,
4883 stmt_info);
4884 new_stmt2 = vect_gen_widened_results_half (vinfo, code2, vop0, vop1,
4885 op_type, vec_dest, gsi,
4886 stmt_info);
4887 if (is_gimple_call (new_stmt1))
4888 {
4889 new_tmp1 = gimple_call_lhs (new_stmt1);
4890 new_tmp2 = gimple_call_lhs (new_stmt2);
4891 }
4892 else
4893 {
4894 new_tmp1 = gimple_assign_lhs (new_stmt1);
4895 new_tmp2 = gimple_assign_lhs (new_stmt2);
4896 }
4897
4898 /* Store the results for the next step. */
4899 vec_tmp.quick_push (new_tmp1);
4900 vec_tmp.quick_push (new_tmp2);
4901 }
4902
4903 vec_oprnds0->release ();
4904 *vec_oprnds0 = vec_tmp;
4905}
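
/* [Editor's note: illustrative sketch, not GCC code.]  Promotion is the
   mirror image of demotion: every input operand yields two results (a
   lo and a hi half), which is why vec_tmp above is created with twice
   the input length and receives two pushes per iteration.  */

#include <cstdio>
#include <vector>

int main ()
{
  std::vector<int> in = {10, 20};
  std::vector<int> out;
  out.reserve (in.size () * 2);   /* mirrors vec_tmp.create (len * 2) */
  for (int v : in)
    {
      out.push_back (v);          /* "lo" half result */
      out.push_back (v + 1);      /* "hi" half result (hypothetical value) */
    }
  printf ("%zu inputs -> %zu outputs\n", in.size (), out.size ());
  return 0;
}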
4906
4907/* Create vectorized promotion stmts for widening stmts using only half the
4908 potential vector size for input. */
4909static void
4910vect_create_half_widening_stmts (vec_info *vinfo,
4911 vec<tree> *vec_oprnds0,
4912 vec<tree> *vec_oprnds1,
4913 stmt_vec_info stmt_info, tree vec_dest,
4914 gimple_stmt_iterator *gsi,
4915 enum tree_code code1,
4916 int op_type)
4917{
4918 int i;
4919 tree vop0, vop1;
4920 gimple *new_stmt1;
4921 gimple *new_stmt2;
4922 gimple *new_stmt3;
4923 vec<tree> vec_tmp = vNULL;
4924
4925 vec_tmp.create (vec_oprnds0->length ());
4926  FOR_EACH_VEC_ELT (*vec_oprnds0, i, vop0)
4927 {
4928 tree new_tmp1, new_tmp2, new_tmp3, out_type;
4929
4930      gcc_assert (op_type == binary_op);
4931      vop1 = (*vec_oprnds1)[i];
4932
4933      /* Widen the first vector input.  */
4934      out_type = TREE_TYPE (vec_dest);
4935 new_tmp1 = make_ssa_name (out_type);
4936 new_stmt1 = gimple_build_assign (new_tmp1, NOP_EXPR, vop0);
4937 vect_finish_stmt_generation (vinfo, stmt_info, new_stmt1, gsi);
4938      if (VECTOR_TYPE_P (TREE_TYPE (vop1)))
4939 {
4940 /* Widen the second vector input. */
4941 new_tmp2 = make_ssa_name (out_type);
4942 new_stmt2 = gimple_build_assign (new_tmp2, NOP_EXPR, vop1);
4943 vect_finish_stmt_generation (vinfo, stmt_info, new_stmt2, gsi);
4944	  /* Perform the operation with both vector inputs widened.  */
4945 new_stmt3 = gimple_build_assign (vec_dest, code1, new_tmp1, new_tmp2);
4946 }
4947 else
4948 {
4949	  /* Perform the operation with the single vector input widened.  */
4950 new_stmt3 = gimple_build_assign (vec_dest, code1, new_tmp1, vop1);
4951 }
4952
4953 new_tmp3 = make_ssa_name (vec_dest, new_stmt3);
4954 gimple_assign_set_lhs (new_stmt3, new_tmp3);
4955 vect_finish_stmt_generation (vinfo, stmt_info, new_stmt3, gsi);
4956
4957 /* Store the results for the next step. */
4958 vec_tmp.quick_push (new_tmp3);
4959 }
4960
4961 vec_oprnds0->release ();
4962 *vec_oprnds0 = vec_tmp;
4963}
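
/* [Editor's note: illustrative sketch, not GCC code.]  Half widening
   keeps the lane count: each input is first NOP-converted to the wide
   type and the plain (non-widening) operation then runs at that type,
   matching new_stmt1/new_stmt2/new_stmt3 above.  Scalar model:  */

#include <cstdint>
#include <cstdio>

int main ()
{
  int16_t a = 30000, b = 30000;
  int32_t wa = a, wb = b;         /* the two NOP_EXPR widenings */
  int32_t sum = wa + wb;          /* the operation at the wide type */
  printf ("%d\n", (int) sum);     /* 60000; no 16-bit overflow */
  return 0;
}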
4964
4965
4966/* Check if STMT_INFO performs a conversion operation that can be vectorized.
4967 If VEC_STMT is also passed, vectorize STMT_INFO: create a vectorized
4968 stmt to replace it, put it in VEC_STMT, and insert it at GSI.
4969 Return true if STMT_INFO is vectorizable in this way. */
4970
4971static bool
4972vectorizable_conversion (vec_info *vinfo,
4973 stmt_vec_info stmt_info, gimple_stmt_iterator *gsi,
4974 gimple **vec_stmt, slp_tree slp_node,
4975 stmt_vector_for_cost *cost_vec)
4976{
4977 tree vec_dest;
4978 tree scalar_dest;
4979  tree op0, op1 = NULL_TREE;
4980 loop_vec_info loop_vinfo = dyn_cast <loop_vec_info> (vinfo);
4981 enum tree_code code, code1 = ERROR_MARK, code2 = ERROR_MARK;
4982 enum tree_code codecvt1 = ERROR_MARK, codecvt2 = ERROR_MARK;
4983 tree new_temp;
4984 enum vect_def_type dt[2] = {vect_unknown_def_type, vect_unknown_def_type};
4985 int ndts = 2;
4986 poly_uint64 nunits_in;
4987 poly_uint64 nunits_out;
4988 tree vectype_out, vectype_in;
4989 int ncopies, i;
4990 tree lhs_type, rhs_type;
4991 enum { NARROW, NONE, WIDEN } modifier;
4992 vec<tree> vec_oprnds0 = vNULL;
4993 vec<tree> vec_oprnds1 = vNULL;
4994 tree vop0;
4995 bb_vec_info bb_vinfo = dyn_cast <bb_vec_info> (vinfo);
4996 int multi_step_cvt = 0;
4997 vec<tree> interm_types = vNULL;
4998  tree intermediate_type, cvt_type = NULL_TREE;
4999 int op_type;
5000 unsigned short fltsz;
5001
5002 /* Is STMT a vectorizable conversion? */
5003
5004  if (!STMT_VINFO_RELEVANT_P (stmt_info) && !bb_vinfo)
5005 return false;
5006
5007  if (STMT_VINFO_DEF_TYPE (stmt_info) != vect_internal_def
5008 && ! vec_stmt)
5009 return false;
5010
5011 gassign *stmt = dyn_cast <gassign *> (stmt_info->stmt);
5012 if (!stmt)
5013 return false;
5014
5015  if (TREE_CODE (gimple_assign_lhs (stmt)) != SSA_NAME)
5016 return false;
5017
5018 code = gimple_assign_rhs_code (stmt);
5019  if (!CONVERT_EXPR_CODE_P (code)
5020 && code != FIX_TRUNC_EXPR
5021 && code != FLOAT_EXPR
5022 && code != WIDEN_PLUS_EXPR
5023 && code != WIDEN_MINUS_EXPR
5024 && code != WIDEN_MULT_EXPR
5025 && code != WIDEN_LSHIFT_EXPR)
5026 return false;
5027
5028 bool widen_arith = (code == WIDEN_PLUS_EXPR
5029 || code == WIDEN_MINUS_EXPR
5030 || code == WIDEN_MULT_EXPR
5031 || code == WIDEN_LSHIFT_EXPR);
5032  op_type = TREE_CODE_LENGTH (code);
5033
5034  /* Check types of lhs and rhs.  */
5035  scalar_dest = gimple_assign_lhs (stmt);
5036  lhs_type = TREE_TYPE (scalar_dest);
5037  vectype_out = STMT_VINFO_VECTYPE (stmt_info);
5038
5039 /* Check the operands of the operation. */
5040  slp_tree slp_op0, slp_op1 = NULL;
5041 if (!vect_is_simple_use (vinfo, stmt_info, slp_node,
5042 0, &op0, &slp_op0, &dt[0], &vectype_in))
5043 {
5044 if (dump_enabled_p ())
5045 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
5046 "use not simple.\n");
5047 return false;
5048 }
5049
5050  rhs_type = TREE_TYPE (op0);
5051  if ((code != FIX_TRUNC_EXPR && code != FLOAT_EXPR)
5052      && !((INTEGRAL_TYPE_P (lhs_type)
5053	    && INTEGRAL_TYPE_P (rhs_type))
5054	   || (SCALAR_FLOAT_TYPE_P (lhs_type)
5055	       && SCALAR_FLOAT_TYPE_P (rhs_type))))
5056 return false;
5057
5058  if (!VECTOR_BOOLEAN_TYPE_P (vectype_out)
5059      && ((INTEGRAL_TYPE_P (lhs_type)
5060	   && !type_has_mode_precision_p (lhs_type))
5061	  || (INTEGRAL_TYPE_P (rhs_type)
5062	      && !type_has_mode_precision_p (rhs_type))))
5063 {
5064 if (dump_enabled_p ())
5065 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
5066 "type conversion to/from bit-precision unsupported."
5067 "\n");
5068 return false;
5069 }
5070
5071 if (op_type == binary_op)
5072 {
5073      gcc_assert (code == WIDEN_MULT_EXPR || code == WIDEN_LSHIFT_EXPR
5074		  || code == WIDEN_PLUS_EXPR || code == WIDEN_MINUS_EXPR);
5076 op1 = gimple_assign_rhs2 (stmt);
5077 tree vectype1_in;
5078 if (!vect_is_simple_use (vinfo, stmt_info, slp_node, 1,
5079 &op1, &slp_op1, &dt[1], &vectype1_in))
5080 {
5081 if (dump_enabled_p ())
5082 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
5083 "use not simple.\n");
5084 return false;
5085 }
5086 /* For WIDEN_MULT_EXPR, if OP0 is a constant, use the type of
5087 OP1. */
5088 if (!vectype_in)
5089 vectype_in = vectype1_in;
5090 }
5091
5092 /* If op0 is an external or constant def, infer the vector type
5093 from the scalar type. */
5094 if (!vectype_in)
5095 vectype_in = get_vectype_for_scalar_type (vinfo, rhs_type, slp_node);
5096 if (vec_stmt)
5097    gcc_assert (vectype_in);
5098 if (!vectype_in)
5099 {
5100 if (dump_enabled_p ())
5101 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
5102 "no vectype for scalar type %T\n", rhs_type);
5103
5104 return false;
5105 }
5106
5107  if (VECTOR_BOOLEAN_TYPE_P (vectype_out)
5108      && !VECTOR_BOOLEAN_TYPE_P (vectype_in))
5109 {
5110 if (dump_enabled_p ())
5111 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
5112 "can't convert between boolean and non "
5113 "boolean vectors %T\n", rhs_type);
5114
5115 return false;
5116 }
5117
5118 nunits_in = TYPE_VECTOR_SUBPARTS (vectype_in);
5119 nunits_out = TYPE_VECTOR_SUBPARTS (vectype_out);
5120  if (known_eq (nunits_out, nunits_in))
5121 if (widen_arith)
5122 modifier = WIDEN;
5123 else
5124 modifier = NONE;
5125 else if (multiple_p (nunits_out, nunits_in))
5126 modifier = NARROW;
5127 else
5128 {
5129      gcc_checking_assert (multiple_p (nunits_in, nunits_out));
5130 modifier = WIDEN;
5131 }
5132
5133 /* Multiple types in SLP are handled by creating the appropriate number of
5134 vectorized stmts for each SLP node. Hence, NCOPIES is always 1 in
5135 case of SLP. */
5136 if (slp_node)
5137 ncopies = 1;
5138 else if (modifier == NARROW)
5139 ncopies = vect_get_num_copies (loop_vinfo, vectype_out);
5140 else
5141 ncopies = vect_get_num_copies (loop_vinfo, vectype_in);
5142
5143 /* Sanity check: make sure that at least one copy of the vectorized stmt
5144 needs to be generated. */
5145  gcc_assert (ncopies >= 1);
5146
5147 bool found_mode = false;
5148  scalar_mode lhs_mode = SCALAR_TYPE_MODE (lhs_type);
5149  scalar_mode rhs_mode = SCALAR_TYPE_MODE (rhs_type);
5150 opt_scalar_mode rhs_mode_iter;
5151
5152 /* Supportable by target? */
5153 switch (modifier)
5154 {
5155 case NONE:
5156 if (code != FIX_TRUNC_EXPR
5157 && code != FLOAT_EXPR
5158	  && !CONVERT_EXPR_CODE_P (code))
5159 return false;
5160 if (supportable_convert_operation (code, vectype_out, vectype_in, &code1))
5161 break;
5162 /* FALLTHRU */
5163 unsupported:
5164 if (dump_enabled_p ())
5165 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
5166 "conversion not supported by target.\n");
5167 return false;
5168
5169 case WIDEN:
5170      if (known_eq (nunits_in, nunits_out))
5171 {
5172 if (!supportable_half_widening_operation (code, vectype_out,
5173 vectype_in, &code1))
5174 goto unsupported;
5175	  gcc_assert (!(multi_step_cvt && op_type == binary_op));
5176 break;
5177 }
5178 if (supportable_widening_operation (vinfo, code, stmt_info,
5179 vectype_out, vectype_in, &code1,
5180 &code2, &multi_step_cvt,
5181 &interm_types))
5182 {
5183 /* Binary widening operation can only be supported directly by the
5184 architecture. */
5185	  gcc_assert (!(multi_step_cvt && op_type == binary_op));
5186 break;
5187 }
5188
5189 if (code != FLOAT_EXPR
5190 || GET_MODE_SIZE (lhs_mode) <= GET_MODE_SIZE (rhs_mode))
5191 goto unsupported;
5192
5193 fltsz = GET_MODE_SIZE (lhs_mode);
5194      FOR_EACH_2XWIDER_MODE (rhs_mode_iter, rhs_mode)
5195 {
5196 rhs_mode = rhs_mode_iter.require ();
5197 if (GET_MODE_SIZE (rhs_mode) > fltsz)
5198 break;
5199
5200 cvt_type
5201 = build_nonstandard_integer_type (GET_MODE_BITSIZE (rhs_mode), 0);
5202 cvt_type = get_same_sized_vectype (cvt_type, vectype_in);
5203	  if (cvt_type == NULL_TREE)
5204 goto unsupported;
5205
5206 if (GET_MODE_SIZE (rhs_mode) == fltsz)
5207 {
5208 if (!supportable_convert_operation (code, vectype_out,
5209 cvt_type, &codecvt1))
5210 goto unsupported;
5211 }
5212 else if (!supportable_widening_operation (vinfo, code, stmt_info,
5213 vectype_out, cvt_type,
5214 &codecvt1, &codecvt2,
5215 &multi_step_cvt,
5216 &interm_types))
5217 continue;
5218 else
5219	    gcc_assert (multi_step_cvt == 0);
5220
5221 if (supportable_widening_operation (vinfo, NOP_EXPR, stmt_info,
5222 cvt_type,
5223 vectype_in, &code1, &code2,
5224 &multi_step_cvt, &interm_types))
5225 {
5226 found_mode = true;
5227 break;
5228 }
5229 }
5230
5231 if (!found_mode)
5232 goto unsupported;
5233
5234 if (GET_MODE_SIZE (rhs_mode) == fltsz)
5235 codecvt2 = ERROR_MARK;
5236 else
5237 {
5238 multi_step_cvt++;
5239 interm_types.safe_push (cvt_type);
5240	  cvt_type = NULL_TREE;
5241 }
5242 break;
5243
5244 case NARROW:
5245      gcc_assert (op_type == unary_op);
5246 if (supportable_narrowing_operation (code, vectype_out, vectype_in,
5247 &code1, &multi_step_cvt,
5248 &interm_types))
5249 break;
5250
5251 if (code != FIX_TRUNC_EXPR
5252 || GET_MODE_SIZE (lhs_mode) >= GET_MODE_SIZE (rhs_mode))
5253 goto unsupported;
5254
5255 cvt_type
5256 = build_nonstandard_integer_type (GET_MODE_BITSIZE (rhs_mode), 0);
5257 cvt_type = get_same_sized_vectype (cvt_type, vectype_in);
5258      if (cvt_type == NULL_TREE)
5259 goto unsupported;
5260 if (!supportable_convert_operation (code, cvt_type, vectype_in,
5261 &codecvt1))
5262 goto unsupported;
5263 if (supportable_narrowing_operation (NOP_EXPR, vectype_out, cvt_type,
5264 &code1, &multi_step_cvt,
5265 &interm_types))
5266 break;
5267 goto unsupported;
5268
5269 default:
5270      gcc_unreachable ();
5271 }
5272
5273 if (!vec_stmt) /* transformation not required. */
5274 {
5275 if (slp_node
5276 && (!vect_maybe_update_slp_op_vectype (slp_op0, vectype_in)
5277 || !vect_maybe_update_slp_op_vectype (slp_op1, vectype_in)))
5278 {
5279 if (dump_enabled_p ())
5280 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
5281 "incompatible vector types for invariants\n");
5282 return false;
5283 }
5284      DUMP_VECT_SCOPE ("vectorizable_conversion");
5285      if (modifier == NONE)
5286	{
5287	  STMT_VINFO_TYPE (stmt_info) = type_conversion_vec_info_type;
5288 vect_model_simple_cost (vinfo, stmt_info, ncopies, dt, ndts, slp_node,
5289 cost_vec);
5290 }
5291 else if (modifier == NARROW)
5292 {
5293	  STMT_VINFO_TYPE (stmt_info) = type_demotion_vec_info_type;
5294	  /* The final packing step produces one vector result per copy.  */
5295	  unsigned int nvectors
5296	    = (slp_node ? SLP_TREE_NUMBER_OF_VEC_STMTS (slp_node) : ncopies);
5297 vect_model_promotion_demotion_cost (stmt_info, dt, nvectors,
5298 multi_step_cvt, cost_vec,
5299 widen_arith);
5300 }
5301 else
5302 {
5303	  STMT_VINFO_TYPE (stmt_info) = type_promotion_vec_info_type;
5304 /* The initial unpacking step produces two vector results
5305 per copy. MULTI_STEP_CVT is 0 for a single conversion,
5306 so >> MULTI_STEP_CVT divides by 2^(number of steps - 1). */
5307 unsigned int nvectors
5308 = (slp_node
5309	       ? SLP_TREE_NUMBER_OF_VEC_STMTS (slp_node) >> multi_step_cvt
5310 : ncopies * 2);
5311 vect_model_promotion_demotion_cost (stmt_info, dt, nvectors,
5312 multi_step_cvt, cost_vec,
5313 widen_arith);
5314 }
5315 interm_types.release ();
5316 return true;
5317 }
5318
5319 /* Transform. */
5320 if (dump_enabled_p ())
5321 dump_printf_loc (MSG_NOTE, vect_location,
5322 "transform conversion. ncopies = %d.\n", ncopies);
5323
5324 if (op_type == binary_op)
5325 {
5326      if (CONSTANT_CLASS_P (op0))
5327	op0 = fold_convert (TREE_TYPE (op1), op0);
5328      else if (CONSTANT_CLASS_P (op1))
5329	op1 = fold_convert (TREE_TYPE (op0), op1);
5330 }
5331
5332  /* In case of multi-step conversion, we first generate conversion operations
5333     to the intermediate types, and then from those types to the final one.
5334     We create vector destinations for the intermediate type (TYPES) received
5335     from supportable_*_operation, and store them in the correct order
5336     for future use in vect_create_vectorized_*_stmts ().  */
5337 auto_vec<tree> vec_dsts (multi_step_cvt + 1);
5338 vec_dest = vect_create_destination_var (scalar_dest,
5339 (cvt_type && modifier == WIDEN)
5340 ? cvt_type : vectype_out);
5341 vec_dsts.quick_push (vec_dest);
5342
5343 if (multi_step_cvt)
5344 {
5345 for (i = interm_types.length () - 1;
5346 interm_types.iterate (i, &intermediate_type); i--)
5347 {
5348 vec_dest = vect_create_destination_var (scalar_dest,
5349 intermediate_type);
5350 vec_dsts.quick_push (vec_dest);
5351 }
5352 }
5353
5354 if (cvt_type)
5355 vec_dest = vect_create_destination_var (scalar_dest,
5356 modifier == WIDEN
5357 ? vectype_out : cvt_type);
5358
5359 int ninputs = 1;
5360 if (!slp_node)
5361 {
5362 if (modifier == WIDEN)
5363 ;
5364 else if (modifier == NARROW)
5365 {
5366 if (multi_step_cvt)
5367 ninputs = vect_pow2 (multi_step_cvt);
5368 ninputs *= 2;
5369 }
5370 }
5371
5372 switch (modifier)
5373 {
5374 case NONE:
5375 vect_get_vec_defs (vinfo, stmt_info, slp_node, ncopies,
5376 op0, &vec_oprnds0);
5377      FOR_EACH_VEC_ELT (vec_oprnds0, i, vop0)
5378 {
5379 /* Arguments are ready, create the new vector stmt. */
5380	  gcc_assert (TREE_CODE_LENGTH (code1) == unary_op);
5381 gassign *new_stmt = gimple_build_assign (vec_dest, code1, vop0);
5382 new_temp = make_ssa_name (vec_dest, new_stmt);
5383 gimple_assign_set_lhs (new_stmt, new_temp);
5384 vect_finish_stmt_generation (vinfo, stmt_info, new_stmt, gsi);
5385
5386 if (slp_node)
5387	    SLP_TREE_VEC_STMTS (slp_node).quick_push (new_stmt);
5388	  else
5389	    STMT_VINFO_VEC_STMTS (stmt_info).safe_push (new_stmt);
5390 }
5391 break;
5392
5393 case WIDEN:
5394 /* In case the vectorization factor (VF) is bigger than the number
5395 of elements that we can fit in a vectype (nunits), we have to
5396	 generate more than one vector stmt, i.e., we need to "unroll"
5397 the vector stmt by a factor VF/nunits. */
5398 vect_get_vec_defs (vinfo, stmt_info, slp_node, ncopies * ninputs,
5399 op0, &vec_oprnds0,
5400			 code == WIDEN_LSHIFT_EXPR ? NULL_TREE : op1,
5401 &vec_oprnds1);
5402 if (code == WIDEN_LSHIFT_EXPR)
5403 {
5404 int oprnds_size = vec_oprnds0.length ();
5405 vec_oprnds1.create (oprnds_size);
5406 for (i = 0; i < oprnds_size; ++i)
5407 vec_oprnds1.quick_push (op1);
5408 }
5409 /* Arguments are ready. Create the new vector stmts. */
5410 for (i = multi_step_cvt; i >= 0; i--)
5411 {
5412 tree this_dest = vec_dsts[i];
5413 enum tree_code c1 = code1, c2 = code2;
5414 if (i == 0 && codecvt2 != ERROR_MARK)
5415 {
5416 c1 = codecvt1;
5417 c2 = codecvt2;
5418 }
5419	  if (known_eq (nunits_out, nunits_in))
5420 vect_create_half_widening_stmts (vinfo, &vec_oprnds0,
5421 &vec_oprnds1, stmt_info,
5422 this_dest, gsi,
5423 c1, op_type);
5424 else
5425 vect_create_vectorized_promotion_stmts (vinfo, &vec_oprnds0,
5426 &vec_oprnds1, stmt_info,
5427 this_dest, gsi,
5428 c1, c2, op_type);
5429 }
5430
5431      FOR_EACH_VEC_ELT (vec_oprnds0, i, vop0)
5432 {
5433 gimple *new_stmt;
5434 if (cvt_type)
5435 {
5436	      gcc_assert (TREE_CODE_LENGTH (codecvt1) == unary_op);
5437 new_temp = make_ssa_name (vec_dest);
5438 new_stmt = gimple_build_assign (new_temp, codecvt1, vop0);
5439 vect_finish_stmt_generation (vinfo, stmt_info, new_stmt, gsi);
5440 }
5441 else
5442	    new_stmt = SSA_NAME_DEF_STMT (vop0);
5443
5444 if (slp_node)
5445	    SLP_TREE_VEC_STMTS (slp_node).quick_push (new_stmt);
5446	  else
5447	    STMT_VINFO_VEC_STMTS (stmt_info).safe_push (new_stmt);
5448 }
5449 break;
5450
5451 case NARROW:
5452 /* In case the vectorization factor (VF) is bigger than the number
5453 of elements that we can fit in a vectype (nunits), we have to
5454	 generate more than one vector stmt, i.e., we need to "unroll"
5455 the vector stmt by a factor VF/nunits. */
5456 vect_get_vec_defs (vinfo, stmt_info, slp_node, ncopies * ninputs,
5457 op0, &vec_oprnds0);
5458 /* Arguments are ready. Create the new vector stmts. */
5459 if (cvt_type)
5460	FOR_EACH_VEC_ELT (vec_oprnds0, i, vop0)
5461	  {
5462	    gcc_assert (TREE_CODE_LENGTH (codecvt1) == unary_op);
5463 new_temp = make_ssa_name (vec_dest);
5464 gassign *new_stmt
5465 = gimple_build_assign (new_temp, codecvt1, vop0);
5466 vect_finish_stmt_generation (vinfo, stmt_info, new_stmt, gsi);
5467 vec_oprnds0[i] = new_temp;
5468 }
5469
5470 vect_create_vectorized_demotion_stmts (vinfo, &vec_oprnds0,
5471 multi_step_cvt,
5472 stmt_info, vec_dsts, gsi,
5473 slp_node, code1);
5474 break;
5475 }
5476 if (!slp_node)
5477    *vec_stmt = STMT_VINFO_VEC_STMTS (stmt_info)[0];
5478
5479 vec_oprnds0.release ();
5480 vec_oprnds1.release ();
5481 interm_types.release ();
5482
5483 return true;
5484}
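
/* [Editor's note: illustrative sketch, not GCC code.]  The
   NONE/WIDEN/NARROW classification above falls out of the lane counts:
   narrower output elements mean more output lanes (NARROW), wider ones
   mean fewer (WIDEN), and equal lane counts mean NONE unless the
   operation itself is widening arithmetic.  */

#include <cstdio>

enum modifier { NARROW, NONE, WIDEN };

static modifier classify (unsigned nunits_in, unsigned nunits_out,
                          bool widen_arith)
{
  if (nunits_out == nunits_in)
    return widen_arith ? WIDEN : NONE;
  if (nunits_out % nunits_in == 0)   /* more, smaller output lanes */
    return NARROW;
  return WIDEN;                      /* fewer, larger output lanes */
}

int main ()
{
  printf ("%d %d %d\n",
          (int) classify (4, 4, false),   /* NONE */
          (int) classify (4, 8, false),   /* NARROW, e.g. int -> short */
          (int) classify (8, 4, false));  /* WIDEN,  e.g. short -> int */
  return 0;
}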
5485
5486/* Return true if we can assume from the scalar form of STMT_INFO that
5487 neither the scalar nor the vector forms will generate code. STMT_INFO
5488 is known not to involve a data reference. */
5489
5490bool
5491vect_nop_conversion_p (stmt_vec_info stmt_info)
5492{
5493 gassign *stmt = dyn_cast <gassign *> (stmt_info->stmt);
5494 if (!stmt)
5495 return false;
5496
5497 tree lhs = gimple_assign_lhs (stmt);
5498 tree_code code = gimple_assign_rhs_code (stmt);
5499 tree rhs = gimple_assign_rhs1 (stmt);
5500
5501 if (code == SSA_NAME || code == VIEW_CONVERT_EXPR)
5502 return true;
5503
5504  if (CONVERT_EXPR_CODE_P (code))
5505    return tree_nop_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs));
5506
5507 return false;
5508}
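
/* [Editor's note: illustrative sketch, not GCC code.]  A "nop"
   conversion keeps the value representation, so neither the scalar nor
   the vector form needs an instruction; the classic case is a
   signedness-only change at equal precision, which tree_nop_conversion_p
   accepts.  */

#include <cstdio>

int main ()
{
  int s = -1;
  unsigned u = (unsigned) s;    /* same 32-bit pattern; no code needed */
  printf ("%u\n", u);           /* 4294967295 */
  return 0;
}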
5509
5510/* Function vectorizable_assignment.
5511
5512 Check if STMT_INFO performs an assignment (copy) that can be vectorized.
5513 If VEC_STMT is also passed, vectorize the STMT_INFO: create a vectorized
5514 stmt to replace it, put it in VEC_STMT, and insert it at GSI.
5515 Return true if STMT_INFO is vectorizable in this way. */
5516
5517static bool
5518vectorizable_assignment (vec_info *vinfo,
5519 stmt_vec_info stmt_info, gimple_stmt_iterator *gsi,
5520 gimple **vec_stmt, slp_tree slp_node,
5521 stmt_vector_for_cost *cost_vec)
5522{
5523 tree vec_dest;
5524 tree scalar_dest;
5525 tree op;
5526 loop_vec_info loop_vinfo = dyn_cast <loop_vec_info> (vinfo);
5527 tree new_temp;
5528 enum vect_def_type dt[1] = {vect_unknown_def_type};
5529 int ndts = 1;
5530 int ncopies;
5531 int i;
5532 vec<tree> vec_oprnds = vNULL;
5533 tree vop;
5534 bb_vec_info bb_vinfo = dyn_cast <bb_vec_info> (vinfo);
5535 enum tree_code code;
5536 tree vectype_in;
5537
5538  if (!STMT_VINFO_RELEVANT_P (stmt_info) && !bb_vinfo)
5539 return false;
5540
5541  if (STMT_VINFO_DEF_TYPE (stmt_info) != vect_internal_def
5542 && ! vec_stmt)
5543 return false;
5544
5545 /* Is vectorizable assignment? */
5546 gassign *stmt = dyn_cast <gassign *> (stmt_info->stmt);
5547 if (!stmt)
5548 return false;
5549
5550 scalar_dest = gimple_assign_lhs (stmt);
5551  if (TREE_CODE (scalar_dest) != SSA_NAME)
5552 return false;
5553
5554  if (STMT_VINFO_DATA_REF (stmt_info))
5555 return false;
5556
5557 code = gimple_assign_rhs_code (stmt);
5558 if (!(gimple_assign_single_p (stmt)
5559 || code == PAREN_EXPR
5560	|| CONVERT_EXPR_CODE_P (code)))
5561 return false;
5562
5563  tree vectype = STMT_VINFO_VECTYPE (stmt_info);
5564 poly_uint64 nunits = TYPE_VECTOR_SUBPARTS (vectype);
5565
5566 /* Multiple types in SLP are handled by creating the appropriate number of
5567 vectorized stmts for each SLP node. Hence, NCOPIES is always 1 in
5568 case of SLP. */
5569 if (slp_node)
5570 ncopies = 1;
5571 else
5572 ncopies = vect_get_num_copies (loop_vinfo, vectype);
5573
5574  gcc_assert (ncopies >= 1);
5575
5576 slp_tree slp_op;
5577 if (!vect_is_simple_use (vinfo, stmt_info, slp_node, 0, &op, &slp_op,
5578 &dt[0], &vectype_in))
5579 {
5580 if (dump_enabled_p ())
5581 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
5582 "use not simple.\n");
5583 return false;
5584 }
5585 if (!vectype_in)
5586    vectype_in = get_vectype_for_scalar_type (vinfo, TREE_TYPE (op), slp_node);
5587
5588 /* We can handle NOP_EXPR conversions that do not change the number
5589 of elements or the vector size. */
5590  if ((CONVERT_EXPR_CODE_P (code)
5591       || code == VIEW_CONVERT_EXPR)
5592      && (!vectype_in
5593	  || maybe_ne (TYPE_VECTOR_SUBPARTS (vectype_in), nunits)
5594	  || maybe_ne (GET_MODE_SIZE (TYPE_MODE (vectype)),
5595		       GET_MODE_SIZE (TYPE_MODE (vectype_in)))))
5596    return false;
5597
5598  if (VECTOR_BOOLEAN_TYPE_P (vectype)
5599      && !VECTOR_BOOLEAN_TYPE_P (vectype_in))
5600    {
5601      if (dump_enabled_p ())
5602	dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
5603			 "can't convert between boolean and non "
5604			 "boolean vectors %T\n", TREE_TYPE (op));
5605
5606      return false;
5607    }
5608
5609 /* We do not handle bit-precision changes. */
5610  if ((CONVERT_EXPR_CODE_P (code)
5611       || code == VIEW_CONVERT_EXPR)
5612      && INTEGRAL_TYPE_P (TREE_TYPE (scalar_dest))
5613      && (!type_has_mode_precision_p (TREE_TYPE (scalar_dest))
5614	  || !type_has_mode_precision_p (TREE_TYPE (op)))
5615      /* But a conversion that does not change the bit-pattern is ok.  */
5616      && !((TYPE_PRECISION (TREE_TYPE (scalar_dest))
5617	    > TYPE_PRECISION (TREE_TYPE (op)))
5618	   && TYPE_UNSIGNED (TREE_TYPE (op))))
5619    {
5620      if (dump_enabled_p ())
5621	dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
5622			 "type conversion to/from bit-precision "
5623			 "unsupported.\n");
5624      return false;
5625    }
5626
5627 if (!vec_stmt) /* transformation not required. */
5628 {
5629 if (slp_node
5630 && !vect_maybe_update_slp_op_vectype (slp_op, vectype_in))
5631 {
5632 if (dump_enabled_p ())
5633 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
5634 "incompatible vector types for invariants\n");
5635 return false;
5636 }
5637      STMT_VINFO_TYPE (stmt_info) = assignment_vec_info_type;
5638      DUMP_VECT_SCOPE ("vectorizable_assignment");
5639 if (!vect_nop_conversion_p (stmt_info))
5640 vect_model_simple_cost (vinfo, stmt_info, ncopies, dt, ndts, slp_node,
5641 cost_vec);
5642 return true;
5643 }
5644
5645 /* Transform. */
5646 if (dump_enabled_p ())
5647 dump_printf_loc (MSG_NOTE, vect_location, "transform assignment.\n");
5648
5649 /* Handle def. */
5650 vec_dest = vect_create_destination_var (scalar_dest, vectype);
5651
5652 /* Handle use. */
5653 vect_get_vec_defs (vinfo, stmt_info, slp_node, ncopies, op, &vec_oprnds);
5654
5655  /* Arguments are ready.  Create the new vector stmt.  */
5656  FOR_EACH_VEC_ELT (vec_oprnds, i, vop)
5657    {
5658      if (CONVERT_EXPR_CODE_P (code)
5659 || code == VIEW_CONVERT_EXPR)
5660 vop = build1 (VIEW_CONVERT_EXPR, vectype, vop);
5661 gassign *new_stmt = gimple_build_assign (vec_dest, vop);
5662 new_temp = make_ssa_name (vec_dest, new_stmt);
5663 gimple_assign_set_lhs (new_stmt, new_temp);
5664 vect_finish_stmt_generation (vinfo, stmt_info, new_stmt, gsi);
5665 if (slp_node)
5666	SLP_TREE_VEC_STMTS (slp_node).quick_push (new_stmt);
5667      else
5668	STMT_VINFO_VEC_STMTS (stmt_info).safe_push (new_stmt);
5669 }
5670 if (!slp_node)
5671    *vec_stmt = STMT_VINFO_VEC_STMTS (stmt_info)[0];
5672
5673 vec_oprnds.release ();
5674 return true;
5675}
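
/* [Editor's note: illustrative sketch, not GCC code.]  The transform
   above funnels every eligible conversion through VIEW_CONVERT_EXPR,
   i.e. a pure reinterpretation of the vector's bits; the closest C++
   analogue is a memcpy-based bit cast.  */

#include <cstdint>
#include <cstdio>
#include <cstring>

int main ()
{
  float f = 1.0f;
  uint32_t bits;
  std::memcpy (&bits, &f, sizeof bits);  /* reinterpret, don't convert */
  printf ("0x%08x\n", (unsigned) bits);  /* 0x3f800000 */
  return 0;
}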
5676
5677
5678/* Return TRUE if CODE (a shift operation) is supported for SCALAR_TYPE
5679 either as shift by a scalar or by a vector. */
5680
5681bool
5682vect_supportable_shift (vec_info *vinfo, enum tree_code code, tree scalar_type)
5683{
5684
5685 machine_mode vec_mode;
5686 optab optab;
5687 int icode;
5688 tree vectype;
5689
5690 vectype = get_vectype_for_scalar_type (vinfo, scalar_type);
5691 if (!vectype)
5692 return false;
5693
5694 optab = optab_for_tree_code (code, vectype, optab_scalar);
5695  if (!optab
5696      || optab_handler (optab, TYPE_MODE (vectype)) == CODE_FOR_nothing)
5697    {
5698      optab = optab_for_tree_code (code, vectype, optab_vector);
5699      if (!optab
5700	  || (optab_handler (optab, TYPE_MODE (vectype))
5701	      == CODE_FOR_nothing))
5702	return false;
5703    }
5704
5705  vec_mode = TYPE_MODE (vectype);
5706 icode = (int) optab_handler (optab, vec_mode);
5707 if (icode == CODE_FOR_nothing)
5708 return false;
5709
5710 return true;
5711}
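
/* [Editor's note: illustrative sketch, not GCC code.]  The optab query
   above distinguishes the two shift flavours a target may offer:
   optab_scalar shifts every lane by one uniform amount, while
   optab_vector shifts each lane by its own amount.  */

#include <cstdio>

int main ()
{
  int lanes[4] = {1, 2, 3, 4};
  int amounts[4] = {1, 2, 3, 4};
  for (int i = 0; i < 4; i++)
    {
      int by_scalar = lanes[i] << 3;           /* one amount for all lanes */
      int by_vector = lanes[i] << amounts[i];  /* per-lane amounts */
      printf ("%d %d\n", by_scalar, by_vector);
    }
  return 0;
}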
5712
5713
5714/* Function vectorizable_shift.
5715
5716 Check if STMT_INFO performs a shift operation that can be vectorized.
5717 If VEC_STMT is also passed, vectorize the STMT_INFO: create a vectorized
5718 stmt to replace it, put it in VEC_STMT, and insert it at GSI.
5719 Return true if STMT_INFO is vectorizable in this way. */
5720
5721static bool
5722vectorizable_shift (vec_info *vinfo,
5723 stmt_vec_info stmt_info, gimple_stmt_iterator *gsi,
5724 gimple **vec_stmt, slp_tree slp_node,
5725 stmt_vector_for_cost *cost_vec)
5726{
5727 tree vec_dest;
5728 tree scalar_dest;
5729  tree op0, op1 = NULL;
5730  tree vec_oprnd1 = NULL_TREE;
5731 tree vectype;
5732 loop_vec_info loop_vinfo = dyn_cast <loop_vec_info> (vinfo);
5733 enum tree_code code;
5734 machine_mode vec_mode;
5735 tree new_temp;
5736 optab optab;
5737 int icode;
5738 machine_mode optab_op2_mode;
5739 enum vect_def_type dt[2] = {vect_unknown_def_type, vect_unknown_def_type};
5740 int ndts = 2;
5741 poly_uint64 nunits_in;
5742 poly_uint64 nunits_out;
5743 tree vectype_out;
5744 tree op1_vectype;
5745 int ncopies;
5746 int i;
5747 vec<tree> vec_oprnds0 = vNULL;
5748 vec<tree> vec_oprnds1 = vNULL;
5749 tree vop0, vop1;
5750 unsigned int k;
5751 bool scalar_shift_arg = true;
5752 bb_vec_info bb_vinfo = dyn_cast <bb_vec_info> (vinfo);
5753 bool incompatible_op1_vectype_p = false;
5754
5755  if (!STMT_VINFO_RELEVANT_P (stmt_info) && !bb_vinfo)
5756 return false;
5757
5758  if (STMT_VINFO_DEF_TYPE (stmt_info) != vect_internal_def
5759      && STMT_VINFO_DEF_TYPE (stmt_info) != vect_nested_cycle
5760 && ! vec_stmt)
5761 return false;
5762
5763 /* Is STMT a vectorizable binary/unary operation? */
5764 gassign *stmt = dyn_cast <gassign *> (stmt_info->stmt);
5765 if (!stmt)
5766 return false;
5767
5768  if (TREE_CODE (gimple_assign_lhs (stmt)) != SSA_NAME)
5769 return false;
5770
5771 code = gimple_assign_rhs_code (stmt);
5772
5773 if (!(code == LSHIFT_EXPR || code == RSHIFT_EXPR || code == LROTATE_EXPR
5774 || code == RROTATE_EXPR))
5775 return false;
5776
5777 scalar_dest = gimple_assign_lhs (stmt);
5778  vectype_out = STMT_VINFO_VECTYPE (stmt_info);
5779  if (!type_has_mode_precision_p (TREE_TYPE (scalar_dest)))
5780 {
5781 if (dump_enabled_p ())
5782 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
5783 "bit-precision shifts not supported.\n");
5784 return false;
5785 }
5786
5787 slp_tree slp_op0;
5788 if (!vect_is_simple_use (vinfo, stmt_info, slp_node,
5789 0, &op0, &slp_op0, &dt[0], &vectype))
5790 {
5791 if (dump_enabled_p ())
5792 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
5793 "use not simple.\n");
5794 return false;
5795 }
5796 /* If op0 is an external or constant def, infer the vector type
5797 from the scalar type. */
5798 if (!vectype)
5799    vectype = get_vectype_for_scalar_type (vinfo, TREE_TYPE (op0), slp_node);
5800 if (vec_stmt)
5801    gcc_assert (vectype);
5802 if (!vectype)
5803 {
5804 if (dump_enabled_p ())
5805 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
5806 "no vectype for scalar type\n");
5807 return false;
5808 }
5809
5810 nunits_out = TYPE_VECTOR_SUBPARTS (vectype_out);
5811 nunits_in = TYPE_VECTOR_SUBPARTS (vectype);
5812 if (maybe_ne (nunits_out, nunits_in))
5813 return false;
5814
5815 stmt_vec_info op1_def_stmt_info;
5816 slp_tree slp_op1;
5817 if (!vect_is_simple_use (vinfo, stmt_info, slp_node, 1, &op1, &slp_op1,
5818 &dt[1], &op1_vectype, &op1_def_stmt_info))
5819 {
5820 if (dump_enabled_p ())
5821 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
5822 "use not simple.\n");
5823 return false;
5824 }
5825
5826 /* Multiple types in SLP are handled by creating the appropriate number of
5827 vectorized stmts for each SLP node. Hence, NCOPIES is always 1 in
5828 case of SLP. */
5829 if (slp_node)
5830 ncopies = 1;
5831 else
5832 ncopies = vect_get_num_copies (loop_vinfo, vectype);
5833
5834  gcc_assert (ncopies >= 1);
5835
5836 /* Determine whether the shift amount is a vector, or scalar. If the
5837 shift/rotate amount is a vector, use the vector/vector shift optabs. */
5838
5839 if ((dt[1] == vect_internal_def
5840 || dt[1] == vect_induction_def
5841 || dt[1] == vect_nested_cycle)
5842 && !slp_node)
5843 scalar_shift_arg = false;
5844 else if (dt[1] == vect_constant_def
5845 || dt[1] == vect_external_def
5846 || dt[1] == vect_internal_def)
5847 {
5848 /* In SLP, need to check whether the shift count is the same,
5849 in loops if it is a constant or invariant, it is always
5850 a scalar shift. */
5851 if (slp_node)
5852 {