Bug Summary

File:build/gcc/tree-vect-stmts.c
Warning:line 7599, column 13
Although the value stored to 'vec_mask' is used in the enclosing expression, the value is never actually read from 'vec_mask'

Annotated Source Code

Press '?' to see keyboard shortcuts

clang -cc1 -cc1 -triple x86_64-unknown-linux-gnu -analyze -disable-free -disable-llvm-verifier -discard-value-names -main-file-name tree-vect-stmts.c -analyzer-store=region -analyzer-opt-analyze-nested-blocks -analyzer-checker=core -analyzer-checker=apiModeling -analyzer-checker=unix -analyzer-checker=deadcode -analyzer-checker=cplusplus -analyzer-checker=security.insecureAPI.UncheckedReturn -analyzer-checker=security.insecureAPI.getpw -analyzer-checker=security.insecureAPI.gets -analyzer-checker=security.insecureAPI.mktemp -analyzer-checker=security.insecureAPI.mkstemp -analyzer-checker=security.insecureAPI.vfork -analyzer-checker=nullability.NullPassedToNonnull -analyzer-checker=nullability.NullReturnedFromNonnull -analyzer-output plist -w -setup-static-analyzer -analyzer-config-compatibility-mode=true -mrelocation-model static -mframe-pointer=none -fmath-errno -fno-rounding-math -mconstructor-aliases -munwind-tables -target-cpu x86-64 -tune-cpu generic -debugger-tuning=gdb -fcoverage-compilation-dir=/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/objdir/gcc -resource-dir /usr/lib64/clang/13.0.0 -D IN_GCC -D HAVE_CONFIG_H -I . -I . -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/. 
-I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../include -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libcpp/include -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libcody -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libdecnumber -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libdecnumber/bid -I ../libdecnumber -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libbacktrace -internal-isystem /usr/bin/../lib64/gcc/x86_64-suse-linux/11/../../../../include/c++/11 -internal-isystem /usr/bin/../lib64/gcc/x86_64-suse-linux/11/../../../../include/c++/11/x86_64-suse-linux -internal-isystem /usr/bin/../lib64/gcc/x86_64-suse-linux/11/../../../../include/c++/11/backward -internal-isystem /usr/lib64/clang/13.0.0/include -internal-isystem /usr/local/include -internal-isystem /usr/bin/../lib64/gcc/x86_64-suse-linux/11/../../../../x86_64-suse-linux/include -internal-externc-isystem /include -internal-externc-isystem /usr/include -O2 -Wno-narrowing -Wwrite-strings -Wno-error=format-diag -Wno-long-long -Wno-variadic-macros -Wno-overlength-strings -fdeprecated-macro -fdebug-compilation-dir=/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/objdir/gcc -ferror-limit 19 -fno-rtti -fgnuc-version=4.2.1 -vectorize-loops -vectorize-slp -analyzer-output=plist-html -analyzer-config silence-checkers=core.NullDereference -faddrsig -D__GCC_HAVE_DWARF2_CFI_ASM=1 -o /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/objdir/clang-static-analyzer/2021-11-20-133755-20252-1/report-DV5zz6.plist -x c++ /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c
1/* Statement Analysis and Transformation for Vectorization
2 Copyright (C) 2003-2021 Free Software Foundation, Inc.
3 Contributed by Dorit Naishlos <dorit@il.ibm.com>
4 and Ira Rosen <irar@il.ibm.com>
5
6This file is part of GCC.
7
8GCC is free software; you can redistribute it and/or modify it under
9the terms of the GNU General Public License as published by the Free
10Software Foundation; either version 3, or (at your option) any later
11version.
12
13GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14WARRANTY; without even the implied warranty of MERCHANTABILITY or
15FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16for more details.
17
18You should have received a copy of the GNU General Public License
19along with GCC; see the file COPYING3. If not see
20<http://www.gnu.org/licenses/>. */
21
22#include "config.h"
23#include "system.h"
24#include "coretypes.h"
25#include "backend.h"
26#include "target.h"
27#include "rtl.h"
28#include "tree.h"
29#include "gimple.h"
30#include "ssa.h"
31#include "optabs-tree.h"
32#include "insn-config.h"
33#include "recog.h" /* FIXME: for insn_data */
34#include "cgraph.h"
35#include "dumpfile.h"
36#include "alias.h"
37#include "fold-const.h"
38#include "stor-layout.h"
39#include "tree-eh.h"
40#include "gimplify.h"
41#include "gimple-iterator.h"
42#include "gimplify-me.h"
43#include "tree-cfg.h"
44#include "tree-ssa-loop-manip.h"
45#include "cfgloop.h"
46#include "explow.h"
47#include "tree-ssa-loop.h"
48#include "tree-scalar-evolution.h"
49#include "tree-vectorizer.h"
50#include "builtins.h"
51#include "internal-fn.h"
52#include "tree-vector-builder.h"
53#include "vec-perm-indices.h"
54#include "tree-ssa-loop-niter.h"
55#include "gimple-fold.h"
56#include "regs.h"
57#include "attribs.h"
58
59/* For lang_hooks.types.type_for_mode. */
60#include "langhooks.h"
61
62/* Return the vectorized type for the given statement. */
63
64tree
65stmt_vectype (class _stmt_vec_info *stmt_info)
66{
67 return STMT_VINFO_VECTYPE (stmt_info)(stmt_info)->vectype;
68}
69
70/* Return TRUE iff the given statement is in an inner loop relative to
71 the loop being vectorized. */
72bool
73stmt_in_inner_loop_p (vec_info *vinfo, class _stmt_vec_info *stmt_info)
74{
75 gimple *stmt = STMT_VINFO_STMT (stmt_info)(stmt_info)->stmt;
76 basic_block bb = gimple_bb (stmt);
77 loop_vec_info loop_vinfo = dyn_cast <loop_vec_info> (vinfo);
78 class loop* loop;
79
80 if (!loop_vinfo)
81 return false;
82
83 loop = LOOP_VINFO_LOOP (loop_vinfo)(loop_vinfo)->loop;
84
85 return (bb->loop_father == loop->inner);
86}
87
88/* Record the cost of a statement, either by directly informing the
89 target model or by saving it in a vector for later processing.
90 Return a preliminary estimate of the statement's cost. */
91
92unsigned
93record_stmt_cost (stmt_vector_for_cost *body_cost_vec, int count,
94 enum vect_cost_for_stmt kind, stmt_vec_info stmt_info,
95 tree vectype, int misalign,
96 enum vect_cost_model_location where)
97{
98 if ((kind == vector_load || kind == unaligned_load)
99 && (stmt_info && STMT_VINFO_GATHER_SCATTER_P (stmt_info)(stmt_info)->gather_scatter_p))
100 kind = vector_gather_load;
101 if ((kind == vector_store || kind == unaligned_store)
102 && (stmt_info && STMT_VINFO_GATHER_SCATTER_P (stmt_info)(stmt_info)->gather_scatter_p))
103 kind = vector_scatter_store;
104
105 stmt_info_for_cost si = { count, kind, where, stmt_info, vectype, misalign };
106 body_cost_vec->safe_push (si);
107
108 return (unsigned)
109 (builtin_vectorization_cost (kind, vectype, misalign) * count);
110}
111
112/* Return a variable of type ELEM_TYPE[NELEMS]. */
113
114static tree
115create_vector_array (tree elem_type, unsigned HOST_WIDE_INTlong nelems)
116{
117 return create_tmp_var (build_array_type_nelts (elem_type, nelems),
118 "vect_array");
119}
120
121/* ARRAY is an array of vectors created by create_vector_array.
122 Return an SSA_NAME for the vector in index N. The reference
123 is part of the vectorization of STMT_INFO and the vector is associated
124 with scalar destination SCALAR_DEST. */
125
126static tree
127read_vector_array (vec_info *vinfo,
128 stmt_vec_info stmt_info, gimple_stmt_iterator *gsi,
129 tree scalar_dest, tree array, unsigned HOST_WIDE_INTlong n)
130{
131 tree vect_type, vect, vect_name, array_ref;
132 gimple *new_stmt;
133
134 gcc_assert (TREE_CODE (TREE_TYPE (array)) == ARRAY_TYPE)((void)(!(((enum tree_code) (((contains_struct_check ((array)
, (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 134, __FUNCTION__))->typed.type))->base.code) == ARRAY_TYPE
) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 134, __FUNCTION__), 0 : 0))
;
135 vect_type = TREE_TYPE (TREE_TYPE (array))((contains_struct_check ((((contains_struct_check ((array), (
TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 135, __FUNCTION__))->typed.type)), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 135, __FUNCTION__))->typed.type)
;
136 vect = vect_create_destination_var (scalar_dest, vect_type);
137 array_ref = build4 (ARRAY_REF, vect_type, array,
138 build_int_cst (size_type_nodeglobal_trees[TI_SIZE_TYPE], n),
139 NULL_TREE(tree) nullptr, NULL_TREE(tree) nullptr);
140
141 new_stmt = gimple_build_assign (vect, array_ref);
142 vect_name = make_ssa_name (vect, new_stmt);
143 gimple_assign_set_lhs (new_stmt, vect_name);
144 vect_finish_stmt_generation (vinfo, stmt_info, new_stmt, gsi);
145
146 return vect_name;
147}
148
149/* ARRAY is an array of vectors created by create_vector_array.
150 Emit code to store SSA_NAME VECT in index N of the array.
151 The store is part of the vectorization of STMT_INFO. */
152
153static void
154write_vector_array (vec_info *vinfo,
155 stmt_vec_info stmt_info, gimple_stmt_iterator *gsi,
156 tree vect, tree array, unsigned HOST_WIDE_INTlong n)
157{
158 tree array_ref;
159 gimple *new_stmt;
160
161 array_ref = build4 (ARRAY_REF, TREE_TYPE (vect)((contains_struct_check ((vect), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 161, __FUNCTION__))->typed.type)
, array,
162 build_int_cst (size_type_nodeglobal_trees[TI_SIZE_TYPE], n),
163 NULL_TREE(tree) nullptr, NULL_TREE(tree) nullptr);
164
165 new_stmt = gimple_build_assign (array_ref, vect);
166 vect_finish_stmt_generation (vinfo, stmt_info, new_stmt, gsi);
167}
168
169/* PTR is a pointer to an array of type TYPE. Return a representation
170 of *PTR. The memory reference replaces those in FIRST_DR
171 (and its group). */
172
173static tree
174create_array_ref (tree type, tree ptr, tree alias_ptr_type)
175{
176 tree mem_ref;
177
178 mem_ref = build2 (MEM_REF, type, ptr, build_int_cst (alias_ptr_type, 0));
179 /* Arrays have the same alignment as their type. */
180 set_ptr_info_alignment (get_ptr_info (ptr), TYPE_ALIGN_UNIT (type)((((tree_class_check ((type), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 180, __FUNCTION__))->type_common.align) ? ((unsigned)1) <<
(((tree_class_check ((type), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 180, __FUNCTION__))->type_common.align) - 1) : 0) / (8))
, 0);
181 return mem_ref;
182}
183
184/* Add a clobber of variable VAR to the vectorization of STMT_INFO.
185 Emit the clobber before *GSI. */
186
187static void
188vect_clobber_variable (vec_info *vinfo, stmt_vec_info stmt_info,
189 gimple_stmt_iterator *gsi, tree var)
190{
191 tree clobber = build_clobber (TREE_TYPE (var)((contains_struct_check ((var), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 191, __FUNCTION__))->typed.type)
);
192 gimple *new_stmt = gimple_build_assign (var, clobber);
193 vect_finish_stmt_generation (vinfo, stmt_info, new_stmt, gsi);
194}
195
196/* Utility functions used by vect_mark_stmts_to_be_vectorized. */
197
198/* Function vect_mark_relevant.
199
200 Mark STMT_INFO as "relevant for vectorization" and add it to WORKLIST. */
201
202static void
203vect_mark_relevant (vec<stmt_vec_info> *worklist, stmt_vec_info stmt_info,
204 enum vect_relevant relevant, bool live_p)
205{
206 enum vect_relevant save_relevant = STMT_VINFO_RELEVANT (stmt_info)(stmt_info)->relevant;
207 bool save_live_p = STMT_VINFO_LIVE_P (stmt_info)(stmt_info)->live;
208
209 if (dump_enabled_p ())
210 dump_printf_loc (MSG_NOTE, vect_location,
211 "mark relevant %d, live %d: %G", relevant, live_p,
212 stmt_info->stmt);
213
214 /* If this stmt is an original stmt in a pattern, we might need to mark its
215 related pattern stmt instead of the original stmt. However, such stmts
216 may have their own uses that are not in any pattern, in such cases the
217 stmt itself should be marked. */
218 if (STMT_VINFO_IN_PATTERN_P (stmt_info)(stmt_info)->in_pattern_p)
219 {
220 /* This is the last stmt in a sequence that was detected as a
221 pattern that can potentially be vectorized. Don't mark the stmt
222 as relevant/live because it's not going to be vectorized.
223 Instead mark the pattern-stmt that replaces it. */
224
225 if (dump_enabled_p ())
226 dump_printf_loc (MSG_NOTE, vect_location,
227 "last stmt in pattern. don't mark"
228 " relevant/live.\n");
229 stmt_vec_info old_stmt_info = stmt_info;
230 stmt_info = STMT_VINFO_RELATED_STMT (stmt_info)(stmt_info)->related_stmt;
231 gcc_assert (STMT_VINFO_RELATED_STMT (stmt_info) == old_stmt_info)((void)(!((stmt_info)->related_stmt == old_stmt_info) ? fancy_abort
("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 231, __FUNCTION__), 0 : 0))
;
232 save_relevant = STMT_VINFO_RELEVANT (stmt_info)(stmt_info)->relevant;
233 save_live_p = STMT_VINFO_LIVE_P (stmt_info)(stmt_info)->live;
234 }
235
236 STMT_VINFO_LIVE_P (stmt_info)(stmt_info)->live |= live_p;
237 if (relevant > STMT_VINFO_RELEVANT (stmt_info)(stmt_info)->relevant)
238 STMT_VINFO_RELEVANT (stmt_info)(stmt_info)->relevant = relevant;
239
240 if (STMT_VINFO_RELEVANT (stmt_info)(stmt_info)->relevant == save_relevant
241 && STMT_VINFO_LIVE_P (stmt_info)(stmt_info)->live == save_live_p)
242 {
243 if (dump_enabled_p ())
244 dump_printf_loc (MSG_NOTE, vect_location,
245 "already marked relevant/live.\n");
246 return;
247 }
248
249 worklist->safe_push (stmt_info);
250}
251
252
253/* Function is_simple_and_all_uses_invariant
254
255 Return true if STMT_INFO is simple and all uses of it are invariant. */
256
257bool
258is_simple_and_all_uses_invariant (stmt_vec_info stmt_info,
259 loop_vec_info loop_vinfo)
260{
261 tree op;
262 ssa_op_iter iter;
263
264 gassign *stmt = dyn_cast <gassign *> (stmt_info->stmt);
265 if (!stmt)
266 return false;
267
268 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)for (op = op_iter_init_tree (&(iter), stmt, 0x01); !op_iter_done
(&(iter)); (void) (op = op_iter_next_tree (&(iter)))
)
269 {
270 enum vect_def_type dt = vect_uninitialized_def;
271
272 if (!vect_is_simple_use (op, loop_vinfo, &dt))
273 {
274 if (dump_enabled_p ())
275 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
276 "use not simple.\n");
277 return false;
278 }
279
280 if (dt != vect_external_def && dt != vect_constant_def)
281 return false;
282 }
283 return true;
284}
285
286/* Function vect_stmt_relevant_p.
287
288 Return true if STMT_INFO, in the loop that is represented by LOOP_VINFO,
289 is "relevant for vectorization".
290
291 A stmt is considered "relevant for vectorization" if:
292 - it has uses outside the loop.
293 - it has vdefs (it alters memory).
294 - control stmts in the loop (except for the exit condition).
295
296 CHECKME: what other side effects would the vectorizer allow? */
297
298static bool
299vect_stmt_relevant_p (stmt_vec_info stmt_info, loop_vec_info loop_vinfo,
300 enum vect_relevant *relevant, bool *live_p)
301{
302 class loop *loop = LOOP_VINFO_LOOP (loop_vinfo)(loop_vinfo)->loop;
303 ssa_op_iter op_iter;
304 imm_use_iterator imm_iter;
305 use_operand_p use_p;
306 def_operand_p def_p;
307
308 *relevant = vect_unused_in_scope;
309 *live_p = false;
310
311 /* cond stmt other than loop exit cond. */
312 if (is_ctrl_stmt (stmt_info->stmt)
313 && STMT_VINFO_TYPE (stmt_info)(stmt_info)->type != loop_exit_ctrl_vec_info_type)
314 *relevant = vect_used_in_scope;
315
316 /* changing memory. */
317 if (gimple_code (stmt_info->stmt) != GIMPLE_PHI)
318 if (gimple_vdef (stmt_info->stmt)
319 && !gimple_clobber_p (stmt_info->stmt))
320 {
321 if (dump_enabled_p ())
322 dump_printf_loc (MSG_NOTE, vect_location,
323 "vec_stmt_relevant_p: stmt has vdefs.\n");
324 *relevant = vect_used_in_scope;
325 }
326
327 /* uses outside the loop. */
328 FOR_EACH_PHI_OR_STMT_DEF (def_p, stmt_info->stmt, op_iter, SSA_OP_DEF)for ((def_p) = (gimple_code (stmt_info->stmt) == GIMPLE_PHI
? op_iter_init_phidef (&(op_iter), as_a <gphi *> (
stmt_info->stmt), 0x02) : op_iter_init_def (&(op_iter)
, stmt_info->stmt, 0x02)); !op_iter_done (&(op_iter));
(def_p) = op_iter_next_def (&(op_iter)))
329 {
330 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, DEF_FROM_PTR (def_p))for ((use_p) = first_readonly_imm_use (&(imm_iter), (get_def_from_ptr
(def_p))); !end_readonly_imm_use_p (&(imm_iter)); (void)
((use_p) = next_readonly_imm_use (&(imm_iter))))
331 {
332 basic_block bb = gimple_bb (USE_STMT (use_p)(use_p)->loc.stmt);
333 if (!flow_bb_inside_loop_p (loop, bb))
334 {
335 if (is_gimple_debug (USE_STMT (use_p)(use_p)->loc.stmt))
336 continue;
337
338 if (dump_enabled_p ())
339 dump_printf_loc (MSG_NOTE, vect_location,
340 "vec_stmt_relevant_p: used out of loop.\n");
341
342 /* We expect all such uses to be in the loop exit phis
343 (because of loop closed form) */
344 gcc_assert (gimple_code (USE_STMT (use_p)) == GIMPLE_PHI)((void)(!(gimple_code ((use_p)->loc.stmt) == GIMPLE_PHI) ?
fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 344, __FUNCTION__), 0 : 0))
;
345 gcc_assert (bb == single_exit (loop)->dest)((void)(!(bb == single_exit (loop)->dest) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 345, __FUNCTION__), 0 : 0))
;
346
347 *live_p = true;
348 }
349 }
350 }
351
352 if (*live_p && *relevant == vect_unused_in_scope
353 && !is_simple_and_all_uses_invariant (stmt_info, loop_vinfo))
354 {
355 if (dump_enabled_p ())
356 dump_printf_loc (MSG_NOTE, vect_location,
357 "vec_stmt_relevant_p: stmt live but not relevant.\n");
358 *relevant = vect_used_only_live;
359 }
360
361 return (*live_p || *relevant);
362}
363
364
365/* Function exist_non_indexing_operands_for_use_p
366
367 USE is one of the uses attached to STMT_INFO. Check if USE is
368 used in STMT_INFO for anything other than indexing an array. */
369
370static bool
371exist_non_indexing_operands_for_use_p (tree use, stmt_vec_info stmt_info)
372{
373 tree operand;
374
375 /* USE corresponds to some operand in STMT. If there is no data
376 reference in STMT, then any operand that corresponds to USE
377 is not indexing an array. */
378 if (!STMT_VINFO_DATA_REF (stmt_info)((stmt_info)->dr_aux.dr + 0))
379 return true;
380
381 /* STMT has a data_ref. FORNOW this means that its of one of
382 the following forms:
383 -1- ARRAY_REF = var
384 -2- var = ARRAY_REF
385 (This should have been verified in analyze_data_refs).
386
387 'var' in the second case corresponds to a def, not a use,
388 so USE cannot correspond to any operands that are not used
389 for array indexing.
390
391 Therefore, all we need to check is if STMT falls into the
392 first case, and whether var corresponds to USE. */
393
394 gassign *assign = dyn_cast <gassign *> (stmt_info->stmt);
395 if (!assign || !gimple_assign_copy_p (assign))
396 {
397 gcall *call = dyn_cast <gcall *> (stmt_info->stmt);
398 if (call && gimple_call_internal_p (call))
399 {
400 internal_fn ifn = gimple_call_internal_fn (call);
401 int mask_index = internal_fn_mask_index (ifn);
402 if (mask_index >= 0
403 && use == gimple_call_arg (call, mask_index))
404 return true;
405 int stored_value_index = internal_fn_stored_value_index (ifn);
406 if (stored_value_index >= 0
407 && use == gimple_call_arg (call, stored_value_index))
408 return true;
409 if (internal_gather_scatter_fn_p (ifn)
410 && use == gimple_call_arg (call, 1))
411 return true;
412 }
413 return false;
414 }
415
416 if (TREE_CODE (gimple_assign_lhs (assign))((enum tree_code) (gimple_assign_lhs (assign))->base.code) == SSA_NAME)
417 return false;
418 operand = gimple_assign_rhs1 (assign);
419 if (TREE_CODE (operand)((enum tree_code) (operand)->base.code) != SSA_NAME)
420 return false;
421
422 if (operand == use)
423 return true;
424
425 return false;
426}
427
428
429/*
430 Function process_use.
431
432 Inputs:
433 - a USE in STMT_VINFO in a loop represented by LOOP_VINFO
434 - RELEVANT - enum value to be set in the STMT_VINFO of the stmt
435 that defined USE. This is done by calling mark_relevant and passing it
436 the WORKLIST (to add DEF_STMT to the WORKLIST in case it is relevant).
437 - FORCE is true if exist_non_indexing_operands_for_use_p check shouldn't
438 be performed.
439
440 Outputs:
441 Generally, LIVE_P and RELEVANT are used to define the liveness and
442 relevance info of the DEF_STMT of this USE:
443 STMT_VINFO_LIVE_P (DEF_stmt_vinfo) <-- live_p
444 STMT_VINFO_RELEVANT (DEF_stmt_vinfo) <-- relevant
445 Exceptions:
446 - case 1: If USE is used only for address computations (e.g. array indexing),
447 which does not need to be directly vectorized, then the liveness/relevance
448 of the respective DEF_STMT is left unchanged.
449 - case 2: If STMT_VINFO is a reduction phi and DEF_STMT is a reduction stmt,
450 we skip DEF_STMT cause it had already been processed.
451 - case 3: If DEF_STMT and STMT_VINFO are in different nests, then
452 "relevant" will be modified accordingly.
453
454 Return true if everything is as expected. Return false otherwise. */
455
456static opt_result
457process_use (stmt_vec_info stmt_vinfo, tree use, loop_vec_info loop_vinfo,
458 enum vect_relevant relevant, vec<stmt_vec_info> *worklist,
459 bool force)
460{
461 stmt_vec_info dstmt_vinfo;
462 enum vect_def_type dt;
463
464 /* case 1: we are only interested in uses that need to be vectorized. Uses
465 that are used for address computation are not considered relevant. */
466 if (!force && !exist_non_indexing_operands_for_use_p (use, stmt_vinfo))
467 return opt_result::success ();
468
469 if (!vect_is_simple_use (use, loop_vinfo, &dt, &dstmt_vinfo))
470 return opt_result::failure_at (stmt_vinfo->stmt,
471 "not vectorized:"
472 " unsupported use in stmt.\n");
473
474 if (!dstmt_vinfo)
475 return opt_result::success ();
476
477 basic_block def_bb = gimple_bb (dstmt_vinfo->stmt);
478 basic_block bb = gimple_bb (stmt_vinfo->stmt);
479
480 /* case 2: A reduction phi (STMT) defined by a reduction stmt (DSTMT_VINFO).
481 We have to force the stmt live since the epilogue loop needs it to
482 continue computing the reduction. */
483 if (gimple_code (stmt_vinfo->stmt) == GIMPLE_PHI
484 && STMT_VINFO_DEF_TYPE (stmt_vinfo)(stmt_vinfo)->def_type == vect_reduction_def
485 && gimple_code (dstmt_vinfo->stmt) != GIMPLE_PHI
486 && STMT_VINFO_DEF_TYPE (dstmt_vinfo)(dstmt_vinfo)->def_type == vect_reduction_def
487 && bb->loop_father == def_bb->loop_father)
488 {
489 if (dump_enabled_p ())
490 dump_printf_loc (MSG_NOTE, vect_location,
491 "reduc-stmt defining reduc-phi in the same nest.\n");
492 vect_mark_relevant (worklist, dstmt_vinfo, relevant, true);
493 return opt_result::success ();
494 }
495
496 /* case 3a: outer-loop stmt defining an inner-loop stmt:
497 outer-loop-header-bb:
498 d = dstmt_vinfo
499 inner-loop:
500 stmt # use (d)
501 outer-loop-tail-bb:
502 ... */
503 if (flow_loop_nested_p (def_bb->loop_father, bb->loop_father))
504 {
505 if (dump_enabled_p ())
506 dump_printf_loc (MSG_NOTE, vect_location,
507 "outer-loop def-stmt defining inner-loop stmt.\n");
508
509 switch (relevant)
510 {
511 case vect_unused_in_scope:
512 relevant = (STMT_VINFO_DEF_TYPE (stmt_vinfo)(stmt_vinfo)->def_type == vect_nested_cycle) ?
513 vect_used_in_scope : vect_unused_in_scope;
514 break;
515
516 case vect_used_in_outer_by_reduction:
517 gcc_assert (STMT_VINFO_DEF_TYPE (stmt_vinfo) != vect_reduction_def)((void)(!((stmt_vinfo)->def_type != vect_reduction_def) ? fancy_abort
("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 517, __FUNCTION__), 0 : 0))
;
518 relevant = vect_used_by_reduction;
519 break;
520
521 case vect_used_in_outer:
522 gcc_assert (STMT_VINFO_DEF_TYPE (stmt_vinfo) != vect_reduction_def)((void)(!((stmt_vinfo)->def_type != vect_reduction_def) ? fancy_abort
("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 522, __FUNCTION__), 0 : 0))
;
523 relevant = vect_used_in_scope;
524 break;
525
526 case vect_used_in_scope:
527 break;
528
529 default:
530 gcc_unreachable ()(fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 530, __FUNCTION__))
;
531 }
532 }
533
534 /* case 3b: inner-loop stmt defining an outer-loop stmt:
535 outer-loop-header-bb:
536 ...
537 inner-loop:
538 d = dstmt_vinfo
539 outer-loop-tail-bb (or outer-loop-exit-bb in double reduction):
540 stmt # use (d) */
541 else if (flow_loop_nested_p (bb->loop_father, def_bb->loop_father))
542 {
543 if (dump_enabled_p ())
544 dump_printf_loc (MSG_NOTE, vect_location,
545 "inner-loop def-stmt defining outer-loop stmt.\n");
546
547 switch (relevant)
548 {
549 case vect_unused_in_scope:
550 relevant = (STMT_VINFO_DEF_TYPE (stmt_vinfo)(stmt_vinfo)->def_type == vect_reduction_def
551 || STMT_VINFO_DEF_TYPE (stmt_vinfo)(stmt_vinfo)->def_type == vect_double_reduction_def) ?
552 vect_used_in_outer_by_reduction : vect_unused_in_scope;
553 break;
554
555 case vect_used_by_reduction:
556 case vect_used_only_live:
557 relevant = vect_used_in_outer_by_reduction;
558 break;
559
560 case vect_used_in_scope:
561 relevant = vect_used_in_outer;
562 break;
563
564 default:
565 gcc_unreachable ()(fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 565, __FUNCTION__))
;
566 }
567 }
568 /* We are also not interested in uses on loop PHI backedges that are
569 inductions. Otherwise we'll needlessly vectorize the IV increment
570 and cause hybrid SLP for SLP inductions. Unless the PHI is live
571 of course. */
572 else if (gimple_code (stmt_vinfo->stmt) == GIMPLE_PHI
573 && STMT_VINFO_DEF_TYPE (stmt_vinfo)(stmt_vinfo)->def_type == vect_induction_def
574 && ! STMT_VINFO_LIVE_P (stmt_vinfo)(stmt_vinfo)->live
575 && (PHI_ARG_DEF_FROM_EDGE (stmt_vinfo->stmt,gimple_phi_arg_def (((stmt_vinfo->stmt)), ((loop_latch_edge
(bb->loop_father))->dest_idx))
576 loop_latch_edge (bb->loop_father))gimple_phi_arg_def (((stmt_vinfo->stmt)), ((loop_latch_edge
(bb->loop_father))->dest_idx))
577 == use))
578 {
579 if (dump_enabled_p ())
580 dump_printf_loc (MSG_NOTE, vect_location,
581 "induction value on backedge.\n");
582 return opt_result::success ();
583 }
584
585
586 vect_mark_relevant (worklist, dstmt_vinfo, relevant, false);
587 return opt_result::success ();
588}
589
590
591/* Function vect_mark_stmts_to_be_vectorized.
592
593 Not all stmts in the loop need to be vectorized. For example:
594
595 for i...
596 for j...
597 1. T0 = i + j
598 2. T1 = a[T0]
599
600 3. j = j + 1
601
602 Stmt 1 and 3 do not need to be vectorized, because loop control and
603 addressing of vectorized data-refs are handled differently.
604
605 This pass detects such stmts. */
606
607opt_result
608vect_mark_stmts_to_be_vectorized (loop_vec_info loop_vinfo, bool *fatal)
609{
610 class loop *loop = LOOP_VINFO_LOOP (loop_vinfo)(loop_vinfo)->loop;
611 basic_block *bbs = LOOP_VINFO_BBS (loop_vinfo)(loop_vinfo)->bbs;
612 unsigned int nbbs = loop->num_nodes;
613 gimple_stmt_iterator si;
614 unsigned int i;
615 basic_block bb;
616 bool live_p;
617 enum vect_relevant relevant;
618
619 DUMP_VECT_SCOPE ("vect_mark_stmts_to_be_vectorized")auto_dump_scope scope ("vect_mark_stmts_to_be_vectorized", vect_location
)
;
620
621 auto_vec<stmt_vec_info, 64> worklist;
622
623 /* 1. Init worklist. */
624 for (i = 0; i < nbbs; i++)
625 {
626 bb = bbs[i];
627 for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
628 {
629 stmt_vec_info phi_info = loop_vinfo->lookup_stmt (gsi_stmt (si));
630 if (dump_enabled_p ())
631 dump_printf_loc (MSG_NOTE, vect_location, "init: phi relevant? %G",
632 phi_info->stmt);
633
634 if (vect_stmt_relevant_p (phi_info, loop_vinfo, &relevant, &live_p))
635 vect_mark_relevant (&worklist, phi_info, relevant, live_p);
636 }
637 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
638 {
639 if (is_gimple_debug (gsi_stmt (si)))
640 continue;
641 stmt_vec_info stmt_info = loop_vinfo->lookup_stmt (gsi_stmt (si));
642 if (dump_enabled_p ())
643 dump_printf_loc (MSG_NOTE, vect_location,
644 "init: stmt relevant? %G", stmt_info->stmt);
645
646 if (vect_stmt_relevant_p (stmt_info, loop_vinfo, &relevant, &live_p))
647 vect_mark_relevant (&worklist, stmt_info, relevant, live_p);
648 }
649 }
650
651 /* 2. Process_worklist */
652 while (worklist.length () > 0)
653 {
654 use_operand_p use_p;
655 ssa_op_iter iter;
656
657 stmt_vec_info stmt_vinfo = worklist.pop ();
658 if (dump_enabled_p ())
659 dump_printf_loc (MSG_NOTE, vect_location,
660 "worklist: examine stmt: %G", stmt_vinfo->stmt);
661
662 /* Examine the USEs of STMT. For each USE, mark the stmt that defines it
663 (DEF_STMT) as relevant/irrelevant according to the relevance property
664 of STMT. */
665 relevant = STMT_VINFO_RELEVANT (stmt_vinfo)(stmt_vinfo)->relevant;
666
667 /* Generally, the relevance property of STMT (in STMT_VINFO_RELEVANT) is
668 propagated as is to the DEF_STMTs of its USEs.
669
670 One exception is when STMT has been identified as defining a reduction
671 variable; in this case we set the relevance to vect_used_by_reduction.
672 This is because we distinguish between two kinds of relevant stmts -
673 those that are used by a reduction computation, and those that are
674 (also) used by a regular computation. This allows us later on to
675 identify stmts that are used solely by a reduction, and therefore the
676 order of the results that they produce does not have to be kept. */
677
678 switch (STMT_VINFO_DEF_TYPE (stmt_vinfo)(stmt_vinfo)->def_type)
679 {
680 case vect_reduction_def:
681 gcc_assert (relevant != vect_unused_in_scope)((void)(!(relevant != vect_unused_in_scope) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 681, __FUNCTION__), 0 : 0))
;
682 if (relevant != vect_unused_in_scope
683 && relevant != vect_used_in_scope
684 && relevant != vect_used_by_reduction
685 && relevant != vect_used_only_live)
686 return opt_result::failure_at
687 (stmt_vinfo->stmt, "unsupported use of reduction.\n");
688 break;
689
690 case vect_nested_cycle:
691 if (relevant != vect_unused_in_scope
692 && relevant != vect_used_in_outer_by_reduction
693 && relevant != vect_used_in_outer)
694 return opt_result::failure_at
695 (stmt_vinfo->stmt, "unsupported use of nested cycle.\n");
696 break;
697
698 case vect_double_reduction_def:
699 if (relevant != vect_unused_in_scope
700 && relevant != vect_used_by_reduction
701 && relevant != vect_used_only_live)
702 return opt_result::failure_at
703 (stmt_vinfo->stmt, "unsupported use of double reduction.\n");
704 break;
705
706 default:
707 break;
708 }
709
710 if (is_pattern_stmt_p (stmt_vinfo))
711 {
712 /* Pattern statements are not inserted into the code, so
713 FOR_EACH_PHI_OR_STMT_USE optimizes their operands out, and we
714 have to scan the RHS or function arguments instead. */
715 if (gassign *assign = dyn_cast <gassign *> (stmt_vinfo->stmt))
716 {
717 enum tree_code rhs_code = gimple_assign_rhs_code (assign);
718 tree op = gimple_assign_rhs1 (assign);
719
720 i = 1;
721 if (rhs_code == COND_EXPR && COMPARISON_CLASS_P (op)(tree_code_type[(int) (((enum tree_code) (op)->base.code))
] == tcc_comparison)
)
722 {
723 opt_result res
724 = process_use (stmt_vinfo, TREE_OPERAND (op, 0)(*((const_cast<tree*> (tree_operand_check ((op), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 724, __FUNCTION__)))))
,
725 loop_vinfo, relevant, &worklist, false);
726 if (!res)
727 return res;
728 res = process_use (stmt_vinfo, TREE_OPERAND (op, 1)(*((const_cast<tree*> (tree_operand_check ((op), (1), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 728, __FUNCTION__)))))
,
729 loop_vinfo, relevant, &worklist, false);
730 if (!res)
731 return res;
732 i = 2;
733 }
734 for (; i < gimple_num_ops (assign); i++)
735 {
736 op = gimple_op (assign, i);
737 if (TREE_CODE (op)((enum tree_code) (op)->base.code) == SSA_NAME)
738 {
739 opt_result res
740 = process_use (stmt_vinfo, op, loop_vinfo, relevant,
741 &worklist, false);
742 if (!res)
743 return res;
744 }
745 }
746 }
747 else if (gcall *call = dyn_cast <gcall *> (stmt_vinfo->stmt))
748 {
749 for (i = 0; i < gimple_call_num_args (call); i++)
750 {
751 tree arg = gimple_call_arg (call, i);
752 opt_result res
753 = process_use (stmt_vinfo, arg, loop_vinfo, relevant,
754 &worklist, false);
755 if (!res)
756 return res;
757 }
758 }
759 }
760 else
761 FOR_EACH_PHI_OR_STMT_USE (use_p, stmt_vinfo->stmt, iter, SSA_OP_USE)for ((use_p) = (gimple_code (stmt_vinfo->stmt) == GIMPLE_PHI
? op_iter_init_phiuse (&(iter), as_a <gphi *> (stmt_vinfo
->stmt), 0x01) : op_iter_init_use (&(iter), stmt_vinfo
->stmt, 0x01)); !op_iter_done (&(iter)); (use_p) = op_iter_next_use
(&(iter)))
762 {
763 tree op = USE_FROM_PTR (use_p)get_use_from_ptr (use_p);
764 opt_result res
765 = process_use (stmt_vinfo, op, loop_vinfo, relevant,
766 &worklist, false);
767 if (!res)
768 return res;
769 }
770
771 if (STMT_VINFO_GATHER_SCATTER_P (stmt_vinfo)(stmt_vinfo)->gather_scatter_p)
772 {
773 gather_scatter_info gs_info;
774 if (!vect_check_gather_scatter (stmt_vinfo, loop_vinfo, &gs_info))
775 gcc_unreachable ()(fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 775, __FUNCTION__))
;
776 opt_result res
777 = process_use (stmt_vinfo, gs_info.offset, loop_vinfo, relevant,
778 &worklist, true);
779 if (!res)
780 {
781 if (fatal)
782 *fatal = false;
783 return res;
784 }
785 }
786 } /* while worklist */
787
788 return opt_result::success ();
789}
790
791/* Function vect_model_simple_cost.
792
793 Models cost for simple operations, i.e. those that only emit ncopies of a
794 single op. Right now, this does not account for multiple insns that could
795 be generated for the single vector op. We will handle that shortly. */
796
797static void
798vect_model_simple_cost (vec_info *,
799 stmt_vec_info stmt_info, int ncopies,
800 enum vect_def_type *dt,
801 int ndts,
802 slp_tree node,
803 stmt_vector_for_cost *cost_vec,
804 vect_cost_for_stmt kind = vector_stmt)
805{
806 int inside_cost = 0, prologue_cost = 0;
807
808 gcc_assert (cost_vec != NULL)((void)(!(cost_vec != nullptr) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 808, __FUNCTION__), 0 : 0))
;
809
810 /* ??? Somehow we need to fix this at the callers. */
811 if (node)
812 ncopies = SLP_TREE_NUMBER_OF_VEC_STMTS (node)(node)->vec_stmts_size;
813
814 if (!node)
815 /* Cost the "broadcast" of a scalar operand in to a vector operand.
816 Use scalar_to_vec to cost the broadcast, as elsewhere in the vector
817 cost model. */
818 for (int i = 0; i < ndts; i++)
819 if (dt[i] == vect_constant_def || dt[i] == vect_external_def)
820 prologue_cost += record_stmt_cost (cost_vec, 1, scalar_to_vec,
821 stmt_info, 0, vect_prologue);
822
823 /* Pass the inside-of-loop statements to the target-specific cost model. */
824 inside_cost += record_stmt_cost (cost_vec, ncopies, kind,
825 stmt_info, 0, vect_body);
826
827 if (dump_enabled_p ())
828 dump_printf_loc (MSG_NOTE, vect_location,
829 "vect_model_simple_cost: inside_cost = %d, "
830 "prologue_cost = %d .\n", inside_cost, prologue_cost);
831}
832
833
834/* Model cost for type demotion and promotion operations. PWR is
835 normally zero for single-step promotions and demotions. It will be
836 one if two-step promotion/demotion is required, and so on. NCOPIES
837 is the number of vector results (and thus number of instructions)
838 for the narrowest end of the operation chain. Each additional
839 step doubles the number of instructions required. If WIDEN_ARITH
840 is true the stmt is doing widening arithmetic. */
841
842static void
843vect_model_promotion_demotion_cost (stmt_vec_info stmt_info,
844 enum vect_def_type *dt,
845 unsigned int ncopies, int pwr,
846 stmt_vector_for_cost *cost_vec,
847 bool widen_arith)
848{
849 int i;
850 int inside_cost = 0, prologue_cost = 0;
851
852 for (i = 0; i < pwr + 1; i++)
853 {
854 inside_cost += record_stmt_cost (cost_vec, ncopies,
855 widen_arith
856 ? vector_stmt : vec_promote_demote,
857 stmt_info, 0, vect_body);
858 ncopies *= 2;
859 }
860
861 /* FORNOW: Assuming maximum 2 args per stmts. */
862 for (i = 0; i < 2; i++)
863 if (dt[i] == vect_constant_def || dt[i] == vect_external_def)
864 prologue_cost += record_stmt_cost (cost_vec, 1, vector_stmt,
865 stmt_info, 0, vect_prologue);
866
867 if (dump_enabled_p ())
868 dump_printf_loc (MSG_NOTE, vect_location,
869 "vect_model_promotion_demotion_cost: inside_cost = %d, "
870 "prologue_cost = %d .\n", inside_cost, prologue_cost);
871}
872
873/* Returns true if the current function returns DECL. */
874
875static bool
876cfun_returns (tree decl)
877{
878 edge_iterator ei;
879 edge e;
880 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)for ((ei) = ei_start_1 (&(((((cfun + 0))->cfg->x_exit_block_ptr
)->preds))); ei_cond ((ei), &(e)); ei_next (&(ei))
)
881 {
882 greturn *ret = safe_dyn_cast <greturn *> (last_stmt (e->src));
883 if (!ret)
884 continue;
885 if (gimple_return_retval (ret) == decl)
886 return true;
887 /* We often end up with an aggregate copy to the result decl,
888 handle that case as well. First skip intermediate clobbers
889 though. */
890 gimple *def = ret;
891 do
892 {
893 def = SSA_NAME_DEF_STMT (gimple_vuse (def))(tree_check ((gimple_vuse (def)), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 893, __FUNCTION__, (SSA_NAME)))->ssa_name.def_stmt
;
894 }
895 while (gimple_clobber_p (def));
896 if (is_a <gassign *> (def)
897 && gimple_assign_lhs (def) == gimple_return_retval (ret)
898 && gimple_assign_rhs1 (def) == decl)
899 return true;
900 }
901 return false;
902}
903
904/* Function vect_model_store_cost
905
906 Models cost for stores. In the case of grouped accesses, one access
907 has the overhead of the grouped access attributed to it. */
908
909static void
910vect_model_store_cost (vec_info *vinfo, stmt_vec_info stmt_info, int ncopies,
911 vect_memory_access_type memory_access_type,
912 dr_alignment_support alignment_support_scheme,
913 int misalignment,
914 vec_load_store_type vls_type, slp_tree slp_node,
915 stmt_vector_for_cost *cost_vec)
916{
917 unsigned int inside_cost = 0, prologue_cost = 0;
918 stmt_vec_info first_stmt_info = stmt_info;
919 bool grouped_access_p = STMT_VINFO_GROUPED_ACCESS (stmt_info)((stmt_info)->dr_aux.dr && (((void)(!((stmt_info)->
dr_aux.dr) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 919, __FUNCTION__), 0 : 0)), (stmt_info)->first_element)
)
;
920
921 /* ??? Somehow we need to fix this at the callers. */
922 if (slp_node)
923 ncopies = SLP_TREE_NUMBER_OF_VEC_STMTS (slp_node)(slp_node)->vec_stmts_size;
924
925 if (vls_type == VLS_STORE_INVARIANT)
926 {
927 if (!slp_node)
928 prologue_cost += record_stmt_cost (cost_vec, 1, scalar_to_vec,
929 stmt_info, 0, vect_prologue);
930 }
931
932 /* Grouped stores update all elements in the group at once,
933 so we want the DR for the first statement. */
934 if (!slp_node && grouped_access_p)
935 first_stmt_info = DR_GROUP_FIRST_ELEMENT (stmt_info)(((void)(!((stmt_info)->dr_aux.dr) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 935, __FUNCTION__), 0 : 0)), (stmt_info)->first_element)
;
936
937 /* True if we should include any once-per-group costs as well as
938 the cost of the statement itself. For SLP we only get called
939 once per group anyhow. */
940 bool first_stmt_p = (first_stmt_info == stmt_info);
941
942 /* We assume that the cost of a single store-lanes instruction is
943 equivalent to the cost of DR_GROUP_SIZE separate stores. If a grouped
944 access is instead being provided by a permute-and-store operation,
945 include the cost of the permutes. */
946 if (first_stmt_p
947 && memory_access_type == VMAT_CONTIGUOUS_PERMUTE)
948 {
949 /* Uses a high and low interleave or shuffle operations for each
950 needed permute. */
951 int group_size = DR_GROUP_SIZE (first_stmt_info)(((void)(!((first_stmt_info)->dr_aux.dr) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 951, __FUNCTION__), 0 : 0)), (first_stmt_info)->size)
;
952 int nstmts = ncopies * ceil_log2 (group_size) * group_size;
953 inside_cost = record_stmt_cost (cost_vec, nstmts, vec_perm,
954 stmt_info, 0, vect_body);
955
956 if (dump_enabled_p ())
957 dump_printf_loc (MSG_NOTE, vect_location,
958 "vect_model_store_cost: strided group_size = %d .\n",
959 group_size);
960 }
961
962 tree vectype = STMT_VINFO_VECTYPE (stmt_info)(stmt_info)->vectype;
963 /* Costs of the stores. */
964 if (memory_access_type == VMAT_ELEMENTWISE
965 || memory_access_type == VMAT_GATHER_SCATTER)
966 {
967 /* N scalar stores plus extracting the elements. */
968 unsigned int assumed_nunits = vect_nunits_for_cost (vectype);
969 inside_cost += record_stmt_cost (cost_vec,
970 ncopies * assumed_nunits,
971 scalar_store, stmt_info, 0, vect_body);
972 }
973 else
974 vect_get_store_cost (vinfo, stmt_info, ncopies, alignment_support_scheme,
975 misalignment, &inside_cost, cost_vec);
976
977 if (memory_access_type == VMAT_ELEMENTWISE
978 || memory_access_type == VMAT_STRIDED_SLP)
979 {
980 /* N scalar stores plus extracting the elements. */
981 unsigned int assumed_nunits = vect_nunits_for_cost (vectype);
982 inside_cost += record_stmt_cost (cost_vec,
983 ncopies * assumed_nunits,
984 vec_to_scalar, stmt_info, 0, vect_body);
985 }
986
987 /* When vectorizing a store into the function result assign
988 a penalty if the function returns in a multi-register location.
989 In this case we assume we'll end up with having to spill the
990 vector result and do piecewise loads as a conservative estimate. */
991 tree base = get_base_address (STMT_VINFO_DATA_REF (stmt_info)((stmt_info)->dr_aux.dr + 0)->ref);
992 if (base
993 && (TREE_CODE (base)((enum tree_code) (base)->base.code) == RESULT_DECL
994 || (DECL_P (base)(tree_code_type[(int) (((enum tree_code) (base)->base.code
))] == tcc_declaration)
&& cfun_returns (base)))
995 && !aggregate_value_p (base, cfun(cfun + 0)->decl))
996 {
997 rtx reg = hard_function_value (TREE_TYPE (base)((contains_struct_check ((base), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 997, __FUNCTION__))->typed.type)
, cfun(cfun + 0)->decl, 0, 1);
998 /* ??? Handle PARALLEL in some way. */
999 if (REG_P (reg)(((enum rtx_code) (reg)->code) == REG))
1000 {
1001 int nregs = hard_regno_nregs (REGNO (reg)(rhs_regno(reg)), GET_MODE (reg)((machine_mode) (reg)->mode));
1002 /* Assume that a single reg-reg move is possible and cheap,
1003 do not account for vector to gp register move cost. */
1004 if (nregs > 1)
1005 {
1006 /* Spill. */
1007 prologue_cost += record_stmt_cost (cost_vec, ncopies,
1008 vector_store,
1009 stmt_info, 0, vect_epilogue);
1010 /* Loads. */
1011 prologue_cost += record_stmt_cost (cost_vec, ncopies * nregs,
1012 scalar_load,
1013 stmt_info, 0, vect_epilogue);
1014 }
1015 }
1016 }
1017
1018 if (dump_enabled_p ())
1019 dump_printf_loc (MSG_NOTE, vect_location,
1020 "vect_model_store_cost: inside_cost = %d, "
1021 "prologue_cost = %d .\n", inside_cost, prologue_cost);
1022}
1023
1024
1025/* Calculate cost of DR's memory access. */
1026void
1027vect_get_store_cost (vec_info *, stmt_vec_info stmt_info, int ncopies,
1028 dr_alignment_support alignment_support_scheme,
1029 int misalignment,
1030 unsigned int *inside_cost,
1031 stmt_vector_for_cost *body_cost_vec)
1032{
1033 switch (alignment_support_scheme)
1034 {
1035 case dr_aligned:
1036 {
1037 *inside_cost += record_stmt_cost (body_cost_vec, ncopies,
1038 vector_store, stmt_info, 0,
1039 vect_body);
1040
1041 if (dump_enabled_p ())
1042 dump_printf_loc (MSG_NOTE, vect_location,
1043 "vect_model_store_cost: aligned.\n");
1044 break;
1045 }
1046
1047 case dr_unaligned_supported:
1048 {
1049 /* Here, we assign an additional cost for the unaligned store. */
1050 *inside_cost += record_stmt_cost (body_cost_vec, ncopies,
1051 unaligned_store, stmt_info,
1052 misalignment, vect_body);
1053 if (dump_enabled_p ())
1054 dump_printf_loc (MSG_NOTE, vect_location,
1055 "vect_model_store_cost: unaligned supported by "
1056 "hardware.\n");
1057 break;
1058 }
1059
1060 case dr_unaligned_unsupported:
1061 {
1062 *inside_cost = VECT_MAX_COST1000;
1063
1064 if (dump_enabled_p ())
1065 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
1066 "vect_model_store_cost: unsupported access.\n");
1067 break;
1068 }
1069
1070 default:
1071 gcc_unreachable ()(fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 1071, __FUNCTION__))
;
1072 }
1073}
1074
1075
1076/* Function vect_model_load_cost
1077
1078 Models cost for loads. In the case of grouped accesses, one access has
1079 the overhead of the grouped access attributed to it. Since unaligned
1080 accesses are supported for loads, we also account for the costs of the
1081 access scheme chosen. */
1082
1083static void
1084vect_model_load_cost (vec_info *vinfo,
1085 stmt_vec_info stmt_info, unsigned ncopies, poly_uint64 vf,
1086 vect_memory_access_type memory_access_type,
1087 dr_alignment_support alignment_support_scheme,
1088 int misalignment,
1089 gather_scatter_info *gs_info,
1090 slp_tree slp_node,
1091 stmt_vector_for_cost *cost_vec)
1092{
1093 unsigned int inside_cost = 0, prologue_cost = 0;
1094 bool grouped_access_p = STMT_VINFO_GROUPED_ACCESS (stmt_info)((stmt_info)->dr_aux.dr && (((void)(!((stmt_info)->
dr_aux.dr) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 1094, __FUNCTION__), 0 : 0)), (stmt_info)->first_element
))
;
1095
1096 gcc_assert (cost_vec)((void)(!(cost_vec) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 1096, __FUNCTION__), 0 : 0))
;
1097
1098 /* ??? Somehow we need to fix this at the callers. */
1099 if (slp_node)
1100 ncopies = SLP_TREE_NUMBER_OF_VEC_STMTS (slp_node)(slp_node)->vec_stmts_size;
1101
1102 if (slp_node && SLP_TREE_LOAD_PERMUTATION (slp_node)(slp_node)->load_permutation.exists ())
1103 {
1104 /* If the load is permuted then the alignment is determined by
1105 the first group element not by the first scalar stmt DR. */
1106 stmt_vec_info first_stmt_info = DR_GROUP_FIRST_ELEMENT (stmt_info)(((void)(!((stmt_info)->dr_aux.dr) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 1106, __FUNCTION__), 0 : 0)), (stmt_info)->first_element
)
;
1107 /* Record the cost for the permutation. */
1108 unsigned n_perms, n_loads;
1109 vect_transform_slp_perm_load (vinfo, slp_node, vNULL, NULLnullptr,
1110 vf, true, &n_perms, &n_loads);
1111 inside_cost += record_stmt_cost (cost_vec, n_perms, vec_perm,
1112 first_stmt_info, 0, vect_body);
1113
1114 /* And adjust the number of loads performed. This handles
1115 redundancies as well as loads that are later dead. */
1116 ncopies = n_loads;
1117 }
1118
1119 /* Grouped loads read all elements in the group at once,
1120 so we want the DR for the first statement. */
1121 stmt_vec_info first_stmt_info = stmt_info;
1122 if (!slp_node && grouped_access_p)
1123 first_stmt_info = DR_GROUP_FIRST_ELEMENT (stmt_info)(((void)(!((stmt_info)->dr_aux.dr) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 1123, __FUNCTION__), 0 : 0)), (stmt_info)->first_element
)
;
1124
1125 /* True if we should include any once-per-group costs as well as
1126 the cost of the statement itself. For SLP we only get called
1127 once per group anyhow. */
1128 bool first_stmt_p = (first_stmt_info == stmt_info);
1129
1130 /* An IFN_LOAD_LANES will load all its vector results, regardless of which
1131 ones we actually need. Account for the cost of unused results. */
1132 if (first_stmt_p && !slp_node && memory_access_type == VMAT_LOAD_STORE_LANES)
1133 {
1134 unsigned int gaps = DR_GROUP_SIZE (first_stmt_info)(((void)(!((first_stmt_info)->dr_aux.dr) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 1134, __FUNCTION__), 0 : 0)), (first_stmt_info)->size)
;
1135 stmt_vec_info next_stmt_info = first_stmt_info;
1136 do
1137 {
1138 gaps -= 1;
1139 next_stmt_info = DR_GROUP_NEXT_ELEMENT (next_stmt_info)(((void)(!((next_stmt_info)->dr_aux.dr) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 1139, __FUNCTION__), 0 : 0)), (next_stmt_info)->next_element
)
;
1140 }
1141 while (next_stmt_info);
1142 if (gaps)
1143 {
1144 if (dump_enabled_p ())
1145 dump_printf_loc (MSG_NOTE, vect_location,
1146 "vect_model_load_cost: %d unused vectors.\n",
1147 gaps);
1148 vect_get_load_cost (vinfo, stmt_info, ncopies * gaps,
1149 alignment_support_scheme, misalignment, false,
1150 &inside_cost, &prologue_cost,
1151 cost_vec, cost_vec, true);
1152 }
1153 }
1154
1155 /* We assume that the cost of a single load-lanes instruction is
1156 equivalent to the cost of DR_GROUP_SIZE separate loads. If a grouped
1157 access is instead being provided by a load-and-permute operation,
1158 include the cost of the permutes. */
1159 if (first_stmt_p
1160 && memory_access_type == VMAT_CONTIGUOUS_PERMUTE)
1161 {
1162 /* Uses an even and odd extract operations or shuffle operations
1163 for each needed permute. */
1164 int group_size = DR_GROUP_SIZE (first_stmt_info)(((void)(!((first_stmt_info)->dr_aux.dr) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 1164, __FUNCTION__), 0 : 0)), (first_stmt_info)->size)
;
1165 int nstmts = ncopies * ceil_log2 (group_size) * group_size;
1166 inside_cost += record_stmt_cost (cost_vec, nstmts, vec_perm,
1167 stmt_info, 0, vect_body);
1168
1169 if (dump_enabled_p ())
1170 dump_printf_loc (MSG_NOTE, vect_location,
1171 "vect_model_load_cost: strided group_size = %d .\n",
1172 group_size);
1173 }
1174
1175 /* The loads themselves. */
1176 if (memory_access_type == VMAT_ELEMENTWISE
1177 || memory_access_type == VMAT_GATHER_SCATTER)
1178 {
1179 tree vectype = STMT_VINFO_VECTYPE (stmt_info)(stmt_info)->vectype;
1180 unsigned int assumed_nunits = vect_nunits_for_cost (vectype);
1181 if (memory_access_type == VMAT_GATHER_SCATTER
1182 && gs_info->ifn == IFN_LAST && !gs_info->decl)
1183 /* For emulated gathers N offset vector element extracts
1184 (we assume the scalar scaling and ptr + offset add is consumed by
1185 the load). */
1186 inside_cost += record_stmt_cost (cost_vec, ncopies * assumed_nunits,
1187 vec_to_scalar, stmt_info, 0,
1188 vect_body);
1189 /* N scalar loads plus gathering them into a vector. */
1190 inside_cost += record_stmt_cost (cost_vec,
1191 ncopies * assumed_nunits,
1192 scalar_load, stmt_info, 0, vect_body);
1193 }
1194 else if (memory_access_type == VMAT_INVARIANT)
1195 {
1196 /* Invariant loads will ideally be hoisted and splat to a vector. */
1197 prologue_cost += record_stmt_cost (cost_vec, 1,
1198 scalar_load, stmt_info, 0,
1199 vect_prologue);
1200 prologue_cost += record_stmt_cost (cost_vec, 1,
1201 scalar_to_vec, stmt_info, 0,
1202 vect_prologue);
1203 }
1204 else
1205 vect_get_load_cost (vinfo, stmt_info, ncopies,
1206 alignment_support_scheme, misalignment, first_stmt_p,
1207 &inside_cost, &prologue_cost,
1208 cost_vec, cost_vec, true);
1209 if (memory_access_type == VMAT_ELEMENTWISE
1210 || memory_access_type == VMAT_STRIDED_SLP
1211 || (memory_access_type == VMAT_GATHER_SCATTER
1212 && gs_info->ifn == IFN_LAST && !gs_info->decl))
1213 inside_cost += record_stmt_cost (cost_vec, ncopies, vec_construct,
1214 stmt_info, 0, vect_body);
1215
1216 if (dump_enabled_p ())
1217 dump_printf_loc (MSG_NOTE, vect_location,
1218 "vect_model_load_cost: inside_cost = %d, "
1219 "prologue_cost = %d .\n", inside_cost, prologue_cost);
1220}
1221
1222
1223/* Calculate cost of DR's memory access. */
1224void
1225vect_get_load_cost (vec_info *, stmt_vec_info stmt_info, int ncopies,
1226 dr_alignment_support alignment_support_scheme,
1227 int misalignment,
1228 bool add_realign_cost, unsigned int *inside_cost,
1229 unsigned int *prologue_cost,
1230 stmt_vector_for_cost *prologue_cost_vec,
1231 stmt_vector_for_cost *body_cost_vec,
1232 bool record_prologue_costs)
1233{
1234 switch (alignment_support_scheme)
1235 {
1236 case dr_aligned:
1237 {
1238 *inside_cost += record_stmt_cost (body_cost_vec, ncopies, vector_load,
1239 stmt_info, 0, vect_body);
1240
1241 if (dump_enabled_p ())
1242 dump_printf_loc (MSG_NOTE, vect_location,
1243 "vect_model_load_cost: aligned.\n");
1244
1245 break;
1246 }
1247 case dr_unaligned_supported:
1248 {
1249 /* Here, we assign an additional cost for the unaligned load. */
1250 *inside_cost += record_stmt_cost (body_cost_vec, ncopies,
1251 unaligned_load, stmt_info,
1252 misalignment, vect_body);
1253
1254 if (dump_enabled_p ())
1255 dump_printf_loc (MSG_NOTE, vect_location,
1256 "vect_model_load_cost: unaligned supported by "
1257 "hardware.\n");
1258
1259 break;
1260 }
1261 case dr_explicit_realign:
1262 {
1263 *inside_cost += record_stmt_cost (body_cost_vec, ncopies * 2,
1264 vector_load, stmt_info, 0, vect_body);
1265 *inside_cost += record_stmt_cost (body_cost_vec, ncopies,
1266 vec_perm, stmt_info, 0, vect_body);
1267
1268 /* FIXME: If the misalignment remains fixed across the iterations of
1269 the containing loop, the following cost should be added to the
1270 prologue costs. */
1271 if (targetm.vectorize.builtin_mask_for_load)
1272 *inside_cost += record_stmt_cost (body_cost_vec, 1, vector_stmt,
1273 stmt_info, 0, vect_body);
1274
1275 if (dump_enabled_p ())
1276 dump_printf_loc (MSG_NOTE, vect_location,
1277 "vect_model_load_cost: explicit realign\n");
1278
1279 break;
1280 }
1281 case dr_explicit_realign_optimized:
1282 {
1283 if (dump_enabled_p ())
1284 dump_printf_loc (MSG_NOTE, vect_location,
1285 "vect_model_load_cost: unaligned software "
1286 "pipelined.\n");
1287
1288 /* Unaligned software pipeline has a load of an address, an initial
1289 load, and possibly a mask operation to "prime" the loop. However,
1290 if this is an access in a group of loads, which provide grouped
1291 access, then the above cost should only be considered for one
1292 access in the group. Inside the loop, there is a load op
1293 and a realignment op. */
1294
1295 if (add_realign_cost && record_prologue_costs)
1296 {
1297 *prologue_cost += record_stmt_cost (prologue_cost_vec, 2,
1298 vector_stmt, stmt_info,
1299 0, vect_prologue);
1300 if (targetm.vectorize.builtin_mask_for_load)
1301 *prologue_cost += record_stmt_cost (prologue_cost_vec, 1,
1302 vector_stmt, stmt_info,
1303 0, vect_prologue);
1304 }
1305
1306 *inside_cost += record_stmt_cost (body_cost_vec, ncopies, vector_load,
1307 stmt_info, 0, vect_body);
1308 *inside_cost += record_stmt_cost (body_cost_vec, ncopies, vec_perm,
1309 stmt_info, 0, vect_body);
1310
1311 if (dump_enabled_p ())
1312 dump_printf_loc (MSG_NOTE, vect_location,
1313 "vect_model_load_cost: explicit realign optimized"
1314 "\n");
1315
1316 break;
1317 }
1318
1319 case dr_unaligned_unsupported:
1320 {
1321 *inside_cost = VECT_MAX_COST1000;
1322
1323 if (dump_enabled_p ())
1324 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
1325 "vect_model_load_cost: unsupported access.\n");
1326 break;
1327 }
1328
1329 default:
1330 gcc_unreachable ()(fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 1330, __FUNCTION__))
;
1331 }
1332}
1333
1334/* Insert the new stmt NEW_STMT at *GSI or at the appropriate place in
1335 the loop preheader for the vectorized stmt STMT_VINFO. */
1336
1337static void
1338vect_init_vector_1 (vec_info *vinfo, stmt_vec_info stmt_vinfo, gimple *new_stmt,
1339 gimple_stmt_iterator *gsi)
1340{
1341 if (gsi)
1342 vect_finish_stmt_generation (vinfo, stmt_vinfo, new_stmt, gsi);
1343 else
1344 vinfo->insert_on_entry (stmt_vinfo, new_stmt);
1345
1346 if (dump_enabled_p ())
1347 dump_printf_loc (MSG_NOTE, vect_location,
1348 "created new init_stmt: %G", new_stmt);
1349}
1350
1351/* Function vect_init_vector.
1352
1353 Insert a new stmt (INIT_STMT) that initializes a new variable of type
1354 TYPE with the value VAL. If TYPE is a vector type and VAL does not have
1355 vector type a vector with all elements equal to VAL is created first.
1356 Place the initialization at GSI if it is not NULL. Otherwise, place the
1357 initialization at the loop preheader.
1358 Return the DEF of INIT_STMT.
1359 It will be used in the vectorization of STMT_INFO. */
1360
1361tree
1362vect_init_vector (vec_info *vinfo, stmt_vec_info stmt_info, tree val, tree type,
1363 gimple_stmt_iterator *gsi)
1364{
1365 gimple *init_stmt;
1366 tree new_temp;
1367
1368 /* We abuse this function to push sth to a SSA name with initial 'val'. */
1369 if (! useless_type_conversion_p (type, TREE_TYPE (val)((contains_struct_check ((val), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 1369, __FUNCTION__))->typed.type)
))
1370 {
1371 gcc_assert (TREE_CODE (type) == VECTOR_TYPE)((void)(!(((enum tree_code) (type)->base.code) == VECTOR_TYPE
) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 1371, __FUNCTION__), 0 : 0))
;
1372 if (! types_compatible_p (TREE_TYPE (type)((contains_struct_check ((type), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 1372, __FUNCTION__))->typed.type)
, TREE_TYPE (val)((contains_struct_check ((val), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 1372, __FUNCTION__))->typed.type)
))
1373 {
1374 /* Scalar boolean value should be transformed into
1375 all zeros or all ones value before building a vector. */
1376 if (VECTOR_BOOLEAN_TYPE_P (type)(((enum tree_code) (type)->base.code) == VECTOR_TYPE &&
((enum tree_code) (((contains_struct_check ((type), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 1376, __FUNCTION__))->typed.type))->base.code) == BOOLEAN_TYPE
)
)
1377 {
1378 tree true_val = build_all_ones_cst (TREE_TYPE (type)((contains_struct_check ((type), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 1378, __FUNCTION__))->typed.type)
);
1379 tree false_val = build_zero_cst (TREE_TYPE (type)((contains_struct_check ((type), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 1379, __FUNCTION__))->typed.type)
);
1380
1381 if (CONSTANT_CLASS_P (val)(tree_code_type[(int) (((enum tree_code) (val)->base.code)
)] == tcc_constant)
)
1382 val = integer_zerop (val) ? false_val : true_val;
1383 else
1384 {
1385 new_temp = make_ssa_name (TREE_TYPE (type)((contains_struct_check ((type), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 1385, __FUNCTION__))->typed.type)
);
1386 init_stmt = gimple_build_assign (new_temp, COND_EXPR,
1387 val, true_val, false_val);
1388 vect_init_vector_1 (vinfo, stmt_info, init_stmt, gsi);
1389 val = new_temp;
1390 }
1391 }
1392 else
1393 {
1394 gimple_seq stmts = NULLnullptr;
1395 if (! INTEGRAL_TYPE_P (TREE_TYPE (val))(((enum tree_code) (((contains_struct_check ((val), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 1395, __FUNCTION__))->typed.type))->base.code) == ENUMERAL_TYPE
|| ((enum tree_code) (((contains_struct_check ((val), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 1395, __FUNCTION__))->typed.type))->base.code) == BOOLEAN_TYPE
|| ((enum tree_code) (((contains_struct_check ((val), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 1395, __FUNCTION__))->typed.type))->base.code) == INTEGER_TYPE
)
)
1396 val = gimple_build (&stmts, VIEW_CONVERT_EXPR,
1397 TREE_TYPE (type)((contains_struct_check ((type), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 1397, __FUNCTION__))->typed.type)
, val);
1398 else
1399 /* ??? Condition vectorization expects us to do
1400 promotion of invariant/external defs. */
1401 val = gimple_convert (&stmts, TREE_TYPE (type)((contains_struct_check ((type), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 1401, __FUNCTION__))->typed.type)
, val);
1402 for (gimple_stmt_iterator gsi2 = gsi_start (stmts)gsi_start_1 (&(stmts));
1403 !gsi_end_p (gsi2); )
1404 {
1405 init_stmt = gsi_stmt (gsi2);
1406 gsi_remove (&gsi2, false);
1407 vect_init_vector_1 (vinfo, stmt_info, init_stmt, gsi);
1408 }
1409 }
1410 }
1411 val = build_vector_from_val (type, val);
1412 }
1413
1414 new_temp = vect_get_new_ssa_name (type, vect_simple_var, "cst_");
1415 init_stmt = gimple_build_assign (new_temp, val);
1416 vect_init_vector_1 (vinfo, stmt_info, init_stmt, gsi);
1417 return new_temp;
1418}
1419
1420
1421/* Function vect_get_vec_defs_for_operand.
1422
1423 OP is an operand in STMT_VINFO. This function returns a vector of
1424 NCOPIES defs that will be used in the vectorized stmts for STMT_VINFO.
1425
1426 In the case that OP is an SSA_NAME which is defined in the loop, then
1427 STMT_VINFO_VEC_STMTS of the defining stmt holds the relevant defs.
1428
1429 In case OP is an invariant or constant, a new stmt that creates a vector def
1430 needs to be introduced. VECTYPE may be used to specify a required type for
1431 vector invariant. */
1432
1433void
1434vect_get_vec_defs_for_operand (vec_info *vinfo, stmt_vec_info stmt_vinfo,
1435 unsigned ncopies,
1436 tree op, vec<tree> *vec_oprnds, tree vectype)
1437{
1438 gimple *def_stmt;
1439 enum vect_def_type dt;
1440 bool is_simple_use;
1441 loop_vec_info loop_vinfo = dyn_cast <loop_vec_info> (vinfo);
1442
1443 if (dump_enabled_p ())
1444 dump_printf_loc (MSG_NOTE, vect_location,
1445 "vect_get_vec_defs_for_operand: %T\n", op);
1446
 /* Classify OP: constant, external (loop-invariant) or defined by a
 stmt inside the region; this choice drives the two branches below. */
1447 stmt_vec_info def_stmt_info;
1448 is_simple_use = vect_is_simple_use (op, loop_vinfo, &dt,
1449 &def_stmt_info, &def_stmt);
1450 gcc_assert (is_simple_use)((void)(!(is_simple_use) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 1450, __FUNCTION__), 0 : 0))
;
1451 if (def_stmt && dump_enabled_p ())
1452 dump_printf_loc (MSG_NOTE, vect_location, " def_stmt = %G", def_stmt);
1453
1454 vec_oprnds->create (ncopies);
 /* Constant or invariant operand: build one splatted vector def and
 push the same SSA name for all NCOPIES requested copies. */
1455 if (dt == vect_constant_def || dt == vect_external_def)
1456 {
1457 tree stmt_vectype = STMT_VINFO_VECTYPE (stmt_vinfo)(stmt_vinfo)->vectype;
1458 tree vector_type;
1459
1460 if (vectype)
1461 vector_type = vectype;
 /* A scalar boolean feeding a boolean vector stmt must be widened to
 the mask (truth) type of STMT_VECTYPE, not to a data vector. */
1462 else if (VECT_SCALAR_BOOLEAN_TYPE_P (TREE_TYPE (op))(((enum tree_code) (((contains_struct_check ((op), (TS_TYPED)
, "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 1462, __FUNCTION__))->typed.type))->base.code) == BOOLEAN_TYPE
|| ((((enum tree_code) (((contains_struct_check ((op), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 1462, __FUNCTION__))->typed.type))->base.code) == INTEGER_TYPE
|| ((enum tree_code) (((contains_struct_check ((op), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 1462, __FUNCTION__))->typed.type))->base.code) == ENUMERAL_TYPE
) && ((tree_class_check ((((contains_struct_check ((op
), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 1462, __FUNCTION__))->typed.type)), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 1462, __FUNCTION__))->type_common.precision) == 1 &&
((tree_class_check ((((contains_struct_check ((op), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 1462, __FUNCTION__))->typed.type)), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 1462, __FUNCTION__))->base.u.bits.unsigned_flag)))
1463 && VECTOR_BOOLEAN_TYPE_P (stmt_vectype)(((enum tree_code) (stmt_vectype)->base.code) == VECTOR_TYPE
&& ((enum tree_code) (((contains_struct_check ((stmt_vectype
), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 1463, __FUNCTION__))->typed.type))->base.code) == BOOLEAN_TYPE
)
)
1464 vector_type = truth_type_for (stmt_vectype);
1465 else
1466 vector_type = get_vectype_for_scalar_type (loop_vinfo, TREE_TYPE (op)((contains_struct_check ((op), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 1466, __FUNCTION__))->typed.type)
);
1467
1468 gcc_assert (vector_type)((void)(!(vector_type) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 1468, __FUNCTION__), 0 : 0))
;
1469 tree vop = vect_init_vector (vinfo, stmt_vinfo, op, vector_type, NULLnullptr);
1470 while (ncopies--)
1471 vec_oprnds->quick_push (vop);
1472 }
 /* OP is defined by a stmt in the region: reuse the LHS of each of the
 NCOPIES vector stmts already created for the defining stmt. */
1473 else
1474 {
1475 def_stmt_info = vect_stmt_to_vectorize (def_stmt_info);
1476 gcc_assert (STMT_VINFO_VEC_STMTS (def_stmt_info).length () == ncopies)((void)(!((def_stmt_info)->vec_stmts.length () == ncopies)
? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 1476, __FUNCTION__), 0 : 0))
;
1477 for (unsigned i = 0; i < ncopies; ++i)
1478 vec_oprnds->quick_push (gimple_get_lhs
1479 (STMT_VINFO_VEC_STMTS (def_stmt_info)(def_stmt_info)->vec_stmts[i]));
1480 }
1481}
1482
1483
1484/* Get vectorized definitions for OP0 and OP1. */
1485
1486void
1487vect_get_vec_defs (vec_info *vinfo, stmt_vec_info stmt_info, slp_tree slp_node,
1488 unsigned ncopies,
1489 tree op0, vec<tree> *vec_oprnds0, tree vectype0,
1490 tree op1, vec<tree> *vec_oprnds1, tree vectype1,
1491 tree op2, vec<tree> *vec_oprnds2, tree vectype2,
1492 tree op3, vec<tree> *vec_oprnds3, tree vectype3)
1493{
 /* SLP: the defs were already computed per child of SLP_NODE, so just
 collect them; child I corresponds to operand I. */
1494 if (slp_node)
1495 {
1496 if (op0)
1497 vect_get_slp_defs (SLP_TREE_CHILDREN (slp_node)(slp_node)->children[0], vec_oprnds0);
1498 if (op1)
1499 vect_get_slp_defs (SLP_TREE_CHILDREN (slp_node)(slp_node)->children[1], vec_oprnds1);
1500 if (op2)
1501 vect_get_slp_defs (SLP_TREE_CHILDREN (slp_node)(slp_node)->children[2], vec_oprnds2);
1502 if (op3)
1503 vect_get_slp_defs (SLP_TREE_CHILDREN (slp_node)(slp_node)->children[3], vec_oprnds3);
1504 }
 /* Non-SLP: build (or look up) NCOPIES vector defs for each non-null
 operand, honoring its requested vector type. */
1505 else
1506 {
1507 if (op0)
1508 vect_get_vec_defs_for_operand (vinfo, stmt_info, ncopies,
1509 op0, vec_oprnds0, vectype0);
1510 if (op1)
1511 vect_get_vec_defs_for_operand (vinfo, stmt_info, ncopies,
1512 op1, vec_oprnds1, vectype1);
1513 if (op2)
1514 vect_get_vec_defs_for_operand (vinfo, stmt_info, ncopies,
1515 op2, vec_oprnds2, vectype2);
1516 if (op3)
1517 vect_get_vec_defs_for_operand (vinfo, stmt_info, ncopies,
1518 op3, vec_oprnds3, vectype3);
1519 }
1520}
1521
 /* Convenience overload: same as above but with no explicit vector
 types, letting each operand's type be derived from its scalar type. */
1522void
1523vect_get_vec_defs (vec_info *vinfo, stmt_vec_info stmt_info, slp_tree slp_node,
1524 unsigned ncopies,
1525 tree op0, vec<tree> *vec_oprnds0,
1526 tree op1, vec<tree> *vec_oprnds1,
1527 tree op2, vec<tree> *vec_oprnds2,
1528 tree op3, vec<tree> *vec_oprnds3)
1529{
1530 vect_get_vec_defs (vinfo, stmt_info, slp_node, ncopies,
1531 op0, vec_oprnds0, NULL_TREE(tree) nullptr,
1532 op1, vec_oprnds1, NULL_TREE(tree) nullptr,
1533 op2, vec_oprnds2, NULL_TREE(tree) nullptr,
1534 op3, vec_oprnds3, NULL_TREE(tree) nullptr);
1535}
1536
1537/* Helper function called by vect_finish_replace_stmt and
1538 vect_finish_stmt_generation. Set the location of the new
1539 statement and create and return a stmt_vec_info for it. */
1540
1541static void
1542vect_finish_stmt_generation_1 (vec_info *,
1543 stmt_vec_info stmt_info, gimple *vec_stmt)
1544{
1545 if (dump_enabled_p ())
1546 dump_printf_loc (MSG_NOTE, vect_location, "add new stmt: %G", vec_stmt);
1547
 /* Copy the scalar stmt's location and EH region onto the new stmt. */
1548 if (stmt_info)
1549 {
1550 gimple_set_location (vec_stmt, gimple_location (stmt_info->stmt));
1551
1552 /* While EH edges will generally prevent vectorization, stmt might
1553 e.g. be in a must-not-throw region. Ensure newly created stmts
1554 that could throw are part of the same region. */
1555 int lp_nr = lookup_stmt_eh_lp (stmt_info->stmt);
1556 if (lp_nr != 0 && stmt_could_throw_p (cfun(cfun + 0), vec_stmt))
1557 add_stmt_to_eh_lp (vec_stmt, lp_nr);
1558 }
 /* Without an originating scalar stmt there is no EH region to join,
 so the new stmt must not be able to throw. */
1559 else
1560 gcc_assert (!stmt_could_throw_p (cfun, vec_stmt))((void)(!(!stmt_could_throw_p ((cfun + 0), vec_stmt)) ? fancy_abort
("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 1560, __FUNCTION__), 0 : 0))
;
1561}
1562
1563/* Replace the scalar statement STMT_INFO with a new vector statement VEC_STMT,
1564 which sets the same scalar result as STMT_INFO did. Create and return a
1565 stmt_vec_info for VEC_STMT. */
1566
1567void
1568vect_finish_replace_stmt (vec_info *vinfo,
1569 stmt_vec_info stmt_info, gimple *vec_stmt)
1570{
 /* In-place replacement only makes sense when the new stmt defines the
 very same LHS as the original scalar stmt. */
1571 gimple *scalar_stmt = vect_orig_stmt (stmt_info)->stmt;
1572 gcc_assert (gimple_get_lhs (scalar_stmt) == gimple_get_lhs (vec_stmt))((void)(!(gimple_get_lhs (scalar_stmt) == gimple_get_lhs (vec_stmt
)) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 1572, __FUNCTION__), 0 : 0))
;
1573
 /* Swap the scalar stmt for VEC_STMT at the same position, then apply
 the common location/EH bookkeeping. */
1574 gimple_stmt_iterator gsi = gsi_for_stmt (scalar_stmt);
1575 gsi_replace (&gsi, vec_stmt, true);
1576
1577 vect_finish_stmt_generation_1 (vinfo, stmt_info, vec_stmt);
1578}
1579
1580/* Add VEC_STMT to the vectorized implementation of STMT_INFO and insert it
1581 before *GSI. Create and return a stmt_vec_info for VEC_STMT. */
1582
1583void
1584vect_finish_stmt_generation (vec_info *vinfo,
1585 stmt_vec_info stmt_info, gimple *vec_stmt,
1586 gimple_stmt_iterator *gsi)
1587{
1588 gcc_assert (!stmt_info || gimple_code (stmt_info->stmt) != GIMPLE_LABEL)((void)(!(!stmt_info || gimple_code (stmt_info->stmt) != GIMPLE_LABEL
) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 1588, __FUNCTION__), 0 : 0))
;
1589
 /* If VEC_STMT has memory operands, wire up its virtual operands from
 the stmt we are inserting before, so the SSA renamer is not needed. */
1590 if (!gsi_end_p (*gsi)
1591 && gimple_has_mem_ops (vec_stmt))
1592 {
1593 gimple *at_stmt = gsi_stmt (*gsi);
1594 tree vuse = gimple_vuse (at_stmt);
1595 if (vuse && TREE_CODE (vuse)((enum tree_code) (vuse)->base.code) == SSA_NAME)
1596 {
1597 tree vdef = gimple_vdef (at_stmt);
1598 gimple_set_vuse (vec_stmt, gimple_vuse (at_stmt));
1599 gimple_set_modified (vec_stmt, true);
1600 /* If we have an SSA vuse and insert a store, update virtual
1601 SSA form to avoid triggering the renamer. Do so only
1602 if we can easily see all uses - which is what almost always
1603 happens with the way vectorized stmts are inserted. */
1604 if ((vdef && TREE_CODE (vdef)((enum tree_code) (vdef)->base.code) == SSA_NAME)
1605 && ((is_gimple_assign (vec_stmt)
1606 && !is_gimple_reg (gimple_assign_lhs (vec_stmt)))
1607 || (is_gimple_call (vec_stmt)
1608 && !(gimple_call_flags (vec_stmt)
1609 & (ECF_CONST(1 << 0)|ECF_PURE(1 << 1)|ECF_NOVOPS(1 << 9))))))
1610 {
 /* VEC_STMT is a store: give it a fresh vdef and chain
 AT_STMT's vuse to it. */
1611 tree new_vdef = copy_ssa_name (vuse, vec_stmt);
1612 gimple_set_vdef (vec_stmt, new_vdef);
1613 SET_USE (gimple_vuse_op (at_stmt), new_vdef)set_ssa_use_from_ptr (gimple_vuse_op (at_stmt), new_vdef);
1614 }
1615 }
1616 }
1617 gsi_insert_before (gsi, vec_stmt, GSI_SAME_STMT);
1618 vect_finish_stmt_generation_1 (vinfo, stmt_info, vec_stmt);
1619}
1620
1621/* We want to vectorize a call to combined function CFN with function
1622 decl FNDECL, using VECTYPE_OUT as the type of the output and VECTYPE_IN
1623 as the types of all inputs. Check whether this is possible using
1624 an internal function, returning its code if so or IFN_LAST if not. */
1625
1626static internal_fn
1627vectorizable_internal_function (combined_fn cfn, tree fndecl,
1628 tree vectype_out, tree vectype_in)
1629{
 /* Map CFN (or, failing that, FNDECL) to an internal function. */
1630 internal_fn ifn;
1631 if (internal_fn_p (cfn))
1632 ifn = as_internal_fn (cfn);
1633 else
1634 ifn = associated_internal_fn (fndecl);
1635 if (ifn != IFN_LAST && direct_internal_fn_p (ifn))
1636 {
1637 const direct_internal_fn_info &info = direct_internal_fn (ifn);
1638 if (info.vectorizable)
1639 {
 /* type0/type1 < 0 means the type is taken from the output
 rather than the input; check target support for the pair. */
1640 tree type0 = (info.type0 < 0 ? vectype_out : vectype_in);
1641 tree type1 = (info.type1 < 0 ? vectype_out : vectype_in);
1642 if (direct_internal_fn_supported_p (ifn, tree_pair (type0, type1),
1643 OPTIMIZE_FOR_SPEED))
1644 return ifn;
1645 }
1646 }
1647 return IFN_LAST;
1648}
1649
1650
1651static tree permute_vec_elements (vec_info *, tree, tree, tree, stmt_vec_info,
1652 gimple_stmt_iterator *);
1653
1654/* Check whether a load or store statement in the loop described by
1655 LOOP_VINFO is possible in a loop using partial vectors. This is
1656 testing whether the vectorizer pass has the appropriate support,
1657 as well as whether the target does.
1658
1659 VLS_TYPE says whether the statement is a load or store and VECTYPE
1660 is the type of the vector being loaded or stored. MEMORY_ACCESS_TYPE
1661 says how the load or store is going to be implemented and GROUP_SIZE
1662 is the number of load or store statements in the containing group.
1663 If the access is a gather load or scatter store, GS_INFO describes
1664 its arguments. If the load or store is conditional, SCALAR_MASK is the
1665 condition under which it occurs.
1666
1667 Clear LOOP_VINFO_CAN_USE_PARTIAL_VECTORS_P if a loop using partial
1668 vectors is not supported, otherwise record the required rgroup control
1669 types. */
1670
1671static void
1672check_load_store_for_partial_vectors (loop_vec_info loop_vinfo, tree vectype,
1673 vec_load_store_type vls_type,
1674 int group_size,
1675 vect_memory_access_type
1676 memory_access_type,
1677 gather_scatter_info *gs_info,
1678 tree scalar_mask)
1679{
1680 /* Invariant loads need no special support. */
1681 if (memory_access_type == VMAT_INVARIANT)
1682 return;
1683
1684 vec_loop_masks *masks = &LOOP_VINFO_MASKS (loop_vinfo)(loop_vinfo)->masks;
1685 machine_mode vecmode = TYPE_MODE (vectype)((((enum tree_code) ((tree_class_check ((vectype), (tcc_type)
, "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 1685, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(vectype) : (vectype)->type_common.mode)
;
1686 bool is_load = (vls_type == VLS_LOAD);
 /* Load/store-lanes: require masked lanes support, then record one
 rgroup mask per vector copy. */
1687 if (memory_access_type == VMAT_LOAD_STORE_LANES)
1688 {
1689 if (is_load
1690 ? !vect_load_lanes_supported (vectype, group_size, true)
1691 : !vect_store_lanes_supported (vectype, group_size, true))
1692 {
1693 if (dump_enabled_p ())
1694 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
1695 "can't operate on partial vectors because"
1696 " the target doesn't have an appropriate"
1697 " load/store-lanes instruction.\n");
1698 LOOP_VINFO_CAN_USE_PARTIAL_VECTORS_P (loop_vinfo)(loop_vinfo)->can_use_partial_vectors_p = false;
1699 return;
1700 }
1701 unsigned int ncopies = vect_get_num_copies (loop_vinfo, vectype);
1702 vect_record_loop_mask (loop_vinfo, masks, ncopies, vectype, scalar_mask);
1703 return;
1704 }
1705
 /* Gather/scatter: require the masked variant of the gather load or
 scatter store, then record the rgroup masks. */
1706 if (memory_access_type == VMAT_GATHER_SCATTER)
1707 {
1708 internal_fn ifn = (is_load
1709 ? IFN_MASK_GATHER_LOAD
1710 : IFN_MASK_SCATTER_STORE);
1711 if (!internal_gather_scatter_fn_supported_p (ifn, vectype,
1712 gs_info->memory_type,
1713 gs_info->offset_vectype,
1714 gs_info->scale))
1715 {
1716 if (dump_enabled_p ())
1717 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
1718 "can't operate on partial vectors because"
1719 " the target doesn't have an appropriate"
1720 " gather load or scatter store instruction.\n");
1721 LOOP_VINFO_CAN_USE_PARTIAL_VECTORS_P (loop_vinfo)(loop_vinfo)->can_use_partial_vectors_p = false;
1722 return;
1723 }
1724 unsigned int ncopies = vect_get_num_copies (loop_vinfo, vectype);
1725 vect_record_loop_mask (loop_vinfo, masks, ncopies, vectype, scalar_mask);
1726 return;
1727 }
1728
1729 if (memory_access_type != VMAT_CONTIGUOUS
1730 && memory_access_type != VMAT_CONTIGUOUS_PERMUTE)
1731 {
1732 /* Element X of the data must come from iteration i * VF + X of the
1733 scalar loop. We need more work to support other mappings. */
1734 if (dump_enabled_p ())
1735 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
1736 "can't operate on partial vectors because an"
1737 " access isn't contiguous.\n");
1738 LOOP_VINFO_CAN_USE_PARTIAL_VECTORS_P (loop_vinfo)(loop_vinfo)->can_use_partial_vectors_p = false;
1739 return;
1740 }
1741
 /* Emulated (non-native) vector modes cannot be masked or length
 controlled. */
1742 if (!VECTOR_MODE_P (vecmode)(((enum mode_class) mode_class[vecmode]) == MODE_VECTOR_BOOL ||
((enum mode_class) mode_class[vecmode]) == MODE_VECTOR_INT ||
((enum mode_class) mode_class[vecmode]) == MODE_VECTOR_FLOAT
|| ((enum mode_class) mode_class[vecmode]) == MODE_VECTOR_FRACT
|| ((enum mode_class) mode_class[vecmode]) == MODE_VECTOR_UFRACT
|| ((enum mode_class) mode_class[vecmode]) == MODE_VECTOR_ACCUM
|| ((enum mode_class) mode_class[vecmode]) == MODE_VECTOR_UACCUM
)
)
1743 {
1744 if (dump_enabled_p ())
1745 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
1746 "can't operate on partial vectors when emulating"
1747 " vector operations.\n");
1748 LOOP_VINFO_CAN_USE_PARTIAL_VECTORS_P (loop_vinfo)(loop_vinfo)->can_use_partial_vectors_p = false;
1749 return;
1750 }
1751
1752 /* We might load more scalars than we need for permuting SLP loads.
1753 We checked in get_group_load_store_type that the extra elements
1754 don't leak into a new vector. */
1755 auto get_valid_nvectors = [] (poly_uint64 size, poly_uint64 nunits)
1756 {
1757 unsigned int nvectors;
1758 if (can_div_away_from_zero_p (size, nunits, &nvectors))
1759 return nvectors;
1760 gcc_unreachable ()(fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 1760, __FUNCTION__))
;
1761 };
1762
1763 poly_uint64 nunits = TYPE_VECTOR_SUBPARTS (vectype);
1764 poly_uint64 vf = LOOP_VINFO_VECT_FACTOR (loop_vinfo)(loop_vinfo)->vectorization_factor;
1765 machine_mode mask_mode;
1766 bool using_partial_vectors_p = false;
 /* First choice: mask-based partial vectors via masked loads/stores. */
1767 if (targetm.vectorize.get_mask_mode (vecmode).exists (&mask_mode)
1768 && can_vec_mask_load_store_p (vecmode, mask_mode, is_load))
1769 {
1770 unsigned int nvectors = get_valid_nvectors (group_size * vf, nunits);
1771 vect_record_loop_mask (loop_vinfo, masks, nvectors, vectype, scalar_mask);
1772 using_partial_vectors_p = true;
1773 }
1774
 /* Second choice: length-based partial vectors via len_load/len_store. */
1775 machine_mode vmode;
1776 if (get_len_load_store_mode (vecmode, is_load).exists (&vmode))
1777 {
1778 unsigned int nvectors = get_valid_nvectors (group_size * vf, nunits);
1779 vec_loop_lens *lens = &LOOP_VINFO_LENS (loop_vinfo)(loop_vinfo)->lens;
1780 unsigned factor = (vecmode == vmode) ? 1 : GET_MODE_UNIT_SIZE (vecmode)mode_to_unit_size (vecmode);
1781 vect_record_loop_len (loop_vinfo, lens, nvectors, vectype, factor);
1782 using_partial_vectors_p = true;
1783 }
1784
1785 if (!using_partial_vectors_p)
1786 {
1787 if (dump_enabled_p ())
1788 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
1789 "can't operate on partial vectors because the"
1790 " target doesn't have the appropriate partial"
1791 " vectorization load or store.\n");
1792 LOOP_VINFO_CAN_USE_PARTIAL_VECTORS_P (loop_vinfo)(loop_vinfo)->can_use_partial_vectors_p = false;
1793 }
1794}
1795
1796/* Return the mask input to a masked load or store. VEC_MASK is the vectorized
1797 form of the scalar mask condition and LOOP_MASK, if nonnull, is the mask
1798 that needs to be applied to all loads and stores in a vectorized loop.
1799 Return VEC_MASK if LOOP_MASK is null, otherwise return VEC_MASK & LOOP_MASK.
1800
1801 MASK_TYPE is the type of both masks. If new statements are needed,
1802 insert them before GSI. */
1803
1804static tree
1805prepare_load_store_mask (tree mask_type, tree loop_mask, tree vec_mask,
1806 gimple_stmt_iterator *gsi)
1807{
1808 gcc_assert (useless_type_conversion_p (mask_type, TREE_TYPE (vec_mask)))((void)(!(useless_type_conversion_p (mask_type, ((contains_struct_check
((vec_mask), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 1808, __FUNCTION__))->typed.type))) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 1808, __FUNCTION__), 0 : 0))
;
 /* No loop mask to combine with: the scalar condition's mask is final. */
1809 if (!loop_mask)
1810 return vec_mask;
1811
1812 gcc_assert (TREE_TYPE (loop_mask) == mask_type)((void)(!(((contains_struct_check ((loop_mask), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 1812, __FUNCTION__))->typed.type) == mask_type) ? fancy_abort
("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 1812, __FUNCTION__), 0 : 0))
;
 /* Emit VEC_MASK & LOOP_MASK before *GSI and return the new name. */
1813 tree and_res = make_temp_ssa_name (mask_type, NULLnullptr, "vec_mask_and");
1814 gimple *and_stmt = gimple_build_assign (and_res, BIT_AND_EXPR,
1815 vec_mask, loop_mask);
1816 gsi_insert_before (gsi, and_stmt, GSI_SAME_STMT);
1817 return and_res;
1818}
1819
1820/* Determine whether we can use a gather load or scatter store to vectorize
1821 strided load or store STMT_INFO by truncating the current offset to a
1822 smaller width. We need to be able to construct an offset vector:
1823
1824 { 0, X, X*2, X*3, ... }
1825
1826 without loss of precision, where X is STMT_INFO's DR_STEP.
1827
1828 Return true if this is possible, describing the gather load or scatter
1829 store in GS_INFO. MASKED_P is true if the load or store is conditional. */
1830
1831static bool
1832vect_truncate_gather_scatter_offset (stmt_vec_info stmt_info,
1833 loop_vec_info loop_vinfo, bool masked_p,
1834 gather_scatter_info *gs_info)
1835{
1836 dr_vec_info *dr_info = STMT_VINFO_DR_INFO (stmt_info)(((void)(!((stmt_info)->dr_aux.stmt == (stmt_info)) ? fancy_abort
("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 1836, __FUNCTION__), 0 : 0)), &(stmt_info)->dr_aux)
;
1837 data_reference *dr = dr_info->dr;
 /* A constant step is required so that { 0, X, X*2, ... } can be
 checked for representability at compile time. */
1838 tree step = DR_STEP (dr)(dr)->innermost.step;
1839 if (TREE_CODE (step)((enum tree_code) (step)->base.code) != INTEGER_CST)
1840 {
1841 /* ??? Perhaps we could use range information here? */
1842 if (dump_enabled_p ())
1843 dump_printf_loc (MSG_NOTE, vect_location,
1844 "cannot truncate variable step.\n");
1845 return false;
1846 }
1847
1848 /* Get the number of bits in an element. */
1849 tree vectype = STMT_VINFO_VECTYPE (stmt_info)(stmt_info)->vectype;
1850 scalar_mode element_mode = SCALAR_TYPE_MODE (TREE_TYPE (vectype))(as_a <scalar_mode> ((tree_class_check ((((contains_struct_check
((vectype), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 1850, __FUNCTION__))->typed.type)), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 1850, __FUNCTION__))->type_common.mode))
;
1851 unsigned int element_bits = GET_MODE_BITSIZE (element_mode);
1852
1853 /* Set COUNT to the upper limit on the number of elements - 1.
1854 Start with the maximum vectorization factor. */
1855 unsigned HOST_WIDE_INTlong count = vect_max_vf (loop_vinfo) - 1;
1856
1857 /* Try lowering COUNT to the number of scalar latch iterations. */
1858 class loop *loop = LOOP_VINFO_LOOP (loop_vinfo)(loop_vinfo)->loop;
1859 widest_int max_iters;
1860 if (max_loop_iterations (loop, &max_iters)
1861 && max_iters < count)
1862 count = max_iters.to_shwi ();
1863
1864 /* Try scales of 1 and the element size. */
1865 int scales[] = { 1, vect_get_scalar_dr_size (dr_info) };
1866 wi::overflow_type overflow = wi::OVF_NONE;
1867 for (int i = 0; i < 2; ++i)
1868 {
1869 int scale = scales[i];
1870 widest_int factor;
 /* STEP must be a multiple of SCALE for this scale to be usable. */
1871 if (!wi::multiple_of_p (wi::to_widest (step), scale, SIGNED, &factor))
1872 continue;
1873
1874 /* Determine the minimum precision of (COUNT - 1) * STEP / SCALE. */
1875 widest_int range = wi::mul (count, factor, SIGNED, &overflow);
1876 if (overflow)
1877 continue;
1878 signop sign = range >= 0 ? UNSIGNED : SIGNED;
1879 unsigned int min_offset_bits = wi::min_precision (range, sign);
1880
1881 /* Find the narrowest viable offset type. */
1882 unsigned int offset_bits = 1U << ceil_log2 (min_offset_bits);
1883 tree offset_type = build_nonstandard_integer_type (offset_bits,
1884 sign == UNSIGNED);
1885
1886 /* See whether the target supports the operation with an offset
1887 no narrower than OFFSET_TYPE. */
1888 tree memory_type = TREE_TYPE (DR_REF (dr))((contains_struct_check (((dr)->ref), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 1888, __FUNCTION__))->typed.type)
;
1889 if (!vect_gather_scatter_fn_p (loop_vinfo, DR_IS_READ (dr)(dr)->is_read, masked_p,
1890 vectype, memory_type, offset_type, scale,
1891 &gs_info->ifn, &gs_info->offset_vectype)
1892 || gs_info->ifn == IFN_LAST)
1893 continue;
1894
 /* Success: fill in GS_INFO for the caller. */
1895 gs_info->decl = NULL_TREE(tree) nullptr;
1896 /* Logically the sum of DR_BASE_ADDRESS, DR_INIT and DR_OFFSET,
1897 but we don't need to store that here. */
1898 gs_info->base = NULL_TREE(tree) nullptr;
1899 gs_info->element_type = TREE_TYPE (vectype)((contains_struct_check ((vectype), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 1899, __FUNCTION__))->typed.type)
;
1900 gs_info->offset = fold_convert (offset_type, step)fold_convert_loc (((location_t) 0), offset_type, step);
1901 gs_info->offset_dt = vect_constant_def;
1902 gs_info->scale = scale;
1903 gs_info->memory_type = memory_type;
1904 return true;
1905 }
1906
1907 if (overflow && dump_enabled_p ())
1908 dump_printf_loc (MSG_NOTE, vect_location,
1909 "truncating gather/scatter offset to %d bits"
1910 " might change its value.\n", element_bits);
1911
1912 return false;
1913}
1914
1915/* Return true if we can use gather/scatter internal functions to
1916 vectorize STMT_INFO, which is a grouped or strided load or store.
1917 MASKED_P is true if load or store is conditional. When returning
1918 true, fill in GS_INFO with the information required to perform the
1919 operation. */
1920
1921static bool
1922vect_use_strided_gather_scatters_p (stmt_vec_info stmt_info,
1923 loop_vec_info loop_vinfo, bool masked_p,
1924 gather_scatter_info *gs_info)
1925{
 /* If the access is not a natively supported gather/scatter, fall back
 to synthesizing one with a truncated offset vector. */
1926 if (!vect_check_gather_scatter (stmt_info, loop_vinfo, gs_info)
1927 || gs_info->ifn == IFN_LAST)
1928 return vect_truncate_gather_scatter_offset (stmt_info, loop_vinfo,
1929 masked_p, gs_info);
1930
 /* Widen the offset to the type the chosen internal function expects;
 widening is always safe, narrowing is asserted against. */
1931 tree old_offset_type = TREE_TYPE (gs_info->offset)((contains_struct_check ((gs_info->offset), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 1931, __FUNCTION__))->typed.type)
;
1932 tree new_offset_type = TREE_TYPE (gs_info->offset_vectype)((contains_struct_check ((gs_info->offset_vectype), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 1932, __FUNCTION__))->typed.type)
;
1933
1934 gcc_assert (TYPE_PRECISION (new_offset_type)((void)(!(((tree_class_check ((new_offset_type), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 1934, __FUNCTION__))->type_common.precision) >= ((tree_class_check
((old_offset_type), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 1935, __FUNCTION__))->type_common.precision)) ? fancy_abort
("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 1935, __FUNCTION__), 0 : 0))
1935 >= TYPE_PRECISION (old_offset_type))((void)(!(((tree_class_check ((new_offset_type), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 1934, __FUNCTION__))->type_common.precision) >= ((tree_class_check
((old_offset_type), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 1935, __FUNCTION__))->type_common.precision)) ? fancy_abort
("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 1935, __FUNCTION__), 0 : 0))
;
1936 gs_info->offset = fold_convert (new_offset_type, gs_info->offset)fold_convert_loc (((location_t) 0), new_offset_type, gs_info->
offset)
;
1937
1938 if (dump_enabled_p ())
1939 dump_printf_loc (MSG_NOTE, vect_location,
1940 "using gather/scatter for strided/grouped access,"
1941 " scale = %d\n", gs_info->scale);
1942
1943 return true;
1944}
1945
1946/* STMT_INFO is a non-strided load or store, meaning that it accesses
1947 elements with a known constant step. Return -1 if that step
1948 is negative, 0 if it is zero, and 1 if it is greater than zero. */
1949
1950static int
1951compare_step_with_zero (vec_info *vinfo, stmt_vec_info stmt_info)
1952{
1953 dr_vec_info *dr_info = STMT_VINFO_DR_INFO (stmt_info)(((void)(!((stmt_info)->dr_aux.stmt == (stmt_info)) ? fancy_abort
("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 1953, __FUNCTION__), 0 : 0)), &(stmt_info)->dr_aux)
;
 /* tree_int_cst_compare gives the -1/0/1 result documented above. */
1954 return tree_int_cst_compare (vect_dr_behavior (vinfo, dr_info)->step,
1955 size_zero_nodeglobal_trees[TI_SIZE_ZERO]);
1956}
1957
1958/* If the target supports a permute mask that reverses the elements in
1959 a vector of type VECTYPE, return that mask, otherwise return null. */
1960
1961static tree
1962perm_mask_for_reverse (tree vectype)
1963{
1964 poly_uint64 nunits = TYPE_VECTOR_SUBPARTS (vectype);
1965
1966 /* The encoding has a single stepped pattern. */
 /* Three leading elements { N-1, N-2, N-3 } encode the full
 reversing series for variable-length vectors. */
1967 vec_perm_builder sel (nunits, 1, 3);
1968 for (int i = 0; i < 3; ++i)
1969 sel.quick_push (nunits - 1 - i);
1970
1971 vec_perm_indices indices (sel, 1, nunits);
1972 if (!can_vec_perm_const_p (TYPE_MODE (vectype)((((enum tree_code) ((tree_class_check ((vectype), (tcc_type)
, "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 1972, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(vectype) : (vectype)->type_common.mode)
, indices))
1973 return NULL_TREE(tree) nullptr;
1974 return vect_gen_perm_mask_checked (vectype, indices);
1975}
1976
1977/* A subroutine of get_load_store_type, with a subset of the same
1978 arguments. Handle the case where STMT_INFO is a load or store that
1979 accesses consecutive elements with a negative step. Sets *POFFSET
1980 to the offset to be applied to the DR for the first access. */
1981
1982static vect_memory_access_type
1983get_negative_load_store_type (vec_info *vinfo,
1984 stmt_vec_info stmt_info, tree vectype,
1985 vec_load_store_type vls_type,
1986 unsigned int ncopies, poly_int64 *poffset)
1987{
1988 dr_vec_info *dr_info = STMT_VINFO_DR_INFO (stmt_info)(((void)(!((stmt_info)->dr_aux.stmt == (stmt_info)) ? fancy_abort
("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 1988, __FUNCTION__), 0 : 0)), &(stmt_info)->dr_aux)
;
1989 dr_alignment_support alignment_support_scheme;
1990
 /* Only the single-copy case is handled; multiple copies fall back to
 element-by-element accesses. */
1991 if (ncopies > 1)
1992 {
1993 if (dump_enabled_p ())
1994 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
1995 "multiple types with negative step.\n");
1996 return VMAT_ELEMENTWISE;
1997 }
1998
1999 /* For backward running DRs the first access in vectype actually is
2000 N-1 elements before the address of the DR. */
2001 *poffset = ((-TYPE_VECTOR_SUBPARTS (vectype) + 1)
2002 * TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (vectype)))((unsigned long) (*tree_int_cst_elt_check ((((tree_class_check
((((contains_struct_check ((vectype), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2002, __FUNCTION__))->typed.type)), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2002, __FUNCTION__))->type_common.size_unit)), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2002, __FUNCTION__)))
);
2003
 /* The offset access must still be sufficiently aligned or have
 supported unaligned access, otherwise go elementwise. */
2004 int misalignment = dr_misalignment (dr_info, vectype, *poffset);
2005 alignment_support_scheme
2006 = vect_supportable_dr_alignment (vinfo, dr_info, vectype, misalignment);
2007 if (alignment_support_scheme != dr_aligned
2008 && alignment_support_scheme != dr_unaligned_supported)
2009 {
2010 if (dump_enabled_p ())
2011 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
2012 "negative step but alignment required.\n");
2013 *poffset = 0;
2014 return VMAT_ELEMENTWISE;
2015 }
2016
 /* An invariant stored value is the same in any order, so no reversing
 permute is needed. */
2017 if (vls_type == VLS_STORE_INVARIANT)
2018 {
2019 if (dump_enabled_p ())
2020 dump_printf_loc (MSG_NOTE, vect_location,
2021 "negative step with invariant source;"
2022 " no permute needed.\n");
2023 return VMAT_CONTIGUOUS_DOWN;
2024 }
2025
 /* Otherwise the target must be able to reverse a vector. */
2026 if (!perm_mask_for_reverse (vectype))
2027 {
2028 if (dump_enabled_p ())
2029 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
2030 "negative step and reversing not supported.\n");
2031 *poffset = 0;
2032 return VMAT_ELEMENTWISE;
2033 }
2034
2035 return VMAT_CONTIGUOUS_REVERSE;
2036}
2037
2038/* STMT_INFO is either a masked or unconditional store. Return the value
2039 being stored. */
2040
2041tree
2042vect_get_store_rhs (stmt_vec_info stmt_info)
2043{
 /* Plain store: the RHS of the single assignment. */
2044 if (gassign *assign = dyn_cast <gassign *> (stmt_info->stmt))
2045 {
2046 gcc_assert (gimple_assign_single_p (assign))((void)(!(gimple_assign_single_p (assign)) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2046, __FUNCTION__), 0 : 0))
;
2047 return gimple_assign_rhs1 (assign);
2048 }
 /* Masked store (internal-function call): the stored-value argument
 position depends on the internal function. */
2049 if (gcall *call = dyn_cast <gcall *> (stmt_info->stmt))
2050 {
2051 internal_fn ifn = gimple_call_internal_fn (call);
2052 int index = internal_fn_stored_value_index (ifn);
2053 gcc_assert (index >= 0)((void)(!(index >= 0) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2053, __FUNCTION__), 0 : 0))
;
2054 return gimple_call_arg (call, index);
2055 }
 /* Callers guarantee STMT_INFO is a store, so anything else is a bug. */
2056 gcc_unreachable ()(fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2056, __FUNCTION__))
;
2057}
2058
2059/* Function VECTOR_VECTOR_COMPOSITION_TYPE
2060
2061 This function returns a vector type which can be composed with NETLS pieces,
2062 whose type is recorded in PTYPE. VTYPE should be a vector type, and has the
2063 same vector size as the return vector. It checks target whether supports
2064 pieces-size vector mode for construction firstly, if target fails to, check
2065 pieces-size scalar mode for construction further. It returns NULL_TREE if
2066 fails to find the available composition.
2067
2068 For example, for (vtype=V16QI, nelts=4), we can probably get:
2069 - V16QI with PTYPE V4QI.
2070 - V4SI with PTYPE SI.
2071 - NULL_TREE. */
2072
2073static tree
2074vector_vector_composition_type (tree vtype, poly_uint64 nelts, tree *ptype)
2075{
2076 gcc_assert (VECTOR_TYPE_P (vtype))((void)(!((((enum tree_code) (vtype)->base.code) == VECTOR_TYPE
)) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2076, __FUNCTION__), 0 : 0))
;
2077 gcc_assert (known_gt (nelts, 0U))((void)(!((!maybe_le (nelts, 0U))) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2077, __FUNCTION__), 0 : 0))
;
2078
2079 machine_mode vmode = TYPE_MODE (vtype)((((enum tree_code) ((tree_class_check ((vtype), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2079, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(vtype) : (vtype)->type_common.mode)
;
2080 if (!VECTOR_MODE_P (vmode)(((enum mode_class) mode_class[vmode]) == MODE_VECTOR_BOOL ||
((enum mode_class) mode_class[vmode]) == MODE_VECTOR_INT || (
(enum mode_class) mode_class[vmode]) == MODE_VECTOR_FLOAT || (
(enum mode_class) mode_class[vmode]) == MODE_VECTOR_FRACT || (
(enum mode_class) mode_class[vmode]) == MODE_VECTOR_UFRACT ||
((enum mode_class) mode_class[vmode]) == MODE_VECTOR_ACCUM ||
((enum mode_class) mode_class[vmode]) == MODE_VECTOR_UACCUM)
)
2081 return NULL_TREE(tree) nullptr;
2082
2083 poly_uint64 vbsize = GET_MODE_BITSIZE (vmode);
2084 unsigned int pbsize;
2085 if (constant_multiple_p (vbsize, nelts, &pbsize))
2086 {
2087 /* First check if vec_init optab supports construction from
2088 vector pieces directly. */
2089 scalar_mode elmode = SCALAR_TYPE_MODE (TREE_TYPE (vtype))(as_a <scalar_mode> ((tree_class_check ((((contains_struct_check
((vtype), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2089, __FUNCTION__))->typed.type)), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2089, __FUNCTION__))->type_common.mode))
;
2090 poly_uint64 inelts = pbsize / GET_MODE_BITSIZE (elmode);
2091 machine_mode rmode;
2092 if (related_vector_mode (vmode, elmode, inelts).exists (&rmode)
2093 && (convert_optab_handler (vec_init_optab, vmode, rmode)
2094 != CODE_FOR_nothing))
2095 {
2096 *ptype = build_vector_type (TREE_TYPE (vtype)((contains_struct_check ((vtype), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2096, __FUNCTION__))->typed.type)
, inelts);
2097 return vtype;
2098 }
2099
2100 /* Otherwise check if exists an integer type of the same piece size and
2101 if vec_init optab supports construction from it directly. */
2102 if (int_mode_for_size (pbsize, 0).exists (&elmode)
2103 && related_vector_mode (vmode, elmode, nelts).exists (&rmode)
2104 && (convert_optab_handler (vec_init_optab, rmode, elmode)
2105 != CODE_FOR_nothing))
2106 {
2107 *ptype = build_nonstandard_integer_type (pbsize, 1);
2108 return build_vector_type (*ptype, nelts);
2109 }
2110 }
2111
2112 return NULL_TREE(tree) nullptr;
2113}
2114
2115/* A subroutine of get_load_store_type, with a subset of the same
2116 arguments. Handle the case where STMT_INFO is part of a grouped load
2117 or store.
2118
2119 For stores, the statements in the group are all consecutive
2120 and there is no gap at the end. For loads, the statements in the
2121 group might not be consecutive; there can be gaps between statements
2122 as well as at the end. */
2123
2124static bool
2125get_group_load_store_type (vec_info *vinfo, stmt_vec_info stmt_info,
2126 tree vectype, slp_tree slp_node,
2127 bool masked_p, vec_load_store_type vls_type,
2128 vect_memory_access_type *memory_access_type,
2129 poly_int64 *poffset,
2130 dr_alignment_support *alignment_support_scheme,
2131 int *misalignment,
2132 gather_scatter_info *gs_info)
2133{
2134 loop_vec_info loop_vinfo = dyn_cast <loop_vec_info> (vinfo);
2135 class loop *loop = loop_vinfo ? LOOP_VINFO_LOOP (loop_vinfo)(loop_vinfo)->loop : NULLnullptr;
2136 stmt_vec_info first_stmt_info = DR_GROUP_FIRST_ELEMENT (stmt_info)(((void)(!((stmt_info)->dr_aux.dr) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2136, __FUNCTION__), 0 : 0)), (stmt_info)->first_element
)
;
2137 dr_vec_info *first_dr_info = STMT_VINFO_DR_INFO (first_stmt_info)(((void)(!((first_stmt_info)->dr_aux.stmt == (first_stmt_info
)) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2137, __FUNCTION__), 0 : 0)), &(first_stmt_info)->dr_aux
)
;
2138 unsigned int group_size = DR_GROUP_SIZE (first_stmt_info)(((void)(!((first_stmt_info)->dr_aux.dr) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2138, __FUNCTION__), 0 : 0)), (first_stmt_info)->size)
;
2139 bool single_element_p = (stmt_info == first_stmt_info
2140 && !DR_GROUP_NEXT_ELEMENT (stmt_info)(((void)(!((stmt_info)->dr_aux.dr) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2140, __FUNCTION__), 0 : 0)), (stmt_info)->next_element)
);
2141 unsigned HOST_WIDE_INTlong gap = DR_GROUP_GAP (first_stmt_info)(((void)(!((first_stmt_info)->dr_aux.dr) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2141, __FUNCTION__), 0 : 0)), (first_stmt_info)->gap)
;
2142 poly_uint64 nunits = TYPE_VECTOR_SUBPARTS (vectype);
2143
2144 /* True if the vectorized statements would access beyond the last
2145 statement in the group. */
2146 bool overrun_p = false;
2147
2148 /* True if we can cope with such overrun by peeling for gaps, so that
2149 there is at least one final scalar iteration after the vector loop. */
2150 bool can_overrun_p = (!masked_p
2151 && vls_type == VLS_LOAD
2152 && loop_vinfo
2153 && !loop->inner);
2154
2155 /* There can only be a gap at the end of the group if the stride is
2156 known at compile time. */
2157 gcc_assert (!STMT_VINFO_STRIDED_P (first_stmt_info) || gap == 0)((void)(!(!(first_stmt_info)->strided_p || gap == 0) ? fancy_abort
("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2157, __FUNCTION__), 0 : 0))
;
2158
2159 /* Stores can't yet have gaps. */
2160 gcc_assert (slp_node || vls_type == VLS_LOAD || gap == 0)((void)(!(slp_node || vls_type == VLS_LOAD || gap == 0) ? fancy_abort
("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2160, __FUNCTION__), 0 : 0))
;
2161
2162 if (slp_node)
2163 {
2164 /* For SLP vectorization we directly vectorize a subchain
2165 without permutation. */
2166 if (! SLP_TREE_LOAD_PERMUTATION (slp_node)(slp_node)->load_permutation.exists ())
2167 first_dr_info
2168 = STMT_VINFO_DR_INFO (SLP_TREE_SCALAR_STMTS (slp_node)[0])(((void)(!(((slp_node)->stmts[0])->dr_aux.stmt == ((slp_node
)->stmts[0])) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2168, __FUNCTION__), 0 : 0)), &((slp_node)->stmts[0]
)->dr_aux)
;
2169 if (STMT_VINFO_STRIDED_P (first_stmt_info)(first_stmt_info)->strided_p)
2170 {
2171 /* Try to use consecutive accesses of DR_GROUP_SIZE elements,
2172 separated by the stride, until we have a complete vector.
2173 Fall back to scalar accesses if that isn't possible. */
2174 if (multiple_p (nunits, group_size))
2175 *memory_access_type = VMAT_STRIDED_SLP;
2176 else
2177 *memory_access_type = VMAT_ELEMENTWISE;
2178 }
2179 else
2180 {
2181 overrun_p = loop_vinfo && gap != 0;
2182 if (overrun_p && vls_type != VLS_LOAD)
2183 {
2184 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
2185 "Grouped store with gaps requires"
2186 " non-consecutive accesses\n");
2187 return false;
2188 }
2189 /* An overrun is fine if the trailing elements are smaller
2190 than the alignment boundary B. Every vector access will
2191 be a multiple of B and so we are guaranteed to access a
2192 non-gap element in the same B-sized block. */
2193 if (overrun_p
2194 && gap < (vect_known_alignment_in_bytes (first_dr_info,
2195 vectype)
2196 / vect_get_scalar_dr_size (first_dr_info)))
2197 overrun_p = false;
2198
2199 /* If the gap splits the vector in half and the target
2200 can do half-vector operations avoid the epilogue peeling
2201 by simply loading half of the vector only. Usually
2202 the construction with an upper zero half will be elided. */
2203 dr_alignment_support alss;
2204 int misalign = dr_misalignment (first_dr_info, vectype);
2205 tree half_vtype;
2206 if (overrun_p
2207 && !masked_p
2208 && (((alss = vect_supportable_dr_alignment (vinfo, first_dr_info,
2209 vectype, misalign)))
2210 == dr_aligned
2211 || alss == dr_unaligned_supported)
2212 && known_eq (nunits, (group_size - gap) * 2)(!maybe_ne (nunits, (group_size - gap) * 2))
2213 && known_eq (nunits, group_size)(!maybe_ne (nunits, group_size))
2214 && (vector_vector_composition_type (vectype, 2, &half_vtype)
2215 != NULL_TREE(tree) nullptr))
2216 overrun_p = false;
2217
2218 if (overrun_p && !can_overrun_p)
2219 {
2220 if (dump_enabled_p ())
2221 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
2222 "Peeling for outer loop is not supported\n");
2223 return false;
2224 }
2225 int cmp = compare_step_with_zero (vinfo, stmt_info);
2226 if (cmp < 0)
2227 {
2228 if (single_element_p)
2229 /* ??? The VMAT_CONTIGUOUS_REVERSE code generation is
2230 only correct for single element "interleaving" SLP. */
2231 *memory_access_type = get_negative_load_store_type
2232 (vinfo, stmt_info, vectype, vls_type, 1, poffset);
2233 else
2234 {
2235 /* Try to use consecutive accesses of DR_GROUP_SIZE elements,
2236 separated by the stride, until we have a complete vector.
2237 Fall back to scalar accesses if that isn't possible. */
2238 if (multiple_p (nunits, group_size))
2239 *memory_access_type = VMAT_STRIDED_SLP;
2240 else
2241 *memory_access_type = VMAT_ELEMENTWISE;
2242 }
2243 }
2244 else
2245 {
2246 gcc_assert (!loop_vinfo || cmp > 0)((void)(!(!loop_vinfo || cmp > 0) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2246, __FUNCTION__), 0 : 0))
;
2247 *memory_access_type = VMAT_CONTIGUOUS;
2248 }
2249 }
2250 }
2251 else
2252 {
2253 /* We can always handle this case using elementwise accesses,
2254 but see if something more efficient is available. */
2255 *memory_access_type = VMAT_ELEMENTWISE;
2256
2257 /* If there is a gap at the end of the group then these optimizations
2258 would access excess elements in the last iteration. */
2259 bool would_overrun_p = (gap != 0);
2260 /* An overrun is fine if the trailing elements are smaller than the
2261 alignment boundary B. Every vector access will be a multiple of B
2262 and so we are guaranteed to access a non-gap element in the
2263 same B-sized block. */
2264 if (would_overrun_p
2265 && !masked_p
2266 && gap < (vect_known_alignment_in_bytes (first_dr_info, vectype)
2267 / vect_get_scalar_dr_size (first_dr_info)))
2268 would_overrun_p = false;
2269
2270 if (!STMT_VINFO_STRIDED_P (first_stmt_info)(first_stmt_info)->strided_p
2271 && (can_overrun_p || !would_overrun_p)
2272 && compare_step_with_zero (vinfo, stmt_info) > 0)
2273 {
2274 /* First cope with the degenerate case of a single-element
2275 vector. */
2276 if (known_eq (TYPE_VECTOR_SUBPARTS (vectype), 1U)(!maybe_ne (TYPE_VECTOR_SUBPARTS (vectype), 1U)))
2277 ;
2278
2279 /* Otherwise try using LOAD/STORE_LANES. */
2280 else if (vls_type == VLS_LOAD
2281 ? vect_load_lanes_supported (vectype, group_size, masked_p)
2282 : vect_store_lanes_supported (vectype, group_size,
2283 masked_p))
2284 {
2285 *memory_access_type = VMAT_LOAD_STORE_LANES;
2286 overrun_p = would_overrun_p;
2287 }
2288
2289 /* If that fails, try using permuting loads. */
2290 else if (vls_type == VLS_LOAD
2291 ? vect_grouped_load_supported (vectype, single_element_p,
2292 group_size)
2293 : vect_grouped_store_supported (vectype, group_size))
2294 {
2295 *memory_access_type = VMAT_CONTIGUOUS_PERMUTE;
2296 overrun_p = would_overrun_p;
2297 }
2298 }
2299
2300 /* As a last resort, trying using a gather load or scatter store.
2301
2302 ??? Although the code can handle all group sizes correctly,
2303 it probably isn't a win to use separate strided accesses based
2304 on nearby locations. Or, even if it's a win over scalar code,
2305 it might not be a win over vectorizing at a lower VF, if that
2306 allows us to use contiguous accesses. */
2307 if (*memory_access_type == VMAT_ELEMENTWISE
2308 && single_element_p
2309 && loop_vinfo
2310 && vect_use_strided_gather_scatters_p (stmt_info, loop_vinfo,
2311 masked_p, gs_info))
2312 *memory_access_type = VMAT_GATHER_SCATTER;
2313 }
2314
2315 if (*memory_access_type == VMAT_GATHER_SCATTER
2316 || *memory_access_type == VMAT_ELEMENTWISE)
2317 {
2318 *alignment_support_scheme = dr_unaligned_supported;
2319 *misalignment = DR_MISALIGNMENT_UNKNOWN(-1);
2320 }
2321 else
2322 {
2323 *misalignment = dr_misalignment (first_dr_info, vectype, *poffset);
2324 *alignment_support_scheme
2325 = vect_supportable_dr_alignment (vinfo, first_dr_info, vectype,
2326 *misalignment);
2327 }
2328
2329 if (vls_type != VLS_LOAD && first_stmt_info == stmt_info)
2330 {
2331 /* STMT is the leader of the group. Check the operands of all the
2332 stmts of the group. */
2333 stmt_vec_info next_stmt_info = DR_GROUP_NEXT_ELEMENT (stmt_info)(((void)(!((stmt_info)->dr_aux.dr) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2333, __FUNCTION__), 0 : 0)), (stmt_info)->next_element)
;
2334 while (next_stmt_info)
2335 {
2336 tree op = vect_get_store_rhs (next_stmt_info);
2337 enum vect_def_type dt;
2338 if (!vect_is_simple_use (op, vinfo, &dt))
2339 {
2340 if (dump_enabled_p ())
2341 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
2342 "use not simple.\n");
2343 return false;
2344 }
2345 next_stmt_info = DR_GROUP_NEXT_ELEMENT (next_stmt_info)(((void)(!((next_stmt_info)->dr_aux.dr) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2345, __FUNCTION__), 0 : 0)), (next_stmt_info)->next_element
)
;
2346 }
2347 }
2348
2349 if (overrun_p)
2350 {
2351 gcc_assert (can_overrun_p)((void)(!(can_overrun_p) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2351, __FUNCTION__), 0 : 0))
;
2352 if (dump_enabled_p ())
2353 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
2354 "Data access with gaps requires scalar "
2355 "epilogue loop\n");
2356 LOOP_VINFO_PEELING_FOR_GAPS (loop_vinfo)(loop_vinfo)->peeling_for_gaps = true;
2357 }
2358
2359 return true;
2360}
2361
2362/* Analyze load or store statement STMT_INFO of type VLS_TYPE. Return true
2363 if there is a memory access type that the vectorized form can use,
2364 storing it in *MEMORY_ACCESS_TYPE if so. If we decide to use gathers
2365 or scatters, fill in GS_INFO accordingly. In addition
2366 *ALIGNMENT_SUPPORT_SCHEME is filled out and false is returned if
2367 the target does not support the alignment scheme. *MISALIGNMENT
2368 is set according to the alignment of the access (including
2369 DR_MISALIGNMENT_UNKNOWN when it is unknown).
2370
2371 SLP says whether we're performing SLP rather than loop vectorization.
2372 MASKED_P is true if the statement is conditional on a vectorized mask.
2373 VECTYPE is the vector type that the vectorized statements will use.
2374 NCOPIES is the number of vector statements that will be needed. */
2375
2376static bool
2377get_load_store_type (vec_info *vinfo, stmt_vec_info stmt_info,
2378 tree vectype, slp_tree slp_node,
2379 bool masked_p, vec_load_store_type vls_type,
2380 unsigned int ncopies,
2381 vect_memory_access_type *memory_access_type,
2382 poly_int64 *poffset,
2383 dr_alignment_support *alignment_support_scheme,
2384 int *misalignment,
2385 gather_scatter_info *gs_info)
2386{
2387 loop_vec_info loop_vinfo = dyn_cast <loop_vec_info> (vinfo);
2388 poly_uint64 nunits = TYPE_VECTOR_SUBPARTS (vectype);
2389 *misalignment = DR_MISALIGNMENT_UNKNOWN(-1);
2390 *poffset = 0;
2391 if (STMT_VINFO_GATHER_SCATTER_P (stmt_info)(stmt_info)->gather_scatter_p)
2392 {
2393 *memory_access_type = VMAT_GATHER_SCATTER;
2394 if (!vect_check_gather_scatter (stmt_info, loop_vinfo, gs_info))
2395 gcc_unreachable ()(fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2395, __FUNCTION__))
;
2396 else if (!vect_is_simple_use (gs_info->offset, vinfo,
2397 &gs_info->offset_dt,
2398 &gs_info->offset_vectype))
2399 {
2400 if (dump_enabled_p ())
2401 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
2402 "%s index use not simple.\n",
2403 vls_type == VLS_LOAD ? "gather" : "scatter");
2404 return false;
2405 }
2406 else if (gs_info->ifn == IFN_LAST && !gs_info->decl)
2407 {
2408 if (vls_type != VLS_LOAD)
2409 {
2410 if (dump_enabled_p ())
2411 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
2412 "unsupported emulated scatter.\n");
2413 return false;
2414 }
2415 else if (!TYPE_VECTOR_SUBPARTS (vectype).is_constant ()
2416 || !TYPE_VECTOR_SUBPARTS
2417 (gs_info->offset_vectype).is_constant ()
2418 || !constant_multiple_p (TYPE_VECTOR_SUBPARTS
2419 (gs_info->offset_vectype),
2420 TYPE_VECTOR_SUBPARTS (vectype)))
2421 {
2422 if (dump_enabled_p ())
2423 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
2424 "unsupported vector types for emulated "
2425 "gather.\n");
2426 return false;
2427 }
2428 }
2429 /* Gather-scatter accesses perform only component accesses, alignment
2430 is irrelevant for them. */
2431 *alignment_support_scheme = dr_unaligned_supported;
2432 }
2433 else if (STMT_VINFO_GROUPED_ACCESS (stmt_info)((stmt_info)->dr_aux.dr && (((void)(!((stmt_info)->
dr_aux.dr) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2433, __FUNCTION__), 0 : 0)), (stmt_info)->first_element
))
)
2434 {
2435 if (!get_group_load_store_type (vinfo, stmt_info, vectype, slp_node,
2436 masked_p,
2437 vls_type, memory_access_type, poffset,
2438 alignment_support_scheme,
2439 misalignment, gs_info))
2440 return false;
2441 }
2442 else if (STMT_VINFO_STRIDED_P (stmt_info)(stmt_info)->strided_p)
2443 {
2444 gcc_assert (!slp_node)((void)(!(!slp_node) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2444, __FUNCTION__), 0 : 0))
;
2445 if (loop_vinfo
2446 && vect_use_strided_gather_scatters_p (stmt_info, loop_vinfo,
2447 masked_p, gs_info))
2448 *memory_access_type = VMAT_GATHER_SCATTER;
2449 else
2450 *memory_access_type = VMAT_ELEMENTWISE;
2451 /* Alignment is irrelevant here. */
2452 *alignment_support_scheme = dr_unaligned_supported;
2453 }
2454 else
2455 {
2456 int cmp = compare_step_with_zero (vinfo, stmt_info);
2457 if (cmp == 0)
2458 {
2459 gcc_assert (vls_type == VLS_LOAD)((void)(!(vls_type == VLS_LOAD) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2459, __FUNCTION__), 0 : 0))
;
2460 *memory_access_type = VMAT_INVARIANT;
2461 /* Invariant accesses perform only component accesses, alignment
2462 is irrelevant for them. */
2463 *alignment_support_scheme = dr_unaligned_supported;
2464 }
2465 else
2466 {
2467 if (cmp < 0)
2468 *memory_access_type = get_negative_load_store_type
2469 (vinfo, stmt_info, vectype, vls_type, ncopies, poffset);
2470 else
2471 *memory_access_type = VMAT_CONTIGUOUS;
2472 *misalignment = dr_misalignment (STMT_VINFO_DR_INFO (stmt_info)(((void)(!((stmt_info)->dr_aux.stmt == (stmt_info)) ? fancy_abort
("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2472, __FUNCTION__), 0 : 0)), &(stmt_info)->dr_aux)
,
2473 vectype, *poffset);
2474 *alignment_support_scheme
2475 = vect_supportable_dr_alignment (vinfo,
2476 STMT_VINFO_DR_INFO (stmt_info)(((void)(!((stmt_info)->dr_aux.stmt == (stmt_info)) ? fancy_abort
("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2476, __FUNCTION__), 0 : 0)), &(stmt_info)->dr_aux)
,
2477 vectype, *misalignment);
2478 }
2479 }
2480
2481 if ((*memory_access_type == VMAT_ELEMENTWISE
2482 || *memory_access_type == VMAT_STRIDED_SLP)
2483 && !nunits.is_constant ())
2484 {
2485 if (dump_enabled_p ())
2486 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
2487 "Not using elementwise accesses due to variable "
2488 "vectorization factor.\n");
2489 return false;
2490 }
2491
2492 if (*alignment_support_scheme == dr_unaligned_unsupported)
2493 {
2494 if (dump_enabled_p ())
2495 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
2496 "unsupported unaligned access\n");
2497 return false;
2498 }
2499
2500 /* FIXME: At the moment the cost model seems to underestimate the
2501 cost of using elementwise accesses. This check preserves the
2502 traditional behavior until that can be fixed. */
2503 stmt_vec_info first_stmt_info = DR_GROUP_FIRST_ELEMENT (stmt_info)(((void)(!((stmt_info)->dr_aux.dr) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2503, __FUNCTION__), 0 : 0)), (stmt_info)->first_element
)
;
2504 if (!first_stmt_info)
2505 first_stmt_info = stmt_info;
2506 if (*memory_access_type == VMAT_ELEMENTWISE
2507 && !STMT_VINFO_STRIDED_P (first_stmt_info)(first_stmt_info)->strided_p
2508 && !(stmt_info == DR_GROUP_FIRST_ELEMENT (stmt_info)(((void)(!((stmt_info)->dr_aux.dr) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2508, __FUNCTION__), 0 : 0)), (stmt_info)->first_element
)
2509 && !DR_GROUP_NEXT_ELEMENT (stmt_info)(((void)(!((stmt_info)->dr_aux.dr) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2509, __FUNCTION__), 0 : 0)), (stmt_info)->next_element)
2510 && !pow2p_hwi (DR_GROUP_SIZE (stmt_info)(((void)(!((stmt_info)->dr_aux.dr) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2510, __FUNCTION__), 0 : 0)), (stmt_info)->size)
)))
2511 {
2512 if (dump_enabled_p ())
2513 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
2514 "not falling back to elementwise accesses\n");
2515 return false;
2516 }
2517 return true;
2518}
2519
2520/* Return true if boolean argument at MASK_INDEX is suitable for vectorizing
2521 conditional operation STMT_INFO. When returning true, store the mask
2522 in *MASK, the type of its definition in *MASK_DT_OUT, the type of the
2523 vectorized mask in *MASK_VECTYPE_OUT and the SLP node corresponding
2524 to the mask in *MASK_NODE if MASK_NODE is not NULL. */
2525
2526static bool
2527vect_check_scalar_mask (vec_info *vinfo, stmt_vec_info stmt_info,
2528 slp_tree slp_node, unsigned mask_index,
2529 tree *mask, slp_tree *mask_node,
2530 vect_def_type *mask_dt_out, tree *mask_vectype_out)
2531{
2532 enum vect_def_type mask_dt;
2533 tree mask_vectype;
2534 slp_tree mask_node_1;
2535 if (!vect_is_simple_use (vinfo, stmt_info, slp_node, mask_index,
2536 mask, &mask_node_1, &mask_dt, &mask_vectype))
2537 {
2538 if (dump_enabled_p ())
2539 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
2540 "mask use not simple.\n");
2541 return false;
2542 }
2543
2544 if (!VECT_SCALAR_BOOLEAN_TYPE_P (TREE_TYPE (*mask))(((enum tree_code) (((contains_struct_check ((*mask), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2544, __FUNCTION__))->typed.type))->base.code) == BOOLEAN_TYPE
|| ((((enum tree_code) (((contains_struct_check ((*mask), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2544, __FUNCTION__))->typed.type))->base.code) == INTEGER_TYPE
|| ((enum tree_code) (((contains_struct_check ((*mask), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2544, __FUNCTION__))->typed.type))->base.code) == ENUMERAL_TYPE
) && ((tree_class_check ((((contains_struct_check ((*
mask), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2544, __FUNCTION__))->typed.type)), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2544, __FUNCTION__))->type_common.precision) == 1 &&
((tree_class_check ((((contains_struct_check ((*mask), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2544, __FUNCTION__))->typed.type)), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2544, __FUNCTION__))->base.u.bits.unsigned_flag)))
)
2545 {
2546 if (dump_enabled_p ())
2547 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
2548 "mask argument is not a boolean.\n");
2549 return false;
2550 }
2551
2552 /* If the caller is not prepared for adjusting an external/constant
2553 SLP mask vector type fail. */
2554 if (slp_node
2555 && !mask_node
2556 && SLP_TREE_DEF_TYPE (mask_node_1)(mask_node_1)->def_type != vect_internal_def)
2557 {
2558 if (dump_enabled_p ())
2559 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
2560 "SLP mask argument is not vectorized.\n");
2561 return false;
2562 }
2563
2564 tree vectype = STMT_VINFO_VECTYPE (stmt_info)(stmt_info)->vectype;
2565 if (!mask_vectype)
2566 mask_vectype = get_mask_type_for_scalar_type (vinfo, TREE_TYPE (vectype)((contains_struct_check ((vectype), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2566, __FUNCTION__))->typed.type)
);
2567
2568 if (!mask_vectype || !VECTOR_BOOLEAN_TYPE_P (mask_vectype)(((enum tree_code) (mask_vectype)->base.code) == VECTOR_TYPE
&& ((enum tree_code) (((contains_struct_check ((mask_vectype
), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2568, __FUNCTION__))->typed.type))->base.code) == BOOLEAN_TYPE
)
)
2569 {
2570 if (dump_enabled_p ())
2571 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
2572 "could not find an appropriate vector mask type.\n");
2573 return false;
2574 }
2575
2576 if (maybe_ne (TYPE_VECTOR_SUBPARTS (mask_vectype),
2577 TYPE_VECTOR_SUBPARTS (vectype)))
2578 {
2579 if (dump_enabled_p ())
2580 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
2581 "vector mask type %T"
2582 " does not match vector data type %T.\n",
2583 mask_vectype, vectype);
2584
2585 return false;
2586 }
2587
2588 *mask_dt_out = mask_dt;
2589 *mask_vectype_out = mask_vectype;
2590 if (mask_node)
2591 *mask_node = mask_node_1;
2592 return true;
2593}
2594
2595/* Return true if stored value RHS is suitable for vectorizing store
2596 statement STMT_INFO. When returning true, store the type of the
2597 definition in *RHS_DT_OUT, the type of the vectorized store value in
2598 *RHS_VECTYPE_OUT and the type of the store in *VLS_TYPE_OUT. */
2599
2600static bool
2601vect_check_store_rhs (vec_info *vinfo, stmt_vec_info stmt_info,
2602 slp_tree slp_node, tree rhs,
2603 vect_def_type *rhs_dt_out, tree *rhs_vectype_out,
2604 vec_load_store_type *vls_type_out)
2605{
2606 /* In the case this is a store from a constant make sure
2607 native_encode_expr can handle it. */
2608 if (CONSTANT_CLASS_P (rhs)(tree_code_type[(int) (((enum tree_code) (rhs)->base.code)
)] == tcc_constant)
&& native_encode_expr (rhs, NULLnullptr, 64) == 0)
2609 {
2610 if (dump_enabled_p ())
2611 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
2612 "cannot encode constant as a byte sequence.\n");
2613 return false;
2614 }
2615
2616 unsigned op_no = 0;
2617 if (gcall *call = dyn_cast <gcall *> (stmt_info->stmt))
2618 {
2619 if (gimple_call_internal_p (call)
2620 && internal_store_fn_p (gimple_call_internal_fn (call)))
2621 op_no = internal_fn_stored_value_index (gimple_call_internal_fn (call));
2622 }
2623
2624 enum vect_def_type rhs_dt;
2625 tree rhs_vectype;
2626 slp_tree slp_op;
2627 if (!vect_is_simple_use (vinfo, stmt_info, slp_node, op_no,
2628 &rhs, &slp_op, &rhs_dt, &rhs_vectype))
2629 {
2630 if (dump_enabled_p ())
2631 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
2632 "use not simple.\n");
2633 return false;
2634 }
2635
2636 tree vectype = STMT_VINFO_VECTYPE (stmt_info)(stmt_info)->vectype;
2637 if (rhs_vectype && !useless_type_conversion_p (vectype, rhs_vectype))
2638 {
2639 if (dump_enabled_p ())
2640 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
2641 "incompatible vector types.\n");
2642 return false;
2643 }
2644
2645 *rhs_dt_out = rhs_dt;
2646 *rhs_vectype_out = rhs_vectype;
2647 if (rhs_dt == vect_constant_def || rhs_dt == vect_external_def)
2648 *vls_type_out = VLS_STORE_INVARIANT;
2649 else
2650 *vls_type_out = VLS_STORE;
2651 return true;
2652}
2653
2654/* Build an all-ones vector mask of type MASKTYPE while vectorizing STMT_INFO.
2655 Note that we support masks with floating-point type, in which case the
2656 floats are interpreted as a bitmask. */
2657
2658static tree
2659vect_build_all_ones_mask (vec_info *vinfo,
2660 stmt_vec_info stmt_info, tree masktype)
2661{
2662 if (TREE_CODE (masktype)((enum tree_code) (masktype)->base.code) == INTEGER_TYPE)
2663 return build_int_cst (masktype, -1);
2664 else if (TREE_CODE (TREE_TYPE (masktype))((enum tree_code) (((contains_struct_check ((masktype), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2664, __FUNCTION__))->typed.type))->base.code)
== INTEGER_TYPE)
2665 {
2666 tree mask = build_int_cst (TREE_TYPE (masktype)((contains_struct_check ((masktype), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2666, __FUNCTION__))->typed.type)
, -1);
2667 mask = build_vector_from_val (masktype, mask);
2668 return vect_init_vector (vinfo, stmt_info, mask, masktype, NULLnullptr);
2669 }
2670 else if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (masktype))(((enum tree_code) (((contains_struct_check ((masktype), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2670, __FUNCTION__))->typed.type))->base.code) == REAL_TYPE
)
)
2671 {
2672 REAL_VALUE_TYPEstruct real_value r;
2673 long tmp[6];
2674 for (int j = 0; j < 6; ++j)
2675 tmp[j] = -1;
2676 real_from_target (&r, tmp, TYPE_MODE (TREE_TYPE (masktype))((((enum tree_code) ((tree_class_check ((((contains_struct_check
((masktype), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2676, __FUNCTION__))->typed.type)), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2676, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(((contains_struct_check ((masktype), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2676, __FUNCTION__))->typed.type)) : (((contains_struct_check
((masktype), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2676, __FUNCTION__))->typed.type))->type_common.mode)
);
2677 tree mask = build_real (TREE_TYPE (masktype)((contains_struct_check ((masktype), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2677, __FUNCTION__))->typed.type)
, r);
2678 mask = build_vector_from_val (masktype, mask);
2679 return vect_init_vector (vinfo, stmt_info, mask, masktype, NULLnullptr);
2680 }
2681 gcc_unreachable ()(fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2681, __FUNCTION__))
;
2682}
2683
2684/* Build an all-zero merge value of type VECTYPE while vectorizing
2685 STMT_INFO as a gather load. */
2686
2687static tree
2688vect_build_zero_merge_argument (vec_info *vinfo,
2689 stmt_vec_info stmt_info, tree vectype)
2690{
2691 tree merge;
2692 if (TREE_CODE (TREE_TYPE (vectype))((enum tree_code) (((contains_struct_check ((vectype), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2692, __FUNCTION__))->typed.type))->base.code)
== INTEGER_TYPE)
2693 merge = build_int_cst (TREE_TYPE (vectype)((contains_struct_check ((vectype), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2693, __FUNCTION__))->typed.type)
, 0);
2694 else if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (vectype))(((enum tree_code) (((contains_struct_check ((vectype), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2694, __FUNCTION__))->typed.type))->base.code) == REAL_TYPE
)
)
2695 {
2696 REAL_VALUE_TYPEstruct real_value r;
2697 long tmp[6];
2698 for (int j = 0; j < 6; ++j)
2699 tmp[j] = 0;
2700 real_from_target (&r, tmp, TYPE_MODE (TREE_TYPE (vectype))((((enum tree_code) ((tree_class_check ((((contains_struct_check
((vectype), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2700, __FUNCTION__))->typed.type)), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2700, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(((contains_struct_check ((vectype), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2700, __FUNCTION__))->typed.type)) : (((contains_struct_check
((vectype), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2700, __FUNCTION__))->typed.type))->type_common.mode)
);
2701 merge = build_real (TREE_TYPE (vectype)((contains_struct_check ((vectype), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2701, __FUNCTION__))->typed.type)
, r);
2702 }
2703 else
2704 gcc_unreachable ()(fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2704, __FUNCTION__))
;
2705 merge = build_vector_from_val (vectype, merge);
2706 return vect_init_vector (vinfo, stmt_info, merge, vectype, NULLnullptr);
2707}
2708
2709/* Build a gather load call while vectorizing STMT_INFO. Insert new
2710 instructions before GSI and add them to VEC_STMT. GS_INFO describes
2711 the gather load operation. If the load is conditional, MASK is the
2712 unvectorized condition and MASK_DT is its definition type, otherwise
2713 MASK is null. */
2714
2715static void
2716vect_build_gather_load_calls (vec_info *vinfo, stmt_vec_info stmt_info,
2717 gimple_stmt_iterator *gsi,
2718 gimple **vec_stmt,
2719 gather_scatter_info *gs_info,
2720 tree mask)
2721{
2722 loop_vec_info loop_vinfo = dyn_cast <loop_vec_info> (vinfo);
2723 class loop *loop = LOOP_VINFO_LOOP (loop_vinfo)(loop_vinfo)->loop;
2724 tree vectype = STMT_VINFO_VECTYPE (stmt_info)(stmt_info)->vectype;
2725 poly_uint64 nunits = TYPE_VECTOR_SUBPARTS (vectype);
2726 int ncopies = vect_get_num_copies (loop_vinfo, vectype);
2727 edge pe = loop_preheader_edge (loop);
2728 enum { NARROW, NONE, WIDEN } modifier;
2729 poly_uint64 gather_off_nunits
2730 = TYPE_VECTOR_SUBPARTS (gs_info->offset_vectype);
2731
2732 tree arglist = TYPE_ARG_TYPES (TREE_TYPE (gs_info->decl))((tree_check2 ((((contains_struct_check ((gs_info->decl), (
TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2732, __FUNCTION__))->typed.type)), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2732, __FUNCTION__, (FUNCTION_TYPE), (METHOD_TYPE)))->type_non_common
.values)
;
2733 tree rettype = TREE_TYPE (TREE_TYPE (gs_info->decl))((contains_struct_check ((((contains_struct_check ((gs_info->
decl), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2733, __FUNCTION__))->typed.type)), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2733, __FUNCTION__))->typed.type)
;
2734 tree srctype = TREE_VALUE (arglist)((tree_check ((arglist), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2734, __FUNCTION__, (TREE_LIST)))->list.value)
; arglist = TREE_CHAIN (arglist)((contains_struct_check ((arglist), (TS_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2734, __FUNCTION__))->common.chain)
;
2735 tree ptrtype = TREE_VALUE (arglist)((tree_check ((arglist), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2735, __FUNCTION__, (TREE_LIST)))->list.value)
; arglist = TREE_CHAIN (arglist)((contains_struct_check ((arglist), (TS_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2735, __FUNCTION__))->common.chain)
;
2736 tree idxtype = TREE_VALUE (arglist)((tree_check ((arglist), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2736, __FUNCTION__, (TREE_LIST)))->list.value)
; arglist = TREE_CHAIN (arglist)((contains_struct_check ((arglist), (TS_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2736, __FUNCTION__))->common.chain)
;
2737 tree masktype = TREE_VALUE (arglist)((tree_check ((arglist), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2737, __FUNCTION__, (TREE_LIST)))->list.value)
; arglist = TREE_CHAIN (arglist)((contains_struct_check ((arglist), (TS_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2737, __FUNCTION__))->common.chain)
;
2738 tree scaletype = TREE_VALUE (arglist)((tree_check ((arglist), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2738, __FUNCTION__, (TREE_LIST)))->list.value)
;
2739 tree real_masktype = masktype;
2740 gcc_checking_assert (types_compatible_p (srctype, rettype)((void)(!(types_compatible_p (srctype, rettype) && (!
mask || ((enum tree_code) (masktype)->base.code) == INTEGER_TYPE
|| types_compatible_p (srctype, masktype))) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2743, __FUNCTION__), 0 : 0))
2741 && (!mask((void)(!(types_compatible_p (srctype, rettype) && (!
mask || ((enum tree_code) (masktype)->base.code) == INTEGER_TYPE
|| types_compatible_p (srctype, masktype))) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2743, __FUNCTION__), 0 : 0))
2742 || TREE_CODE (masktype) == INTEGER_TYPE((void)(!(types_compatible_p (srctype, rettype) && (!
mask || ((enum tree_code) (masktype)->base.code) == INTEGER_TYPE
|| types_compatible_p (srctype, masktype))) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2743, __FUNCTION__), 0 : 0))
2743 || types_compatible_p (srctype, masktype)))((void)(!(types_compatible_p (srctype, rettype) && (!
mask || ((enum tree_code) (masktype)->base.code) == INTEGER_TYPE
|| types_compatible_p (srctype, masktype))) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2743, __FUNCTION__), 0 : 0))
;
2744 if (mask && TREE_CODE (masktype)((enum tree_code) (masktype)->base.code) == INTEGER_TYPE)
2745 masktype = truth_type_for (srctype);
2746
2747 tree mask_halftype = masktype;
2748 tree perm_mask = NULL_TREE(tree) nullptr;
2749 tree mask_perm_mask = NULL_TREE(tree) nullptr;
2750 if (known_eq (nunits, gather_off_nunits)(!maybe_ne (nunits, gather_off_nunits)))
2751 modifier = NONE;
2752 else if (known_eq (nunits * 2, gather_off_nunits)(!maybe_ne (nunits * 2, gather_off_nunits)))
2753 {
2754 modifier = WIDEN;
2755
2756 /* Currently widening gathers and scatters are only supported for
2757 fixed-length vectors. */
2758 int count = gather_off_nunits.to_constant ();
2759 vec_perm_builder sel (count, count, 1);
2760 for (int i = 0; i < count; ++i)
2761 sel.quick_push (i | (count / 2));
2762
2763 vec_perm_indices indices (sel, 1, count);
2764 perm_mask = vect_gen_perm_mask_checked (gs_info->offset_vectype,
2765 indices);
2766 }
2767 else if (known_eq (nunits, gather_off_nunits * 2)(!maybe_ne (nunits, gather_off_nunits * 2)))
2768 {
2769 modifier = NARROW;
2770
2771 /* Currently narrowing gathers and scatters are only supported for
2772 fixed-length vectors. */
2773 int count = nunits.to_constant ();
2774 vec_perm_builder sel (count, count, 1);
2775 sel.quick_grow (count);
2776 for (int i = 0; i < count; ++i)
2777 sel[i] = i < count / 2 ? i : i + count / 2;
2778 vec_perm_indices indices (sel, 2, count);
2779 perm_mask = vect_gen_perm_mask_checked (vectype, indices);
2780
2781 ncopies *= 2;
2782
2783 if (mask && masktype == real_masktype)
2784 {
2785 for (int i = 0; i < count; ++i)
2786 sel[i] = i | (count / 2);
2787 indices.new_vector (sel, 2, count);
2788 mask_perm_mask = vect_gen_perm_mask_checked (masktype, indices);
2789 }
2790 else if (mask)
2791 mask_halftype = truth_type_for (gs_info->offset_vectype);
2792 }
2793 else
2794 gcc_unreachable ()(fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2794, __FUNCTION__))
;
2795
2796 tree scalar_dest = gimple_get_lhs (stmt_info->stmt);
2797 tree vec_dest = vect_create_destination_var (scalar_dest, vectype);
2798
2799 tree ptr = fold_convert (ptrtype, gs_info->base)fold_convert_loc (((location_t) 0), ptrtype, gs_info->base
)
;
2800 if (!is_gimple_min_invariant (ptr))
2801 {
2802 gimple_seq seq;
2803 ptr = force_gimple_operand (ptr, &seq, true, NULL_TREE(tree) nullptr);
2804 basic_block new_bb = gsi_insert_seq_on_edge_immediate (pe, seq);
2805 gcc_assert (!new_bb)((void)(!(!new_bb) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2805, __FUNCTION__), 0 : 0))
;
2806 }
2807
2808 tree scale = build_int_cst (scaletype, gs_info->scale);
2809
2810 tree vec_oprnd0 = NULL_TREE(tree) nullptr;
2811 tree vec_mask = NULL_TREE(tree) nullptr;
2812 tree src_op = NULL_TREE(tree) nullptr;
2813 tree mask_op = NULL_TREE(tree) nullptr;
2814 tree prev_res = NULL_TREE(tree) nullptr;
2815
2816 if (!mask)
2817 {
2818 src_op = vect_build_zero_merge_argument (vinfo, stmt_info, rettype);
2819 mask_op = vect_build_all_ones_mask (vinfo, stmt_info, masktype);
2820 }
2821
2822 auto_vec<tree> vec_oprnds0;
2823 auto_vec<tree> vec_masks;
2824 vect_get_vec_defs_for_operand (vinfo, stmt_info,
2825 modifier == WIDEN ? ncopies / 2 : ncopies,
2826 gs_info->offset, &vec_oprnds0);
2827 if (mask)
2828 vect_get_vec_defs_for_operand (vinfo, stmt_info,
2829 modifier == NARROW ? ncopies / 2 : ncopies,
2830 mask, &vec_masks, masktype);
2831 for (int j = 0; j < ncopies; ++j)
2832 {
2833 tree op, var;
2834 if (modifier == WIDEN && (j & 1))
2835 op = permute_vec_elements (vinfo, vec_oprnd0, vec_oprnd0,
2836 perm_mask, stmt_info, gsi);
2837 else
2838 op = vec_oprnd0 = vec_oprnds0[modifier == WIDEN ? j / 2 : j];
2839
2840 if (!useless_type_conversion_p (idxtype, TREE_TYPE (op)((contains_struct_check ((op), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2840, __FUNCTION__))->typed.type)
))
2841 {
2842 gcc_assert (known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (op)),((void)(!((!maybe_ne (TYPE_VECTOR_SUBPARTS (((contains_struct_check
((op), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2842, __FUNCTION__))->typed.type)), TYPE_VECTOR_SUBPARTS
(idxtype)))) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2843, __FUNCTION__), 0 : 0))
2843 TYPE_VECTOR_SUBPARTS (idxtype)))((void)(!((!maybe_ne (TYPE_VECTOR_SUBPARTS (((contains_struct_check
((op), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2842, __FUNCTION__))->typed.type)), TYPE_VECTOR_SUBPARTS
(idxtype)))) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2843, __FUNCTION__), 0 : 0))
;
2844 var = vect_get_new_ssa_name (idxtype, vect_simple_var);
2845 op = build1 (VIEW_CONVERT_EXPR, idxtype, op);
2846 gassign *new_stmt = gimple_build_assign (var, VIEW_CONVERT_EXPR, op);
2847 vect_finish_stmt_generation (vinfo, stmt_info, new_stmt, gsi);
2848 op = var;
2849 }
2850
2851 if (mask)
2852 {
2853 if (mask_perm_mask && (j & 1))
2854 mask_op = permute_vec_elements (vinfo, mask_op, mask_op,
2855 mask_perm_mask, stmt_info, gsi);
2856 else
2857 {
2858 if (modifier == NARROW)
2859 {
2860 if ((j & 1) == 0)
2861 vec_mask = vec_masks[j / 2];
2862 }
2863 else
2864 vec_mask = vec_masks[j];
2865
2866 mask_op = vec_mask;
2867 if (!useless_type_conversion_p (masktype, TREE_TYPE (vec_mask)((contains_struct_check ((vec_mask), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2867, __FUNCTION__))->typed.type)
))
2868 {
2869 poly_uint64 sub1 = TYPE_VECTOR_SUBPARTS (TREE_TYPE (mask_op)((contains_struct_check ((mask_op), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2869, __FUNCTION__))->typed.type)
);
2870 poly_uint64 sub2 = TYPE_VECTOR_SUBPARTS (masktype);
2871 gcc_assert (known_eq (sub1, sub2))((void)(!((!maybe_ne (sub1, sub2))) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2871, __FUNCTION__), 0 : 0))
;
2872 var = vect_get_new_ssa_name (masktype, vect_simple_var);
2873 mask_op = build1 (VIEW_CONVERT_EXPR, masktype, mask_op);
2874 gassign *new_stmt
2875 = gimple_build_assign (var, VIEW_CONVERT_EXPR, mask_op);
2876 vect_finish_stmt_generation (vinfo, stmt_info, new_stmt, gsi);
2877 mask_op = var;
2878 }
2879 }
2880 if (modifier == NARROW && masktype != real_masktype)
2881 {
2882 var = vect_get_new_ssa_name (mask_halftype, vect_simple_var);
2883 gassign *new_stmt
2884 = gimple_build_assign (var, (j & 1) ? VEC_UNPACK_HI_EXPR
2885 : VEC_UNPACK_LO_EXPR,
2886 mask_op);
2887 vect_finish_stmt_generation (vinfo, stmt_info, new_stmt, gsi);
2888 mask_op = var;
2889 }
2890 src_op = mask_op;
2891 }
2892
2893 tree mask_arg = mask_op;
2894 if (masktype != real_masktype)
2895 {
2896 tree utype, optype = TREE_TYPE (mask_op)((contains_struct_check ((mask_op), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2896, __FUNCTION__))->typed.type)
;
2897 if (TYPE_MODE (real_masktype)((((enum tree_code) ((tree_class_check ((real_masktype), (tcc_type
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2897, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(real_masktype) : (real_masktype)->type_common.mode)
== TYPE_MODE (optype)((((enum tree_code) ((tree_class_check ((optype), (tcc_type),
"/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2897, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(optype) : (optype)->type_common.mode)
)
2898 utype = real_masktype;
2899 else
2900 utype = lang_hooks.types.type_for_mode (TYPE_MODE (optype)((((enum tree_code) ((tree_class_check ((optype), (tcc_type),
"/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2900, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(optype) : (optype)->type_common.mode)
, 1);
2901 var = vect_get_new_ssa_name (utype, vect_scalar_var);
2902 mask_arg = build1 (VIEW_CONVERT_EXPR, utype, mask_op);
2903 gassign *new_stmt
2904 = gimple_build_assign (var, VIEW_CONVERT_EXPR, mask_arg);
2905 vect_finish_stmt_generation (vinfo, stmt_info, new_stmt, gsi);
2906 mask_arg = var;
2907 if (!useless_type_conversion_p (real_masktype, utype))
2908 {
2909 gcc_assert (TYPE_PRECISION (utype)((void)(!(((tree_class_check ((utype), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2909, __FUNCTION__))->type_common.precision) <= ((tree_class_check
((real_masktype), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2910, __FUNCTION__))->type_common.precision)) ? fancy_abort
("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2910, __FUNCTION__), 0 : 0))
2910 <= TYPE_PRECISION (real_masktype))((void)(!(((tree_class_check ((utype), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2909, __FUNCTION__))->type_common.precision) <= ((tree_class_check
((real_masktype), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2910, __FUNCTION__))->type_common.precision)) ? fancy_abort
("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2910, __FUNCTION__), 0 : 0))
;
2911 var = vect_get_new_ssa_name (real_masktype, vect_scalar_var);
2912 new_stmt = gimple_build_assign (var, NOP_EXPR, mask_arg);
2913 vect_finish_stmt_generation (vinfo, stmt_info, new_stmt, gsi);
2914 mask_arg = var;
2915 }
2916 src_op = build_zero_cst (srctype);
2917 }
2918 gimple *new_stmt = gimple_build_call (gs_info->decl, 5, src_op, ptr, op,
2919 mask_arg, scale);
2920
2921 if (!useless_type_conversion_p (vectype, rettype))
2922 {
2923 gcc_assert (known_eq (TYPE_VECTOR_SUBPARTS (vectype),((void)(!((!maybe_ne (TYPE_VECTOR_SUBPARTS (vectype), TYPE_VECTOR_SUBPARTS
(rettype)))) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2924, __FUNCTION__), 0 : 0))
2924 TYPE_VECTOR_SUBPARTS (rettype)))((void)(!((!maybe_ne (TYPE_VECTOR_SUBPARTS (vectype), TYPE_VECTOR_SUBPARTS
(rettype)))) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2924, __FUNCTION__), 0 : 0))
;
2925 op = vect_get_new_ssa_name (rettype, vect_simple_var);
2926 gimple_call_set_lhs (new_stmt, op);
2927 vect_finish_stmt_generation (vinfo, stmt_info, new_stmt, gsi);
2928 var = make_ssa_name (vec_dest);
2929 op = build1 (VIEW_CONVERT_EXPR, vectype, op);
2930 new_stmt = gimple_build_assign (var, VIEW_CONVERT_EXPR, op);
2931 vect_finish_stmt_generation (vinfo, stmt_info, new_stmt, gsi);
2932 }
2933 else
2934 {
2935 var = make_ssa_name (vec_dest, new_stmt);
2936 gimple_call_set_lhs (new_stmt, var);
2937 vect_finish_stmt_generation (vinfo, stmt_info, new_stmt, gsi);
2938 }
2939
2940 if (modifier == NARROW)
2941 {
2942 if ((j & 1) == 0)
2943 {
2944 prev_res = var;
2945 continue;
2946 }
2947 var = permute_vec_elements (vinfo, prev_res, var, perm_mask,
2948 stmt_info, gsi);
2949 new_stmt = SSA_NAME_DEF_STMT (var)(tree_check ((var), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2949, __FUNCTION__, (SSA_NAME)))->ssa_name.def_stmt
;
2950 }
2951
2952 STMT_VINFO_VEC_STMTS (stmt_info)(stmt_info)->vec_stmts.safe_push (new_stmt);
2953 }
2954 *vec_stmt = STMT_VINFO_VEC_STMTS (stmt_info)(stmt_info)->vec_stmts[0];
2955}
2956
2957/* Prepare the base and offset in GS_INFO for vectorization.
2958 Set *DATAREF_PTR to the loop-invariant base address and *VEC_OFFSET
2959 to the vectorized offset argument for the first copy of STMT_INFO.
2960 STMT_INFO is the statement described by GS_INFO and LOOP is the
2961 containing loop. */
2962
2963static void
2964vect_get_gather_scatter_ops (loop_vec_info loop_vinfo,
2965 class loop *loop, stmt_vec_info stmt_info,
2966 gather_scatter_info *gs_info,
2967 tree *dataref_ptr, vec<tree> *vec_offset)
2968{
2969 gimple_seq stmts = NULLnullptr;
2970 *dataref_ptr = force_gimple_operand (gs_info->base, &stmts, true, NULL_TREE(tree) nullptr);
2971 if (stmts != NULLnullptr)
2972 {
2973 basic_block new_bb;
2974 edge pe = loop_preheader_edge (loop);
2975 new_bb = gsi_insert_seq_on_edge_immediate (pe, stmts);
2976 gcc_assert (!new_bb)((void)(!(!new_bb) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2976, __FUNCTION__), 0 : 0))
;
2977 }
2978 unsigned ncopies = vect_get_num_copies (loop_vinfo, gs_info->offset_vectype);
2979 vect_get_vec_defs_for_operand (loop_vinfo, stmt_info, ncopies,
2980 gs_info->offset, vec_offset,
2981 gs_info->offset_vectype);
2982}
2983
2984/* Prepare to implement a grouped or strided load or store using
2985 the gather load or scatter store operation described by GS_INFO.
2986 STMT_INFO is the load or store statement.
2987
2988 Set *DATAREF_BUMP to the amount that should be added to the base
2989 address after each copy of the vectorized statement. Set *VEC_OFFSET
2990 to an invariant offset vector in which element I has the value
2991 I * DR_STEP / SCALE. */
2992
2993static void
2994vect_get_strided_load_store_ops (stmt_vec_info stmt_info,
2995 loop_vec_info loop_vinfo,
2996 gather_scatter_info *gs_info,
2997 tree *dataref_bump, tree *vec_offset)
2998{
2999 struct data_reference *dr = STMT_VINFO_DATA_REF (stmt_info)((stmt_info)->dr_aux.dr + 0);
3000 tree vectype = STMT_VINFO_VECTYPE (stmt_info)(stmt_info)->vectype;
3001
3002 tree bump = size_binop (MULT_EXPR,size_binop_loc (((location_t) 0), MULT_EXPR, fold_convert_loc
(((location_t) 0), sizetype_tab[(int) stk_sizetype], unshare_expr
((dr)->innermost.step)), size_int_kind (TYPE_VECTOR_SUBPARTS
(vectype), stk_sizetype))
3003 fold_convert (sizetype, unshare_expr (DR_STEP (dr))),size_binop_loc (((location_t) 0), MULT_EXPR, fold_convert_loc
(((location_t) 0), sizetype_tab[(int) stk_sizetype], unshare_expr
((dr)->innermost.step)), size_int_kind (TYPE_VECTOR_SUBPARTS
(vectype), stk_sizetype))
3004 size_int (TYPE_VECTOR_SUBPARTS (vectype)))size_binop_loc (((location_t) 0), MULT_EXPR, fold_convert_loc
(((location_t) 0), sizetype_tab[(int) stk_sizetype], unshare_expr
((dr)->innermost.step)), size_int_kind (TYPE_VECTOR_SUBPARTS
(vectype), stk_sizetype))
;
3005 *dataref_bump = cse_and_gimplify_to_preheader (loop_vinfo, bump);
3006
3007 /* The offset given in GS_INFO can have pointer type, so use the element
3008 type of the vector instead. */
3009 tree offset_type = TREE_TYPE (gs_info->offset_vectype)((contains_struct_check ((gs_info->offset_vectype), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 3009, __FUNCTION__))->typed.type)
;
3010
3011 /* Calculate X = DR_STEP / SCALE and convert it to the appropriate type. */
3012 tree step = size_binop (EXACT_DIV_EXPR, unshare_expr (DR_STEP (dr)),size_binop_loc (((location_t) 0), EXACT_DIV_EXPR, unshare_expr
((dr)->innermost.step), size_int_kind (gs_info->scale,
stk_ssizetype))
3013 ssize_int (gs_info->scale))size_binop_loc (((location_t) 0), EXACT_DIV_EXPR, unshare_expr
((dr)->innermost.step), size_int_kind (gs_info->scale,
stk_ssizetype))
;
3014 step = fold_convert (offset_type, step)fold_convert_loc (((location_t) 0), offset_type, step);
3015
3016 /* Create {0, X, X*2, X*3, ...}. */
3017 tree offset = fold_build2 (VEC_SERIES_EXPR, gs_info->offset_vectype,fold_build2_loc (((location_t) 0), VEC_SERIES_EXPR, gs_info->
offset_vectype, build_zero_cst (offset_type), step )
3018 build_zero_cst (offset_type), step)fold_build2_loc (((location_t) 0), VEC_SERIES_EXPR, gs_info->
offset_vectype, build_zero_cst (offset_type), step )
;
3019 *vec_offset = cse_and_gimplify_to_preheader (loop_vinfo, offset);
3020}
3021
3022/* Return the amount that should be added to a vector pointer to move
3023 to the next or previous copy of AGGR_TYPE. DR_INFO is the data reference
3024 being vectorized and MEMORY_ACCESS_TYPE describes the type of
3025 vectorization. */
3026
3027static tree
3028vect_get_data_ptr_increment (vec_info *vinfo,
3029 dr_vec_info *dr_info, tree aggr_type,
3030 vect_memory_access_type memory_access_type)
3031{
3032 if (memory_access_type == VMAT_INVARIANT)
3033 return size_zero_nodeglobal_trees[TI_SIZE_ZERO];
3034
3035 tree iv_step = TYPE_SIZE_UNIT (aggr_type)((tree_class_check ((aggr_type), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 3035, __FUNCTION__))->type_common.size_unit)
;
3036 tree step = vect_dr_behavior (vinfo, dr_info)->step;
3037 if (tree_int_cst_sgn (step) == -1)
3038 iv_step = fold_build1 (NEGATE_EXPR, TREE_TYPE (iv_step), iv_step)fold_build1_loc (((location_t) 0), NEGATE_EXPR, ((contains_struct_check
((iv_step), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 3038, __FUNCTION__))->typed.type), iv_step )
;
3039 return iv_step;
3040}
3041
3042/* Check and perform vectorization of BUILT_IN_BSWAP{16,32,64,128}. */
3043
3044static bool
3045vectorizable_bswap (vec_info *vinfo,
3046 stmt_vec_info stmt_info, gimple_stmt_iterator *gsi,
3047 gimple **vec_stmt, slp_tree slp_node,
3048 slp_tree *slp_op,
3049 tree vectype_in, stmt_vector_for_cost *cost_vec)
3050{
3051 tree op, vectype;
3052 gcall *stmt = as_a <gcall *> (stmt_info->stmt);
3053 loop_vec_info loop_vinfo = dyn_cast <loop_vec_info> (vinfo);
3054 unsigned ncopies;
3055
3056 op = gimple_call_arg (stmt, 0);
3057 vectype = STMT_VINFO_VECTYPE (stmt_info)(stmt_info)->vectype;
3058 poly_uint64 nunits = TYPE_VECTOR_SUBPARTS (vectype);
3059
3060 /* Multiple types in SLP are handled by creating the appropriate number of
3061 vectorized stmts for each SLP node. Hence, NCOPIES is always 1 in
3062 case of SLP. */
3063 if (slp_node)
3064 ncopies = 1;
3065 else
3066 ncopies = vect_get_num_copies (loop_vinfo, vectype);
3067
3068 gcc_assert (ncopies >= 1)((void)(!(ncopies >= 1) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 3068, __FUNCTION__), 0 : 0))
;
3069
3070 tree char_vectype = get_same_sized_vectype (char_type_nodeinteger_types[itk_char], vectype_in);
3071 if (! char_vectype)
3072 return false;
3073
3074 poly_uint64 num_bytes = TYPE_VECTOR_SUBPARTS (char_vectype);
3075 unsigned word_bytes;
3076 if (!constant_multiple_p (num_bytes, nunits, &word_bytes))
3077 return false;
3078
3079 /* The encoding uses one stepped pattern for each byte in the word. */
3080 vec_perm_builder elts (num_bytes, word_bytes, 3);
3081 for (unsigned i = 0; i < 3; ++i)
3082 for (unsigned j = 0; j < word_bytes; ++j)
3083 elts.quick_push ((i + 1) * word_bytes - j - 1);
3084
3085 vec_perm_indices indices (elts, 1, num_bytes);
3086 if (!can_vec_perm_const_p (TYPE_MODE (char_vectype)((((enum tree_code) ((tree_class_check ((char_vectype), (tcc_type
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 3086, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(char_vectype) : (char_vectype)->type_common.mode)
, indices))
3087 return false;
3088
3089 if (! vec_stmt)
3090 {
3091 if (slp_node
3092 && !vect_maybe_update_slp_op_vectype (slp_op[0], vectype_in))
3093 {
3094 if (dump_enabled_p ())
3095 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
3096 "incompatible vector types for invariants\n");
3097 return false;
3098 }
3099
3100 STMT_VINFO_TYPE (stmt_info)(stmt_info)->type = call_vec_info_type;
3101 DUMP_VECT_SCOPE ("vectorizable_bswap")auto_dump_scope scope ("vectorizable_bswap", vect_location);
3102 record_stmt_cost (cost_vec,
3103 1, vector_stmt, stmt_info, 0, vect_prologue);
3104 record_stmt_cost (cost_vec,
3105 slp_node
3106 ? SLP_TREE_NUMBER_OF_VEC_STMTS (slp_node)(slp_node)->vec_stmts_size : ncopies,
3107 vec_perm, stmt_info, 0, vect_body);
3108 return true;
3109 }
3110
3111 tree bswap_vconst = vec_perm_indices_to_tree (char_vectype, indices);
3112
3113 /* Transform. */
3114 vec<tree> vec_oprnds = vNULL;
3115 vect_get_vec_defs (vinfo, stmt_info, slp_node, ncopies,
3116 op, &vec_oprnds);
3117 /* Arguments are ready. create the new vector stmt. */
3118 unsigned i;
3119 tree vop;
3120 FOR_EACH_VEC_ELT (vec_oprnds, i, vop)for (i = 0; (vec_oprnds).iterate ((i), &(vop)); ++(i))
3121 {
3122 gimple *new_stmt;
3123 tree tem = make_ssa_name (char_vectype);
3124 new_stmt = gimple_build_assign (tem, build1 (VIEW_CONVERT_EXPR,
3125 char_vectype, vop));
3126 vect_finish_stmt_generation (vinfo, stmt_info, new_stmt, gsi);
3127 tree tem2 = make_ssa_name (char_vectype);
3128 new_stmt = gimple_build_assign (tem2, VEC_PERM_EXPR,
3129 tem, tem, bswap_vconst);
3130 vect_finish_stmt_generation (vinfo, stmt_info, new_stmt, gsi);
3131 tem = make_ssa_name (vectype);
3132 new_stmt = gimple_build_assign (tem, build1 (VIEW_CONVERT_EXPR,
3133 vectype, tem2));
3134 vect_finish_stmt_generation (vinfo, stmt_info, new_stmt, gsi);
3135 if (slp_node)
3136 SLP_TREE_VEC_STMTS (slp_node)(slp_node)->vec_stmts.quick_push (new_stmt);
3137 else
3138 STMT_VINFO_VEC_STMTS (stmt_info)(stmt_info)->vec_stmts.safe_push (new_stmt);
3139 }
3140
3141 if (!slp_node)
3142 *vec_stmt = STMT_VINFO_VEC_STMTS (stmt_info)(stmt_info)->vec_stmts[0];
3143
3144 vec_oprnds.release ();
3145 return true;
3146}
3147
3148/* Return true if vector types VECTYPE_IN and VECTYPE_OUT have
3149 integer elements and if we can narrow VECTYPE_IN to VECTYPE_OUT
3150 in a single step. On success, store the binary pack code in
3151 *CONVERT_CODE. */
3152
3153static bool
3154simple_integer_narrowing (tree vectype_out, tree vectype_in,
3155 tree_code *convert_code)
3156{
3157 if (!INTEGRAL_TYPE_P (TREE_TYPE (vectype_out))(((enum tree_code) (((contains_struct_check ((vectype_out), (
TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 3157, __FUNCTION__))->typed.type))->base.code) == ENUMERAL_TYPE
|| ((enum tree_code) (((contains_struct_check ((vectype_out)
, (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 3157, __FUNCTION__))->typed.type))->base.code) == BOOLEAN_TYPE
|| ((enum tree_code) (((contains_struct_check ((vectype_out)
, (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 3157, __FUNCTION__))->typed.type))->base.code) == INTEGER_TYPE
)
3158 || !INTEGRAL_TYPE_P (TREE_TYPE (vectype_in))(((enum tree_code) (((contains_struct_check ((vectype_in), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 3158, __FUNCTION__))->typed.type))->base.code) == ENUMERAL_TYPE
|| ((enum tree_code) (((contains_struct_check ((vectype_in),
(TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 3158, __FUNCTION__))->typed.type))->base.code) == BOOLEAN_TYPE
|| ((enum tree_code) (((contains_struct_check ((vectype_in),
(TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 3158, __FUNCTION__))->typed.type))->base.code) == INTEGER_TYPE
)
)
3159 return false;
3160
3161 tree_code code;
3162 int multi_step_cvt = 0;
3163 auto_vec <tree, 8> interm_types;
3164 if (!supportable_narrowing_operation (NOP_EXPR, vectype_out, vectype_in,
3165 &code, &multi_step_cvt, &interm_types)
3166 || multi_step_cvt)
3167 return false;
3168
3169 *convert_code = code;
3170 return true;
3171}
3172
3173/* Function vectorizable_call.
3174
3175 Check if STMT_INFO performs a function call that can be vectorized.
3176 If VEC_STMT is also passed, vectorize STMT_INFO: create a vectorized
3177 stmt to replace it, put it in VEC_STMT, and insert it at GSI.
3178 Return true if STMT_INFO is vectorizable in this way. */
3179
3180static bool
3181vectorizable_call (vec_info *vinfo,
3182 stmt_vec_info stmt_info, gimple_stmt_iterator *gsi,
3183 gimple **vec_stmt, slp_tree slp_node,
3184 stmt_vector_for_cost *cost_vec)
3185{
3186 gcall *stmt;
3187 tree vec_dest;
3188 tree scalar_dest;
3189 tree op;
3190 tree vec_oprnd0 = NULL_TREE(tree) nullptr, vec_oprnd1 = NULL_TREE(tree) nullptr;
3191 tree vectype_out, vectype_in;
3192 poly_uint64 nunits_in;
3193 poly_uint64 nunits_out;
3194 loop_vec_info loop_vinfo = dyn_cast <loop_vec_info> (vinfo);
3195 bb_vec_info bb_vinfo = dyn_cast <bb_vec_info> (vinfo);
3196 tree fndecl, new_temp, rhs_type;
3197 enum vect_def_type dt[4]
3198 = { vect_unknown_def_type, vect_unknown_def_type, vect_unknown_def_type,
3199 vect_unknown_def_type };
3200 tree vectypes[ARRAY_SIZE (dt)(sizeof (dt) / sizeof ((dt)[0]))] = {};
3201 slp_tree slp_op[ARRAY_SIZE (dt)(sizeof (dt) / sizeof ((dt)[0]))] = {};
3202 int ndts = ARRAY_SIZE (dt)(sizeof (dt) / sizeof ((dt)[0]));
3203 int ncopies, j;
3204 auto_vec<tree, 8> vargs;
3205 auto_vec<tree, 8> orig_vargs;
3206 enum { NARROW, NONE, WIDEN } modifier;
3207 size_t i, nargs;
3208 tree lhs;
3209
3210 if (!STMT_VINFO_RELEVANT_P (stmt_info)((stmt_info)->relevant != vect_unused_in_scope) && !bb_vinfo)
3211 return false;
3212
3213 if (STMT_VINFO_DEF_TYPE (stmt_info)(stmt_info)->def_type != vect_internal_def
3214 && ! vec_stmt)
3215 return false;
3216
3217 /* Is STMT_INFO a vectorizable call? */
3218 stmt = dyn_cast <gcall *> (stmt_info->stmt);
3219 if (!stmt)
3220 return false;
3221
3222 if (gimple_call_internal_p (stmt)
3223 && (internal_load_fn_p (gimple_call_internal_fn (stmt))
3224 || internal_store_fn_p (gimple_call_internal_fn (stmt))))
3225 /* Handled by vectorizable_load and vectorizable_store. */
3226 return false;
3227
3228 if (gimple_call_lhs (stmt) == NULL_TREE(tree) nullptr
3229 || TREE_CODE (gimple_call_lhs (stmt))((enum tree_code) (gimple_call_lhs (stmt))->base.code) != SSA_NAME)
3230 return false;
3231
3232 gcc_checking_assert (!stmt_can_throw_internal (cfun, stmt))((void)(!(!stmt_can_throw_internal ((cfun + 0), stmt)) ? fancy_abort
("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 3232, __FUNCTION__), 0 : 0))
;
3233
3234 vectype_out = STMT_VINFO_VECTYPE (stmt_info)(stmt_info)->vectype;
3235
3236 /* Process function arguments. */
3237 rhs_type = NULL_TREE(tree) nullptr;
3238 vectype_in = NULL_TREE(tree) nullptr;
3239 nargs = gimple_call_num_args (stmt);
3240
3241 /* Bail out if the function has more than four arguments, we do not have
3242 interesting builtin functions to vectorize with more than two arguments
3243 except for fma. No arguments is also not good. */
3244 if (nargs == 0 || nargs > 4)
3245 return false;
3246
3247 /* Ignore the arguments of IFN_GOMP_SIMD_LANE, they are magic. */
3248 combined_fn cfn = gimple_call_combined_fn (stmt);
3249 if (cfn == CFN_GOMP_SIMD_LANE)
3250 {
3251 nargs = 0;
3252 rhs_type = unsigned_type_nodeinteger_types[itk_unsigned_int];
3253 }
3254
 /* Index of the boolean mask argument for conditional internal
    functions, or -1 if the call is unconditional.  */
3255 int mask_opno = -1;
3256 if (internal_fn_p (cfn))
3257 mask_opno = internal_fn_mask_index (as_internal_fn (cfn));
3258
3259 for (i = 0; i < nargs; i++)
3260 {
3261 if ((int) i == mask_opno)
3262 {
3263 if (!vect_check_scalar_mask (vinfo, stmt_info, slp_node, mask_opno,
3264 &op, &slp_op[i], &dt[i], &vectypes[i]))
3265 return false;
3266 continue;
3267 }
3268
3269 if (!vect_is_simple_use (vinfo, stmt_info, slp_node,
3270 i, &op, &slp_op[i], &dt[i], &vectypes[i]))
3271 {
3272 if (dump_enabled_p ())
3273 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
3274 "use not simple.\n");
3275 return false;
3276 }
3277
3278 /* We can only handle calls with arguments of the same type. */
3279 if (rhs_type
3280 && !types_compatible_p (rhs_type, TREE_TYPE (op)((contains_struct_check ((op), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 3280, __FUNCTION__))->typed.type)
))
3281 {
3282 if (dump_enabled_p ())
3283 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
3284 "argument types differ.\n");
3285 return false;
3286 }
3287 if (!rhs_type)
3288 rhs_type = TREE_TYPE (op)((contains_struct_check ((op), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 3288, __FUNCTION__))->typed.type)
;
3289
3290 if (!vectype_in)
3291 vectype_in = vectypes[i];
3292 else if (vectypes[i]
3293 && !types_compatible_p (vectypes[i], vectype_in))
3294 {
3295 if (dump_enabled_p ())
3296 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
3297 "argument vector types differ.\n");
3298 return false;
3299 }
3300 }
3301 /* If all arguments are external or constant defs, infer the vector type
3302 from the scalar type. */
3303 if (!vectype_in)
3304 vectype_in = get_vectype_for_scalar_type (vinfo, rhs_type, slp_node);
3305 if (vec_stmt)
3306 gcc_assert (vectype_in)((void)(!(vectype_in) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 3306, __FUNCTION__), 0 : 0))
;
3307 if (!vectype_in)
3308 {
3309 if (dump_enabled_p ())
3310 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
3311 "no vectype for scalar type %T\n", rhs_type);
3312
3313 return false;
3314 }
3315 /* FORNOW: we don't yet support mixtures of vector sizes for calls,
3316 just mixtures of nunits. E.g. DI->SI versions of __builtin_ctz*
3317 are traditionally vectorized as two VnDI->VnDI IFN_CTZs followed
3318 by a pack of the two vectors into an SI vector. We would need
3319 separate code to handle direct VnDI->VnSI IFN_CTZs. */
3320 if (TYPE_SIZE (vectype_in)((tree_class_check ((vectype_in), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 3320, __FUNCTION__))->type_common.size)
!= TYPE_SIZE (vectype_out)((tree_class_check ((vectype_out), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 3320, __FUNCTION__))->type_common.size)
)
3321 {
3322 if (dump_enabled_p ())
3323 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
3324 "mismatched vector sizes %T and %T\n",
3325 vectype_in, vectype_out);
3326 return false;
3327 }
3328
3329 if (VECTOR_BOOLEAN_TYPE_P (vectype_out)(((enum tree_code) (vectype_out)->base.code) == VECTOR_TYPE
&& ((enum tree_code) (((contains_struct_check ((vectype_out
), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 3329, __FUNCTION__))->typed.type))->base.code) == BOOLEAN_TYPE
)
3330 != VECTOR_BOOLEAN_TYPE_P (vectype_in)(((enum tree_code) (vectype_in)->base.code) == VECTOR_TYPE
&& ((enum tree_code) (((contains_struct_check ((vectype_in
), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 3330, __FUNCTION__))->typed.type))->base.code) == BOOLEAN_TYPE
)
)
3331 {
3332 if (dump_enabled_p ())
3333 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
3334 "mixed mask and nonmask vector types\n");
3335 return false;
3336 }
3337
3338 /* FORNOW */
3339 nunits_in = TYPE_VECTOR_SUBPARTS (vectype_in);
3340 nunits_out = TYPE_VECTOR_SUBPARTS (vectype_out);
 /* Classify the call by comparing input and output element counts:
    NARROW when the result has twice as many elements as each input,
    WIDEN when it has half, NONE when they match.  */
3341 if (known_eq (nunits_in * 2, nunits_out)(!maybe_ne (nunits_in * 2, nunits_out)))
3342 modifier = NARROW;
3343 else if (known_eq (nunits_out, nunits_in)(!maybe_ne (nunits_out, nunits_in)))
3344 modifier = NONE;
3345 else if (known_eq (nunits_out * 2, nunits_in)(!maybe_ne (nunits_out * 2, nunits_in)))
3346 modifier = WIDEN;
3347 else
3348 return false;
3349
3350 /* We only handle functions that do not read or clobber memory. */
3351 if (gimple_vuse (stmt))
3352 {
3353 if (dump_enabled_p ())
3354 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
3355 "function reads from or writes to memory.\n");
3356 return false;
3357 }
3358
3359 /* For now, we only vectorize functions if a target specific builtin
3360 is available. TODO -- in some cases, it might be profitable to
3361 insert the calls for pieces of the vector, in order to be able
3362 to vectorize other operations in the loop. */
3363 fndecl = NULL_TREE(tree) nullptr;
3364 internal_fn ifn = IFN_LAST;
3365 tree callee = gimple_call_fndecl (stmt);
3366
3367 /* First try using an internal function. */
3368 tree_code convert_code = ERROR_MARK;
3369 if (cfn != CFN_LAST
3370 && (modifier == NONE
3371 || (modifier == NARROW
3372 && simple_integer_narrowing (vectype_out, vectype_in,
3373 &convert_code))))
3374 ifn = vectorizable_internal_function (cfn, callee, vectype_out,
3375 vectype_in);
3376
3377 /* If that fails, try asking for a target-specific built-in function. */
3378 if (ifn == IFN_LAST)
3379 {
3380 if (cfn != CFN_LAST)
3381 fndecl = targetm.vectorize.builtin_vectorized_function
3382 (cfn, vectype_out, vectype_in);
3383 else if (callee && fndecl_built_in_p (callee, BUILT_IN_MD))
3384 fndecl = targetm.vectorize.builtin_md_vectorized_function
3385 (callee, vectype_out, vectype_in);
3386 }
3387
3388 if (ifn == IFN_LAST && !fndecl)
3389 {
3390 if (cfn == CFN_GOMP_SIMD_LANE
3391 && !slp_node
3392 && loop_vinfo
3393 && LOOP_VINFO_LOOP (loop_vinfo)(loop_vinfo)->loop->simduid
3394 && TREE_CODE (gimple_call_arg (stmt, 0))((enum tree_code) (gimple_call_arg (stmt, 0))->base.code) == SSA_NAME
3395 && LOOP_VINFO_LOOP (loop_vinfo)(loop_vinfo)->loop->simduid
3396 == SSA_NAME_VAR (gimple_call_arg (stmt, 0))((tree_check ((gimple_call_arg (stmt, 0)), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 3396, __FUNCTION__, (SSA_NAME)))->ssa_name.var == (tree)
nullptr || ((enum tree_code) ((gimple_call_arg (stmt, 0))->
ssa_name.var)->base.code) == IDENTIFIER_NODE ? (tree) nullptr
: (gimple_call_arg (stmt, 0))->ssa_name.var)
)
3397 {
3398 /* We can handle IFN_GOMP_SIMD_LANE by returning a
3399 { 0, 1, 2, ... vf - 1 } vector. */
3400 gcc_assert (nargs == 0)((void)(!(nargs == 0) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 3400, __FUNCTION__), 0 : 0))
;
3401 }
3402 else if (modifier == NONE
3403 && (gimple_call_builtin_p (stmt, BUILT_IN_BSWAP16)
3404 || gimple_call_builtin_p (stmt, BUILT_IN_BSWAP32)
3405 || gimple_call_builtin_p (stmt, BUILT_IN_BSWAP64)
3406 || gimple_call_builtin_p (stmt, BUILT_IN_BSWAP128)))
3407 return vectorizable_bswap (vinfo, stmt_info, gsi, vec_stmt, slp_node,
3408 slp_op, vectype_in, cost_vec);
3409 else
3410 {
3411 if (dump_enabled_p ())
3412 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
3413 "function is not vectorizable.\n");
3414 return false;
3415 }
3416 }
3417
3418 if (slp_node)
3419 ncopies = 1;
3420 else if (modifier == NARROW && ifn == IFN_LAST)
3421 ncopies = vect_get_num_copies (loop_vinfo, vectype_out);
3422 else
3423 ncopies = vect_get_num_copies (loop_vinfo, vectype_in);
3424
3425 /* Sanity check: make sure that at least one copy of the vectorized stmt
3426 needs to be generated. */
3427 gcc_assert (ncopies >= 1)((void)(!(ncopies >= 1) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 3427, __FUNCTION__), 0 : 0))
;
3428
3429 vec_loop_masks *masks = (loop_vinfo ? &LOOP_VINFO_MASKS (loop_vinfo)(loop_vinfo)->masks : NULLnullptr);
3430 if (!vec_stmt) /* transformation not required. */
3431 {
3432 if (slp_node)
3433 for (i = 0; i < nargs; ++i)
3434 if (!vect_maybe_update_slp_op_vectype (slp_op[i], vectype_in))
3435 {
3436 if (dump_enabled_p ())
3437 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
3438 "incompatible vector types for invariants\n");
3439 return false;
3440 }
3441 STMT_VINFO_TYPE (stmt_info)(stmt_info)->type = call_vec_info_type;
3442 DUMP_VECT_SCOPE ("vectorizable_call")auto_dump_scope scope ("vectorizable_call", vect_location);
3443 vect_model_simple_cost (vinfo, stmt_info,
3444 ncopies, dt, ndts, slp_node, cost_vec);
3445 if (ifn != IFN_LAST && modifier == NARROW && !slp_node)
3446 record_stmt_cost (cost_vec, ncopies / 2,
3447 vec_promote_demote, stmt_info, 0, vect_body);
3448
3449 if (loop_vinfo && mask_opno >= 0)
3450 {
3451 unsigned int nvectors = (slp_node
3452 ? SLP_TREE_NUMBER_OF_VEC_STMTS (slp_node)(slp_node)->vec_stmts_size
3453 : ncopies);
3454 tree scalar_mask = gimple_call_arg (stmt_info->stmt, mask_opno);
3455 vect_record_loop_mask (loop_vinfo, masks, nvectors,
3456 vectype_out, scalar_mask);
3457 }
3458 return true;
3459 }
3460
3461 /* Transform. */
3462
3463 if (dump_enabled_p ())
3464 dump_printf_loc (MSG_NOTE, vect_location, "transform call.\n");
3465
3466 /* Handle def. */
3467 scalar_dest = gimple_call_lhs (stmt);
3468 vec_dest = vect_create_destination_var (scalar_dest, vectype_out);
3469
 /* True when each generated call must be predicated with a loop mask
    (the loop is being fully masked / partially vectorized).  */
3470 bool masked_loop_p = loop_vinfo && LOOP_VINFO_FULLY_MASKED_P (loop_vinfo)((loop_vinfo)->using_partial_vectors_p && !(loop_vinfo
)->masks.is_empty ())
;
3471
 /* Case 1: same-width calls, and internal-fn NARROW calls which build
    two half-width results and pack them with CONVERT_CODE below.  */
3472 if (modifier == NONE || ifn != IFN_LAST)
3473 {
3474 tree prev_res = NULL_TREE(tree) nullptr;
3475 vargs.safe_grow (nargs, true);
3476 orig_vargs.safe_grow (nargs, true);
3477 auto_vec<vec<tree> > vec_defs (nargs);
3478 for (j = 0; j < ncopies; ++j)
3479 {
3480 /* Build argument list for the vectorized call. */
3481 if (slp_node)
3482 {
3483 vec<tree> vec_oprnds0;
3484
3485 vect_get_slp_defs (vinfo, slp_node, &vec_defs);
3486 vec_oprnds0 = vec_defs[0];
3487
3488 /* Arguments are ready. Create the new vector stmt. */
3489 FOR_EACH_VEC_ELT (vec_oprnds0, i, vec_oprnd0)for (i = 0; (vec_oprnds0).iterate ((i), &(vec_oprnd0)); ++
(i))
3490 {
3491 size_t k;
3492 for (k = 0; k < nargs; k++)
3493 {
3494 vec<tree> vec_oprndsk = vec_defs[k];
3495 vargs[k] = vec_oprndsk[i];
3496 }
3497 gimple *new_stmt;
3498 if (modifier == NARROW)
3499 {
3500 /* We don't define any narrowing conditional functions
3501 at present. */
3502 gcc_assert (mask_opno < 0)((void)(!(mask_opno < 0) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 3502, __FUNCTION__), 0 : 0))
;
3503 tree half_res = make_ssa_name (vectype_in);
3504 gcall *call
3505 = gimple_build_call_internal_vec (ifn, vargs);
3506 gimple_call_set_lhs (call, half_res);
3507 gimple_call_set_nothrow (call, true);
3508 vect_finish_stmt_generation (vinfo, stmt_info, call, gsi);
 /* Even iterations only produce the low half; the pack
    with the previous half happens on odd iterations.  */
3509 if ((i & 1) == 0)
3510 {
3511 prev_res = half_res;
3512 continue;
3513 }
3514 new_temp = make_ssa_name (vec_dest);
3515 new_stmt = gimple_build_assign (new_temp, convert_code,
3516 prev_res, half_res);
3517 vect_finish_stmt_generation (vinfo, stmt_info,
3518 new_stmt, gsi);
3519 }
3520 else
3521 {
3522 if (mask_opno >= 0 && masked_loop_p)
3523 {
3524 unsigned int vec_num = vec_oprnds0.length ();
3525 /* Always true for SLP. */
3526 gcc_assert (ncopies == 1)((void)(!(ncopies == 1) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 3526, __FUNCTION__), 0 : 0))
;
3527 tree mask = vect_get_loop_mask (gsi, masks, vec_num,
3528 vectype_out, i);
 /* AND the statement's own mask with the loop mask.  */
3529 vargs[mask_opno] = prepare_load_store_mask
3530 (TREE_TYPE (mask)((contains_struct_check ((mask), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 3530, __FUNCTION__))->typed.type)
, mask, vargs[mask_opno], gsi);
3531 }
3532
3533 gcall *call;
3534 if (ifn != IFN_LAST)
3535 call = gimple_build_call_internal_vec (ifn, vargs);
3536 else
3537 call = gimple_build_call_vec (fndecl, vargs);
3538 new_temp = make_ssa_name (vec_dest, call);
3539 gimple_call_set_lhs (call, new_temp);
3540 gimple_call_set_nothrow (call, true);
3541 vect_finish_stmt_generation (vinfo, stmt_info, call, gsi);
3542 new_stmt = call;
3543 }
3544 SLP_TREE_VEC_STMTS (slp_node)(slp_node)->vec_stmts.quick_push (new_stmt);
3545 }
3546 continue;
3547 }
3548
 /* Non-SLP path: fetch the J'th vector def of each argument.  */
3549 for (i = 0; i < nargs; i++)
3550 {
3551 op = gimple_call_arg (stmt, i);
3552 if (j == 0)
3553 {
3554 vec_defs.quick_push (vNULL);
3555 vect_get_vec_defs_for_operand (vinfo, stmt_info, ncopies,
3556 op, &vec_defs[i],
3557 vectypes[i]);
3558 }
3559 orig_vargs[i] = vargs[i] = vec_defs[i][j];
3560 }
3561
3562 if (mask_opno >= 0 && masked_loop_p)
3563 {
3564 tree mask = vect_get_loop_mask (gsi, masks, ncopies,
3565 vectype_out, j);
3566 vargs[mask_opno]
3567 = prepare_load_store_mask (TREE_TYPE (mask)((contains_struct_check ((mask), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 3567, __FUNCTION__))->typed.type)
, mask,
3568 vargs[mask_opno], gsi);
3569 }
3570
3571 gimple *new_stmt;
3572 if (cfn == CFN_GOMP_SIMD_LANE)
3573 {
3574 tree cst = build_index_vector (vectype_out, j * nunits_out, 1);
3575 tree new_var
3576 = vect_get_new_ssa_name (vectype_out, vect_simple_var, "cst_");
3577 gimple *init_stmt = gimple_build_assign (new_var, cst);
3578 vect_init_vector_1 (vinfo, stmt_info, init_stmt, NULLnullptr);
3579 new_temp = make_ssa_name (vec_dest);
3580 new_stmt = gimple_build_assign (new_temp, new_var);
3581 vect_finish_stmt_generation (vinfo, stmt_info, new_stmt, gsi);
3582 }
3583 else if (modifier == NARROW)
3584 {
3585 /* We don't define any narrowing conditional functions at
3586 present. */
3587 gcc_assert (mask_opno < 0)((void)(!(mask_opno < 0) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 3587, __FUNCTION__), 0 : 0))
;
3588 tree half_res = make_ssa_name (vectype_in);
3589 gcall *call = gimple_build_call_internal_vec (ifn, vargs);
3590 gimple_call_set_lhs (call, half_res);
3591 gimple_call_set_nothrow (call, true);
3592 vect_finish_stmt_generation (vinfo, stmt_info, call, gsi);
 /* Even copies only produce the low half; the pack with the
    previous half happens on odd copies.  */
3593 if ((j & 1) == 0)
3594 {
3595 prev_res = half_res;
3596 continue;
3597 }
3598 new_temp = make_ssa_name (vec_dest);
3599 new_stmt = gimple_build_assign (new_temp, convert_code,
3600 prev_res, half_res);
3601 vect_finish_stmt_generation (vinfo, stmt_info, new_stmt, gsi);
3602 }
3603 else
3604 {
3605 gcall *call;
3606 if (ifn != IFN_LAST)
3607 call = gimple_build_call_internal_vec (ifn, vargs);
3608 else
3609 call = gimple_build_call_vec (fndecl, vargs);
3610 new_temp = make_ssa_name (vec_dest, call);
3611 gimple_call_set_lhs (call, new_temp);
3612 gimple_call_set_nothrow (call, true);
3613 vect_finish_stmt_generation (vinfo, stmt_info, call, gsi);
3614 new_stmt = call;
3615 }
3616
 /* For NARROW the first complete vector result is only ready after
    the second copy (j == 1); otherwise after the first (j == 0).  */
3617 if (j == (modifier == NARROW ? 1 : 0))
3618 *vec_stmt = new_stmt;
3619 STMT_VINFO_VEC_STMTS (stmt_info)(stmt_info)->vec_stmts.safe_push (new_stmt);
3620 }
3621 for (i = 0; i < nargs; i++)
3622 {
3623 vec<tree> vec_oprndsi = vec_defs[i];
3624 vec_oprndsi.release ();
3625 }
3626 }
 /* Case 2: NARROW with a target builtin: the builtin itself consumes
    two input vectors per output vector, so push pairs of defs.  */
3627 else if (modifier == NARROW)
3628 {
3629 auto_vec<vec<tree> > vec_defs (nargs);
3630 /* We don't define any narrowing conditional functions at present. */
3631 gcc_assert (mask_opno < 0)((void)(!(mask_opno < 0) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 3631, __FUNCTION__), 0 : 0))
;
3632 for (j = 0; j < ncopies; ++j)
3633 {
3634 /* Build argument list for the vectorized call. */
3635 if (j == 0)
3636 vargs.create (nargs * 2);
3637 else
3638 vargs.truncate (0);
3639
3640 if (slp_node)
3641 {
3642 vec<tree> vec_oprnds0;
3643
3644 vect_get_slp_defs (vinfo, slp_node, &vec_defs);
3645 vec_oprnds0 = vec_defs[0];
3646
3647 /* Arguments are ready. Create the new vector stmt. */
3648 for (i = 0; vec_oprnds0.iterate (i, &vec_oprnd0); i += 2)
3649 {
3650 size_t k;
3651 vargs.truncate (0);
3652 for (k = 0; k < nargs; k++)
3653 {
3654 vec<tree> vec_oprndsk = vec_defs[k];
3655 vargs.quick_push (vec_oprndsk[i]);
3656 vargs.quick_push (vec_oprndsk[i + 1]);
3657 }
3658 gcall *call;
3659 if (ifn != IFN_LAST)
3660 call = gimple_build_call_internal_vec (ifn, vargs);
3661 else
3662 call = gimple_build_call_vec (fndecl, vargs);
3663 new_temp = make_ssa_name (vec_dest, call);
3664 gimple_call_set_lhs (call, new_temp);
3665 gimple_call_set_nothrow (call, true);
3666 vect_finish_stmt_generation (vinfo, stmt_info, call, gsi);
3667 SLP_TREE_VEC_STMTS (slp_node)(slp_node)->vec_stmts.quick_push (call);
3668 }
3669 continue;
3670 }
3671
3672 for (i = 0; i < nargs; i++)
3673 {
3674 op = gimple_call_arg (stmt, i);
3675 if (j == 0)
3676 {
3677 vec_defs.quick_push (vNULL);
3678 vect_get_vec_defs_for_operand (vinfo, stmt_info, 2 * ncopies,
3679 op, &vec_defs[i], vectypes[i]);
3680 }
3681 vec_oprnd0 = vec_defs[i][2*j];
3682 vec_oprnd1 = vec_defs[i][2*j+1];
3683
3684 vargs.quick_push (vec_oprnd0);
3685 vargs.quick_push (vec_oprnd1);
3686 }
3687
3688 gcall *new_stmt = gimple_build_call_vec (fndecl, vargs);
3689 new_temp = make_ssa_name (vec_dest, new_stmt);
3690 gimple_call_set_lhs (new_stmt, new_temp);
3691 vect_finish_stmt_generation (vinfo, stmt_info, new_stmt, gsi);
3692
3693 STMT_VINFO_VEC_STMTS (stmt_info)(stmt_info)->vec_stmts.safe_push (new_stmt);
3694 }
3695
3696 if (!slp_node)
3697 *vec_stmt = STMT_VINFO_VEC_STMTS (stmt_info)(stmt_info)->vec_stmts[0];
3698
3699 for (i = 0; i < nargs; i++)
3700 {
3701 vec<tree> vec_oprndsi = vec_defs[i];
3702 vec_oprndsi.release ();
3703 }
3704 }
3705 else
3706 /* No current target implements this case. */
3707 return false;
3708
3709 vargs.release ();
3710
3711 /* The call in STMT might prevent it from being removed in dce.
3712 We however cannot remove it here, due to the way the ssa name
3713 it defines is mapped to the new definition. So just replace
3714 rhs of the statement with something harmless. */
3715
3716 if (slp_node)
3717 return true;
3718
3719 stmt_info = vect_orig_stmt (stmt_info);
3720 lhs = gimple_get_lhs (stmt_info->stmt);
3721
3722 gassign *new_stmt
3723 = gimple_build_assign (lhs, build_zero_cst (TREE_TYPE (lhs)((contains_struct_check ((lhs), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 3723, __FUNCTION__))->typed.type)
));
3724 vinfo->replace_stmt (gsi, stmt_info, new_stmt);
3725
3726 return true;
3727}
3728
3729
/* Per-argument information collected by vectorizable_simd_clone_call.  */
3730struct simd_call_arg_info
3731{
 /* Vector type of the argument's definition, if any.  */
3732 tree vectype;
 /* The scalar operand (for linear arguments, the recorded base).  */
3733 tree op;
 /* Step of a linear argument; 0 when the argument is not linear.  */
3734 HOST_WIDE_INTlong linear_step;
 /* Kind of vectorizer definition feeding the argument.  */
3735 enum vect_def_type dt;
 /* Known alignment of the argument (0 when not computed).  */
3736 unsigned int align;
 /* True if the argument was proved linear within the simd lane
    (see vect_simd_lane_linear).  */
3737 bool simd_lane_linear;
3738};
3739
3740/* Helper function of vectorizable_simd_clone_call. If OP, an SSA_NAME,
3741 is linear within simd lane (but not within whole loop), note it in
3742 *ARGINFO. */
3743
3744static void
3745vect_simd_lane_linear (tree op, class loop *loop,
3746 struct simd_call_arg_info *arginfo)
3747{
3748 gimple *def_stmt = SSA_NAME_DEF_STMT (op)(tree_check ((op), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 3748, __FUNCTION__, (SSA_NAME)))->ssa_name.def_stmt
;
3749
 /* Only handle OP defined as invariant_base p+ offset.  */
3750 if (!is_gimple_assign (def_stmt)
3751 || gimple_assign_rhs_code (def_stmt) != POINTER_PLUS_EXPR
3752 || !is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
3753 return;
3754
3755 tree base = gimple_assign_rhs1 (def_stmt);
3756 HOST_WIDE_INTlong linear_step = 0;
3757 tree v = gimple_assign_rhs2 (def_stmt);
 /* Walk the SSA definition chain of the offset, stripping constant
    additions (folded into BASE), at most one constant multiplication
    (recorded as the step) and non-truncating integer conversions.  */
3758 while (TREE_CODE (v)((enum tree_code) (v)->base.code) == SSA_NAME)
3759 {
3760 tree t;
3761 def_stmt = SSA_NAME_DEF_STMT (v)(tree_check ((v), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 3761, __FUNCTION__, (SSA_NAME)))->ssa_name.def_stmt
;
3762 if (is_gimple_assign (def_stmt))
3763 switch (gimple_assign_rhs_code (def_stmt))
3764 {
3765 case PLUS_EXPR:
3766 t = gimple_assign_rhs2 (def_stmt);
3767 if (linear_step || TREE_CODE (t)((enum tree_code) (t)->base.code) != INTEGER_CST)
3768 return;
3769 base = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (base), base, t)fold_build2_loc (((location_t) 0), POINTER_PLUS_EXPR, ((contains_struct_check
((base), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 3769, __FUNCTION__))->typed.type), base, t )
;
3770 v = gimple_assign_rhs1 (def_stmt);
3771 continue;
3772 case MULT_EXPR:
3773 t = gimple_assign_rhs2 (def_stmt);
3774 if (linear_step || !tree_fits_shwi_p (t) || integer_zerop (t))
3775 return;
3776 linear_step = tree_to_shwi (t);
3777 v = gimple_assign_rhs1 (def_stmt);
3778 continue;
3779 CASE_CONVERTcase NOP_EXPR: case CONVERT_EXPR:
3780 t = gimple_assign_rhs1 (def_stmt);
 /* Reject non-integer sources and truncations, which would not
    preserve linearity.  */
3781 if (TREE_CODE (TREE_TYPE (t))((enum tree_code) (((contains_struct_check ((t), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 3781, __FUNCTION__))->typed.type))->base.code)
!= INTEGER_TYPE
3782 || (TYPE_PRECISION (TREE_TYPE (v))((tree_class_check ((((contains_struct_check ((v), (TS_TYPED)
, "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 3782, __FUNCTION__))->typed.type)), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 3782, __FUNCTION__))->type_common.precision)
3783 < TYPE_PRECISION (TREE_TYPE (t))((tree_class_check ((((contains_struct_check ((t), (TS_TYPED)
, "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 3783, __FUNCTION__))->typed.type)), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 3783, __FUNCTION__))->type_common.precision)
))
3784 return;
3785 if (!linear_step)
3786 linear_step = 1;
3787 v = t;
3788 continue;
3789 default:
3790 return;
3791 }
 /* Success: the root of the chain is this loop's simd lane, so OP is
    linear within the lane; record base and step in *ARGINFO.  */
3792 else if (gimple_call_internal_p (def_stmt, IFN_GOMP_SIMD_LANE)
3793 && loop->simduid
3794 && TREE_CODE (gimple_call_arg (def_stmt, 0))((enum tree_code) (gimple_call_arg (def_stmt, 0))->base.code
)
== SSA_NAME
3795 && (SSA_NAME_VAR (gimple_call_arg (def_stmt, 0))((tree_check ((gimple_call_arg (def_stmt, 0)), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 3795, __FUNCTION__, (SSA_NAME)))->ssa_name.var == (tree)
nullptr || ((enum tree_code) ((gimple_call_arg (def_stmt, 0)
)->ssa_name.var)->base.code) == IDENTIFIER_NODE ? (tree
) nullptr : (gimple_call_arg (def_stmt, 0))->ssa_name.var)
3796 == loop->simduid))
3797 {
3798 if (!linear_step)
3799 linear_step = 1;
3800 arginfo->linear_step = linear_step;
3801 arginfo->op = base;
3802 arginfo->simd_lane_linear = true;
3803 return;
3804 }
3805 }
3806}
3807
3808/* Return the number of elements in vector type VECTYPE, which is associated
3809 with a SIMD clone. At present these vectors always have a constant
3810 length. */
3811
3812static unsigned HOST_WIDE_INTlong
3813simd_clone_subparts (tree vectype)
3814{
3815 return TYPE_VECTOR_SUBPARTS (vectype).to_constant ();
3816}
3817
3818/* Function vectorizable_simd_clone_call.
3819
3820 Check if STMT_INFO performs a function call that can be vectorized
3821 by calling a simd clone of the function.
3822 If VEC_STMT is also passed, vectorize STMT_INFO: create a vectorized
3823 stmt to replace it, put it in VEC_STMT, and insert it at GSI.
3824 Return true if STMT_INFO is vectorizable in this way. */
3825
3826static bool
3827vectorizable_simd_clone_call (vec_info *vinfo, stmt_vec_info stmt_info,
3828 gimple_stmt_iterator *gsi,
3829 gimple **vec_stmt, slp_tree slp_node,
3830 stmt_vector_for_cost *)
3831{
3832 tree vec_dest;
3833 tree scalar_dest;
3834 tree op, type;
3835 tree vec_oprnd0 = NULL_TREE(tree) nullptr;
3836 tree vectype;
3837 poly_uint64 nunits;
3838 loop_vec_info loop_vinfo = dyn_cast <loop_vec_info> (vinfo);
3839 bb_vec_info bb_vinfo = dyn_cast <bb_vec_info> (vinfo);
3840 class loop *loop = loop_vinfo ? LOOP_VINFO_LOOP (loop_vinfo)(loop_vinfo)->loop : NULLnullptr;
3841 tree fndecl, new_temp;
3842 int ncopies, j;
3843 auto_vec<simd_call_arg_info> arginfo;
3844 vec<tree> vargs = vNULL;
3845 size_t i, nargs;
3846 tree lhs, rtype, ratype;
3847 vec<constructor_elt, va_gc> *ret_ctor_elts = NULLnullptr;
3848
3849 /* Is STMT a vectorizable call? */
3850 gcall *stmt = dyn_cast <gcall *> (stmt_info->stmt);
3851 if (!stmt)
3852 return false;
3853
3854 fndecl = gimple_call_fndecl (stmt);
3855 if (fndecl == NULL_TREE(tree) nullptr)
3856 return false;
3857
3858 struct cgraph_node *node = cgraph_node::get (fndecl);
3859 if (node == NULLnullptr || node->simd_clones == NULLnullptr)
3860 return false;
3861
3862 if (!STMT_VINFO_RELEVANT_P (stmt_info)((stmt_info)->relevant != vect_unused_in_scope) && !bb_vinfo)
3863 return false;
3864
3865 if (STMT_VINFO_DEF_TYPE (stmt_info)(stmt_info)->def_type != vect_internal_def
3866 && ! vec_stmt)
3867 return false;
3868
3869 if (gimple_call_lhs (stmt)
3870 && TREE_CODE (gimple_call_lhs (stmt))((enum tree_code) (gimple_call_lhs (stmt))->base.code) != SSA_NAME)
3871 return false;
3872
3873 gcc_checking_assert (!stmt_can_throw_internal (cfun, stmt))((void)(!(!stmt_can_throw_internal ((cfun + 0), stmt)) ? fancy_abort
("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 3873, __FUNCTION__), 0 : 0))
;
3874
3875 vectype = STMT_VINFO_VECTYPE (stmt_info)(stmt_info)->vectype;
3876
3877 if (loop_vinfo && nested_in_vect_loop_p (loop, stmt_info))
3878 return false;
3879
3880 /* FORNOW */
3881 if (slp_node)
3882 return false;
3883
3884 /* Process function arguments. */
3885 nargs = gimple_call_num_args (stmt);
3886
3887 /* Bail out if the function has zero arguments. */
3888 if (nargs == 0)
3889 return false;
3890
3891 arginfo.reserve (nargs, true);
3892
3893 for (i = 0; i < nargs; i++)
3894 {
3895 simd_call_arg_info thisarginfo;
3896 affine_iv iv;
3897
3898 thisarginfo.linear_step = 0;
3899 thisarginfo.align = 0;
3900 thisarginfo.op = NULL_TREE(tree) nullptr;
3901 thisarginfo.simd_lane_linear = false;
3902
3903 op = gimple_call_arg (stmt, i);
3904 if (!vect_is_simple_use (op, vinfo, &thisarginfo.dt,
3905 &thisarginfo.vectype)
3906 || thisarginfo.dt == vect_uninitialized_def)
3907 {
3908 if (dump_enabled_p ())
3909 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
3910 "use not simple.\n");
3911 return false;
3912 }
3913
3914 if (thisarginfo.dt == vect_constant_def
3915 || thisarginfo.dt == vect_external_def)
3916 gcc_assert (thisarginfo.vectype == NULL_TREE)((void)(!(thisarginfo.vectype == (tree) nullptr) ? fancy_abort
("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 3916, __FUNCTION__), 0 : 0))
;
3917 else
3918 {
3919 gcc_assert (thisarginfo.vectype != NULL_TREE)((void)(!(thisarginfo.vectype != (tree) nullptr) ? fancy_abort
("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 3919, __FUNCTION__), 0 : 0))
;
3920 if (VECTOR_BOOLEAN_TYPE_P (thisarginfo.vectype)(((enum tree_code) (thisarginfo.vectype)->base.code) == VECTOR_TYPE
&& ((enum tree_code) (((contains_struct_check ((thisarginfo
.vectype), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 3920, __FUNCTION__))->typed.type))->base.code) == BOOLEAN_TYPE
)
)
3921 {
3922 if (dump_enabled_p ())
3923 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
3924 "vector mask arguments are not supported\n");
3925 return false;
3926 }
3927 }
3928
3929 /* For linear arguments, the analyze phase should have saved
3930 the base and step in STMT_VINFO_SIMD_CLONE_INFO. */
3931 if (i * 3 + 4 <= STMT_VINFO_SIMD_CLONE_INFO (stmt_info)(stmt_info)->simd_clone_info.length ()
3932 && STMT_VINFO_SIMD_CLONE_INFO (stmt_info)(stmt_info)->simd_clone_info[i * 3 + 2])
3933 {
3934 gcc_assert (vec_stmt)((void)(!(vec_stmt) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 3934, __FUNCTION__), 0 : 0))
;
3935 thisarginfo.linear_step
3936 = tree_to_shwi (STMT_VINFO_SIMD_CLONE_INFO (stmt_info)(stmt_info)->simd_clone_info[i * 3 + 2]);
3937 thisarginfo.op
3938 = STMT_VINFO_SIMD_CLONE_INFO (stmt_info)(stmt_info)->simd_clone_info[i * 3 + 1];
3939 thisarginfo.simd_lane_linear
3940 = (STMT_VINFO_SIMD_CLONE_INFO (stmt_info)(stmt_info)->simd_clone_info[i * 3 + 3]
3941 == boolean_true_nodeglobal_trees[TI_BOOLEAN_TRUE]);
3942 /* If loop has been peeled for alignment, we need to adjust it. */
3943 tree n1 = LOOP_VINFO_NITERS_UNCHANGED (loop_vinfo)(loop_vinfo)->num_iters_unchanged;
3944 tree n2 = LOOP_VINFO_NITERS (loop_vinfo)(loop_vinfo)->num_iters;
3945 if (n1 != n2 && !thisarginfo.simd_lane_linear)
3946 {
3947 tree bias = fold_build2 (MINUS_EXPR, TREE_TYPE (n1), n1, n2)fold_build2_loc (((location_t) 0), MINUS_EXPR, ((contains_struct_check
((n1), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 3947, __FUNCTION__))->typed.type), n1, n2 )
;
3948 tree step = STMT_VINFO_SIMD_CLONE_INFO (stmt_info)(stmt_info)->simd_clone_info[i * 3 + 2];
3949 tree opt = TREE_TYPE (thisarginfo.op)((contains_struct_check ((thisarginfo.op), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 3949, __FUNCTION__))->typed.type)
;
3950 bias = fold_convert (TREE_TYPE (step), bias)fold_convert_loc (((location_t) 0), ((contains_struct_check (
(step), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 3950, __FUNCTION__))->typed.type), bias)
;
3951 bias = fold_build2 (MULT_EXPR, TREE_TYPE (step), bias, step)fold_build2_loc (((location_t) 0), MULT_EXPR, ((contains_struct_check
((step), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 3951, __FUNCTION__))->typed.type), bias, step )
;
3952 thisarginfo.op
3953 = fold_build2 (POINTER_TYPE_P (opt)fold_build2_loc (((location_t) 0), (((enum tree_code) (opt)->
base.code) == POINTER_TYPE || ((enum tree_code) (opt)->base
.code) == REFERENCE_TYPE) ? POINTER_PLUS_EXPR : PLUS_EXPR, opt
, thisarginfo.op, bias )
3954 ? POINTER_PLUS_EXPR : PLUS_EXPR, opt,fold_build2_loc (((location_t) 0), (((enum tree_code) (opt)->
base.code) == POINTER_TYPE || ((enum tree_code) (opt)->base
.code) == REFERENCE_TYPE) ? POINTER_PLUS_EXPR : PLUS_EXPR, opt
, thisarginfo.op, bias )
3955 thisarginfo.op, bias)fold_build2_loc (((location_t) 0), (((enum tree_code) (opt)->
base.code) == POINTER_TYPE || ((enum tree_code) (opt)->base
.code) == REFERENCE_TYPE) ? POINTER_PLUS_EXPR : PLUS_EXPR, opt
, thisarginfo.op, bias )
;
3956 }
3957 }
3958 else if (!vec_stmt
3959 && thisarginfo.dt != vect_constant_def
3960 && thisarginfo.dt != vect_external_def
3961 && loop_vinfo
3962 && TREE_CODE (op)((enum tree_code) (op)->base.code) == SSA_NAME
3963 && simple_iv (loop, loop_containing_stmt (stmt), op,
3964 &iv, false)
3965 && tree_fits_shwi_p (iv.step))
3966 {
3967 thisarginfo.linear_step = tree_to_shwi (iv.step);
3968 thisarginfo.op = iv.base;
3969 }
3970 else if ((thisarginfo.dt == vect_constant_def
3971 || thisarginfo.dt == vect_external_def)
3972 && POINTER_TYPE_P (TREE_TYPE (op))(((enum tree_code) (((contains_struct_check ((op), (TS_TYPED)
, "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 3972, __FUNCTION__))->typed.type))->base.code) == POINTER_TYPE
|| ((enum tree_code) (((contains_struct_check ((op), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 3972, __FUNCTION__))->typed.type))->base.code) == REFERENCE_TYPE
)
)
3973 thisarginfo.align = get_pointer_alignment (op) / BITS_PER_UNIT(8);
3974 /* Addresses of array elements indexed by GOMP_SIMD_LANE are
3975 linear too. */
3976 if (POINTER_TYPE_P (TREE_TYPE (op))(((enum tree_code) (((contains_struct_check ((op), (TS_TYPED)
, "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 3976, __FUNCTION__))->typed.type))->base.code) == POINTER_TYPE
|| ((enum tree_code) (((contains_struct_check ((op), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 3976, __FUNCTION__))->typed.type))->base.code) == REFERENCE_TYPE
)
3977 && !thisarginfo.linear_step
3978 && !vec_stmt
3979 && thisarginfo.dt != vect_constant_def
3980 && thisarginfo.dt != vect_external_def
3981 && loop_vinfo
3982 && !slp_node
3983 && TREE_CODE (op)((enum tree_code) (op)->base.code) == SSA_NAME)
3984 vect_simd_lane_linear (op, loop, &thisarginfo);
3985
3986 arginfo.quick_push (thisarginfo);
3987 }
3988
3989 poly_uint64 vf = LOOP_VINFO_VECT_FACTOR (loop_vinfo)(loop_vinfo)->vectorization_factor;
3990 if (!vf.is_constant ())
3991 {
3992 if (dump_enabled_p ())
3993 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
3994 "not considering SIMD clones; not yet supported"
3995 " for variable-width vectors.\n");
3996 return false;
3997 }
3998
3999 unsigned int badness = 0;
4000 struct cgraph_node *bestn = NULLnullptr;
4001 if (STMT_VINFO_SIMD_CLONE_INFO (stmt_info)(stmt_info)->simd_clone_info.exists ())
4002 bestn = cgraph_node::get (STMT_VINFO_SIMD_CLONE_INFO (stmt_info)(stmt_info)->simd_clone_info[0]);
4003 else
4004 for (struct cgraph_node *n = node->simd_clones; n != NULLnullptr;
4005 n = n->simdclone->next_clone)
4006 {
4007 unsigned int this_badness = 0;
4008 unsigned int num_calls;
4009 if (!constant_multiple_p (vf, n->simdclone->simdlen, &num_calls)
4010 || n->simdclone->nargs != nargs)
4011 continue;
4012 if (num_calls != 1)
4013 this_badness += exact_log2 (num_calls) * 4096;
4014 if (n->simdclone->inbranch)
4015 this_badness += 8192;
4016 int target_badness = targetm.simd_clone.usable (n);
4017 if (target_badness < 0)
4018 continue;
4019 this_badness += target_badness * 512;
4020 /* FORNOW: Have to add code to add the mask argument. */
4021 if (n->simdclone->inbranch)
4022 continue;
4023 for (i = 0; i < nargs; i++)
4024 {
4025 switch (n->simdclone->args[i].arg_type)
4026 {
4027 case SIMD_CLONE_ARG_TYPE_VECTOR:
4028 if (!useless_type_conversion_p
4029 (n->simdclone->args[i].orig_type,
4030 TREE_TYPE (gimple_call_arg (stmt, i))((contains_struct_check ((gimple_call_arg (stmt, i)), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 4030, __FUNCTION__))->typed.type)
))
4031 i = -1;
4032 else if (arginfo[i].dt == vect_constant_def
4033 || arginfo[i].dt == vect_external_def
4034 || arginfo[i].linear_step)
4035 this_badness += 64;
4036 break;
4037 case SIMD_CLONE_ARG_TYPE_UNIFORM:
4038 if (arginfo[i].dt != vect_constant_def
4039 && arginfo[i].dt != vect_external_def)
4040 i = -1;
4041 break;
4042 case SIMD_CLONE_ARG_TYPE_LINEAR_CONSTANT_STEP:
4043 case SIMD_CLONE_ARG_TYPE_LINEAR_REF_CONSTANT_STEP:
4044 if (arginfo[i].dt == vect_constant_def
4045 || arginfo[i].dt == vect_external_def
4046 || (arginfo[i].linear_step
4047 != n->simdclone->args[i].linear_step))
4048 i = -1;
4049 break;
4050 case SIMD_CLONE_ARG_TYPE_LINEAR_VARIABLE_STEP:
4051 case SIMD_CLONE_ARG_TYPE_LINEAR_VAL_CONSTANT_STEP:
4052 case SIMD_CLONE_ARG_TYPE_LINEAR_UVAL_CONSTANT_STEP:
4053 case SIMD_CLONE_ARG_TYPE_LINEAR_REF_VARIABLE_STEP:
4054 case SIMD_CLONE_ARG_TYPE_LINEAR_VAL_VARIABLE_STEP:
4055 case SIMD_CLONE_ARG_TYPE_LINEAR_UVAL_VARIABLE_STEP:
4056 /* FORNOW */
4057 i = -1;
4058 break;
4059 case SIMD_CLONE_ARG_TYPE_MASK:
4060 gcc_unreachable ()(fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 4060, __FUNCTION__))
;
4061 }
4062 if (i == (size_t) -1)
4063 break;
4064 if (n->simdclone->args[i].alignment > arginfo[i].align)
4065 {
4066 i = -1;
4067 break;
4068 }
4069 if (arginfo[i].align)
4070 this_badness += (exact_log2 (arginfo[i].align)
4071 - exact_log2 (n->simdclone->args[i].alignment));
4072 }
4073 if (i == (size_t) -1)
4074 continue;
4075 if (bestn == NULLnullptr || this_badness < badness)
4076 {
4077 bestn = n;
4078 badness = this_badness;
4079 }
4080 }
4081
4082 if (bestn == NULLnullptr)
4083 return false;
4084
4085 for (i = 0; i < nargs; i++)
4086 if ((arginfo[i].dt == vect_constant_def
4087 || arginfo[i].dt == vect_external_def)
4088 && bestn->simdclone->args[i].arg_type == SIMD_CLONE_ARG_TYPE_VECTOR)
4089 {
4090 tree arg_type = TREE_TYPE (gimple_call_arg (stmt, i))((contains_struct_check ((gimple_call_arg (stmt, i)), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 4090, __FUNCTION__))->typed.type)
;
4091 arginfo[i].vectype = get_vectype_for_scalar_type (vinfo, arg_type,
4092 slp_node);
4093 if (arginfo[i].vectype == NULLnullptr
4094 || !constant_multiple_p (bestn->simdclone->simdlen,
4095 simd_clone_subparts (arginfo[i].vectype)))
4096 return false;
4097 }
4098
4099 fndecl = bestn->decl;
4100 nunits = bestn->simdclone->simdlen;
4101 ncopies = vector_unroll_factor (vf, nunits)(exact_div (vf, nunits).to_constant ());
4102
4103 /* If the function isn't const, only allow it in simd loops where user
4104 has asserted that at least nunits consecutive iterations can be
4105 performed using SIMD instructions. */
4106 if ((loop == NULLnullptr || maybe_lt ((unsigned) loop->safelen, nunits))
4107 && gimple_vuse (stmt))
4108 return false;
4109
4110 /* Sanity check: make sure that at least one copy of the vectorized stmt
4111 needs to be generated. */
4112 gcc_assert (ncopies >= 1)((void)(!(ncopies >= 1) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 4112, __FUNCTION__), 0 : 0))
;
4113
4114 if (!vec_stmt) /* transformation not required. */
4115 {
4116 STMT_VINFO_SIMD_CLONE_INFO (stmt_info)(stmt_info)->simd_clone_info.safe_push (bestn->decl);
4117 for (i = 0; i < nargs; i++)
4118 if ((bestn->simdclone->args[i].arg_type
4119 == SIMD_CLONE_ARG_TYPE_LINEAR_CONSTANT_STEP)
4120 || (bestn->simdclone->args[i].arg_type
4121 == SIMD_CLONE_ARG_TYPE_LINEAR_REF_CONSTANT_STEP))
4122 {
4123 STMT_VINFO_SIMD_CLONE_INFO (stmt_info)(stmt_info)->simd_clone_info.safe_grow_cleared (i * 3
4124 + 1,
4125 true);
4126 STMT_VINFO_SIMD_CLONE_INFO (stmt_info)(stmt_info)->simd_clone_info.safe_push (arginfo[i].op);
4127 tree lst = POINTER_TYPE_P (TREE_TYPE (arginfo[i].op))(((enum tree_code) (((contains_struct_check ((arginfo[i].op),
(TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 4127, __FUNCTION__))->typed.type))->base.code) == POINTER_TYPE
|| ((enum tree_code) (((contains_struct_check ((arginfo[i].op
), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 4127, __FUNCTION__))->typed.type))->base.code) == REFERENCE_TYPE
)
4128 ? size_type_nodeglobal_trees[TI_SIZE_TYPE] : TREE_TYPE (arginfo[i].op)((contains_struct_check ((arginfo[i].op), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 4128, __FUNCTION__))->typed.type)
;
4129 tree ls = build_int_cst (lst, arginfo[i].linear_step);
4130 STMT_VINFO_SIMD_CLONE_INFO (stmt_info)(stmt_info)->simd_clone_info.safe_push (ls);
4131 tree sll = arginfo[i].simd_lane_linear
4132 ? boolean_true_nodeglobal_trees[TI_BOOLEAN_TRUE] : boolean_false_nodeglobal_trees[TI_BOOLEAN_FALSE];
4133 STMT_VINFO_SIMD_CLONE_INFO (stmt_info)(stmt_info)->simd_clone_info.safe_push (sll);
4134 }
4135 STMT_VINFO_TYPE (stmt_info)(stmt_info)->type = call_simd_clone_vec_info_type;
4136 DUMP_VECT_SCOPE ("vectorizable_simd_clone_call")auto_dump_scope scope ("vectorizable_simd_clone_call", vect_location
)
;
4137/* vect_model_simple_cost (vinfo, stmt_info, ncopies,
4138 dt, slp_node, cost_vec); */
4139 return true;
4140 }
4141
4142 /* Transform. */
4143
4144 if (dump_enabled_p ())
4145 dump_printf_loc (MSG_NOTE, vect_location, "transform call.\n");
4146
4147 /* Handle def. */
4148 scalar_dest = gimple_call_lhs (stmt);
4149 vec_dest = NULL_TREE(tree) nullptr;
4150 rtype = NULL_TREE(tree) nullptr;
4151 ratype = NULL_TREE(tree) nullptr;
4152 if (scalar_dest)
4153 {
4154 vec_dest = vect_create_destination_var (scalar_dest, vectype);
4155 rtype = TREE_TYPE (TREE_TYPE (fndecl))((contains_struct_check ((((contains_struct_check ((fndecl), (
TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 4155, __FUNCTION__))->typed.type)), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 4155, __FUNCTION__))->typed.type)
;
4156 if (TREE_CODE (rtype)((enum tree_code) (rtype)->base.code) == ARRAY_TYPE)
4157 {
4158 ratype = rtype;
4159 rtype = TREE_TYPE (ratype)((contains_struct_check ((ratype), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 4159, __FUNCTION__))->typed.type)
;
4160 }
4161 }
4162
4163 auto_vec<vec<tree> > vec_oprnds;
4164 auto_vec<unsigned> vec_oprnds_i;
4165 vec_oprnds.safe_grow_cleared (nargs, true);
4166 vec_oprnds_i.safe_grow_cleared (nargs, true);
4167 for (j = 0; j < ncopies; ++j)
4168 {
4169 /* Build argument list for the vectorized call. */
4170 if (j == 0)
4171 vargs.create (nargs);
4172 else
4173 vargs.truncate (0);
4174
4175 for (i = 0; i < nargs; i++)
4176 {
4177 unsigned int k, l, m, o;
4178 tree atype;
4179 op = gimple_call_arg (stmt, i);
4180 switch (bestn->simdclone->args[i].arg_type)
4181 {
4182 case SIMD_CLONE_ARG_TYPE_VECTOR:
4183 atype = bestn->simdclone->args[i].vector_type;
4184 o = vector_unroll_factor (nunits,(exact_div (nunits, simd_clone_subparts (atype)).to_constant (
))
4185 simd_clone_subparts (atype))(exact_div (nunits, simd_clone_subparts (atype)).to_constant (
))
;
4186 for (m = j * o; m < (j + 1) * o; m++)
4187 {
4188 if (simd_clone_subparts (atype)
4189 < simd_clone_subparts (arginfo[i].vectype))
4190 {
4191 poly_uint64 prec = GET_MODE_BITSIZE (TYPE_MODE (atype)((((enum tree_code) ((tree_class_check ((atype), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 4191, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(atype) : (atype)->type_common.mode)
);
4192 k = (simd_clone_subparts (arginfo[i].vectype)
4193 / simd_clone_subparts (atype));
4194 gcc_assert ((k & (k - 1)) == 0)((void)(!((k & (k - 1)) == 0) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 4194, __FUNCTION__), 0 : 0))
;
4195 if (m == 0)
4196 {
4197 vect_get_vec_defs_for_operand (vinfo, stmt_info,
4198 ncopies * o / k, op,
4199 &vec_oprnds[i]);
4200 vec_oprnds_i[i] = 0;
4201 vec_oprnd0 = vec_oprnds[i][vec_oprnds_i[i]++];
4202 }
4203 else
4204 {
4205 vec_oprnd0 = arginfo[i].op;
4206 if ((m & (k - 1)) == 0)
4207 vec_oprnd0 = vec_oprnds[i][vec_oprnds_i[i]++];
4208 }
4209 arginfo[i].op = vec_oprnd0;
4210 vec_oprnd0
4211 = build3 (BIT_FIELD_REF, atype, vec_oprnd0,
4212 bitsize_int (prec)size_int_kind (prec, stk_bitsizetype),
4213 bitsize_int ((m & (k - 1)) * prec)size_int_kind ((m & (k - 1)) * prec, stk_bitsizetype));
4214 gassign *new_stmt
4215 = gimple_build_assign (make_ssa_name (atype),
4216 vec_oprnd0);
4217 vect_finish_stmt_generation (vinfo, stmt_info,
4218 new_stmt, gsi);
4219 vargs.safe_push (gimple_assign_lhs (new_stmt));
4220 }
4221 else
4222 {
4223 k = (simd_clone_subparts (atype)
4224 / simd_clone_subparts (arginfo[i].vectype));
4225 gcc_assert ((k & (k - 1)) == 0)((void)(!((k & (k - 1)) == 0) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 4225, __FUNCTION__), 0 : 0))
;
4226 vec<constructor_elt, va_gc> *ctor_elts;
4227 if (k != 1)
4228 vec_alloc (ctor_elts, k);
4229 else
4230 ctor_elts = NULLnullptr;
4231 for (l = 0; l < k; l++)
4232 {
4233 if (m == 0 && l == 0)
4234 {
4235 vect_get_vec_defs_for_operand (vinfo, stmt_info,
4236 k * o * ncopies,
4237 op,
4238 &vec_oprnds[i]);
4239 vec_oprnds_i[i] = 0;
4240 vec_oprnd0 = vec_oprnds[i][vec_oprnds_i[i]++];
4241 }
4242 else
4243 vec_oprnd0 = vec_oprnds[i][vec_oprnds_i[i]++];
4244 arginfo[i].op = vec_oprnd0;
4245 if (k == 1)
4246 break;
4247 CONSTRUCTOR_APPEND_ELT (ctor_elts, NULL_TREE,do { constructor_elt _ce___ = {(tree) nullptr, vec_oprnd0}; vec_safe_push
((ctor_elts), _ce___); } while (0)
4248 vec_oprnd0)do { constructor_elt _ce___ = {(tree) nullptr, vec_oprnd0}; vec_safe_push
((ctor_elts), _ce___); } while (0)
;
4249 }
4250 if (k == 1)
4251 if (!useless_type_conversion_p (TREE_TYPE (vec_oprnd0)((contains_struct_check ((vec_oprnd0), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 4251, __FUNCTION__))->typed.type)
,
4252 atype))
4253 {
4254 vec_oprnd0
4255 = build1 (VIEW_CONVERT_EXPR, atype, vec_oprnd0);
4256 gassign *new_stmt
4257 = gimple_build_assign (make_ssa_name (atype),
4258 vec_oprnd0);
4259 vect_finish_stmt_generation (vinfo, stmt_info,
4260 new_stmt, gsi);
4261 vargs.safe_push (gimple_assign_lhs (new_stmt));
4262 }
4263 else
4264 vargs.safe_push (vec_oprnd0);
4265 else
4266 {
4267 vec_oprnd0 = build_constructor (atype, ctor_elts);
4268 gassign *new_stmt
4269 = gimple_build_assign (make_ssa_name (atype),
4270 vec_oprnd0);
4271 vect_finish_stmt_generation (vinfo, stmt_info,
4272 new_stmt, gsi);
4273 vargs.safe_push (gimple_assign_lhs (new_stmt));
4274 }
4275 }
4276 }
4277 break;
4278 case SIMD_CLONE_ARG_TYPE_UNIFORM:
4279 vargs.safe_push (op);
4280 break;
4281 case SIMD_CLONE_ARG_TYPE_LINEAR_CONSTANT_STEP:
4282 case SIMD_CLONE_ARG_TYPE_LINEAR_REF_CONSTANT_STEP:
4283 if (j == 0)
4284 {
4285 gimple_seq stmts;
4286 arginfo[i].op
4287 = force_gimple_operand (unshare_expr (arginfo[i].op),
4288 &stmts, true, NULL_TREE(tree) nullptr);
4289 if (stmts != NULLnullptr)
4290 {
4291 basic_block new_bb;
4292 edge pe = loop_preheader_edge (loop);
4293 new_bb = gsi_insert_seq_on_edge_immediate (pe, stmts);
4294 gcc_assert (!new_bb)((void)(!(!new_bb) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 4294, __FUNCTION__), 0 : 0))
;
4295 }
4296 if (arginfo[i].simd_lane_linear)
4297 {
4298 vargs.safe_push (arginfo[i].op);
4299 break;
4300 }
4301 tree phi_res = copy_ssa_name (op);
4302 gphi *new_phi = create_phi_node (phi_res, loop->header);
4303 add_phi_arg (new_phi, arginfo[i].op,
4304 loop_preheader_edge (loop), UNKNOWN_LOCATION((location_t) 0));
4305 enum tree_code code
4306 = POINTER_TYPE_P (TREE_TYPE (op))(((enum tree_code) (((contains_struct_check ((op), (TS_TYPED)
, "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 4306, __FUNCTION__))->typed.type))->base.code) == POINTER_TYPE
|| ((enum tree_code) (((contains_struct_check ((op), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 4306, __FUNCTION__))->typed.type))->base.code) == REFERENCE_TYPE
)
4307 ? POINTER_PLUS_EXPR : PLUS_EXPR;
4308 tree type = POINTER_TYPE_P (TREE_TYPE (op))(((enum tree_code) (((contains_struct_check ((op), (TS_TYPED)
, "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 4308, __FUNCTION__))->typed.type))->base.code) == POINTER_TYPE
|| ((enum tree_code) (((contains_struct_check ((op), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 4308, __FUNCTION__))->typed.type))->base.code) == REFERENCE_TYPE
)
4309 ? sizetypesizetype_tab[(int) stk_sizetype] : TREE_TYPE (op)((contains_struct_check ((op), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 4309, __FUNCTION__))->typed.type)
;
4310 poly_widest_int cst
4311 = wi::mul (bestn->simdclone->args[i].linear_step,
4312 ncopies * nunits);
4313 tree tcst = wide_int_to_tree (type, cst);
4314 tree phi_arg = copy_ssa_name (op);
4315 gassign *new_stmt
4316 = gimple_build_assign (phi_arg, code, phi_res, tcst);
4317 gimple_stmt_iterator si = gsi_after_labels (loop->header);
4318 gsi_insert_after (&si, new_stmt, GSI_NEW_STMT);
4319 add_phi_arg (new_phi, phi_arg, loop_latch_edge (loop),
4320 UNKNOWN_LOCATION((location_t) 0));
4321 arginfo[i].op = phi_res;
4322 vargs.safe_push (phi_res);
4323 }
4324 else
4325 {
4326 enum tree_code code
4327 = POINTER_TYPE_P (TREE_TYPE (op))(((enum tree_code) (((contains_struct_check ((op), (TS_TYPED)
, "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 4327, __FUNCTION__))->typed.type))->base.code) == POINTER_TYPE
|| ((enum tree_code) (((contains_struct_check ((op), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 4327, __FUNCTION__))->typed.type))->base.code) == REFERENCE_TYPE
)
4328 ? POINTER_PLUS_EXPR : PLUS_EXPR;
4329 tree type = POINTER_TYPE_P (TREE_TYPE (op))(((enum tree_code) (((contains_struct_check ((op), (TS_TYPED)
, "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 4329, __FUNCTION__))->typed.type))->base.code) == POINTER_TYPE
|| ((enum tree_code) (((contains_struct_check ((op), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 4329, __FUNCTION__))->typed.type))->base.code) == REFERENCE_TYPE
)
4330 ? sizetypesizetype_tab[(int) stk_sizetype] : TREE_TYPE (op)((contains_struct_check ((op), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 4330, __FUNCTION__))->typed.type)
;
4331 poly_widest_int cst
4332 = wi::mul (bestn->simdclone->args[i].linear_step,
4333 j * nunits);
4334 tree tcst = wide_int_to_tree (type, cst);
4335 new_temp = make_ssa_name (TREE_TYPE (op)((contains_struct_check ((op), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 4335, __FUNCTION__))->typed.type)
);
4336 gassign *new_stmt
4337 = gimple_build_assign (new_temp, code,
4338 arginfo[i].op, tcst);
4339 vect_finish_stmt_generation (vinfo, stmt_info, new_stmt, gsi);
4340 vargs.safe_push (new_temp);
4341 }
4342 break;
4343 case SIMD_CLONE_ARG_TYPE_LINEAR_VAL_CONSTANT_STEP:
4344 case SIMD_CLONE_ARG_TYPE_LINEAR_UVAL_CONSTANT_STEP:
4345 case SIMD_CLONE_ARG_TYPE_LINEAR_VARIABLE_STEP:
4346 case SIMD_CLONE_ARG_TYPE_LINEAR_REF_VARIABLE_STEP:
4347 case SIMD_CLONE_ARG_TYPE_LINEAR_VAL_VARIABLE_STEP:
4348 case SIMD_CLONE_ARG_TYPE_LINEAR_UVAL_VARIABLE_STEP:
4349 default:
4350 gcc_unreachable ()(fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 4350, __FUNCTION__))
;
4351 }
4352 }
4353
4354 gcall *new_call = gimple_build_call_vec (fndecl, vargs);
4355 if (vec_dest)
4356 {
4357 gcc_assert (ratype((void)(!(ratype || (!maybe_ne (simd_clone_subparts (rtype), nunits
))) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 4358, __FUNCTION__), 0 : 0))
4358 || known_eq (simd_clone_subparts (rtype), nunits))((void)(!(ratype || (!maybe_ne (simd_clone_subparts (rtype), nunits
))) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 4358, __FUNCTION__), 0 : 0))
;
4359 if (ratype)
4360 new_temp = create_tmp_var (ratype);
4361 else if (useless_type_conversion_p (vectype, rtype))
4362 new_temp = make_ssa_name (vec_dest, new_call);
4363 else
4364 new_temp = make_ssa_name (rtype, new_call);
4365 gimple_call_set_lhs (new_call, new_temp);
4366 }
4367 vect_finish_stmt_generation (vinfo, stmt_info, new_call, gsi);
4368 gimple *new_stmt = new_call;
4369
4370 if (vec_dest)
4371 {
4372 if (!multiple_p (simd_clone_subparts (vectype), nunits))
4373 {
4374 unsigned int k, l;
4375 poly_uint64 prec = GET_MODE_BITSIZE (TYPE_MODE (vectype)((((enum tree_code) ((tree_class_check ((vectype), (tcc_type)
, "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 4375, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(vectype) : (vectype)->type_common.mode)
);
4376 poly_uint64 bytes = GET_MODE_SIZE (TYPE_MODE (vectype)((((enum tree_code) ((tree_class_check ((vectype), (tcc_type)
, "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 4376, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(vectype) : (vectype)->type_common.mode)
);
4377 k = vector_unroll_factor (nunits,(exact_div (nunits, simd_clone_subparts (vectype)).to_constant
())
4378 simd_clone_subparts (vectype))(exact_div (nunits, simd_clone_subparts (vectype)).to_constant
())
;
4379 gcc_assert ((k & (k - 1)) == 0)((void)(!((k & (k - 1)) == 0) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 4379, __FUNCTION__), 0 : 0))
;
4380 for (l = 0; l < k; l++)
4381 {
4382 tree t;
4383 if (ratype)
4384 {
4385 t = build_fold_addr_expr (new_temp)build_fold_addr_expr_loc (((location_t) 0), (new_temp));
4386 t = build2 (MEM_REF, vectype, t,
4387 build_int_cst (TREE_TYPE (t)((contains_struct_check ((t), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 4387, __FUNCTION__))->typed.type)
, l * bytes));
4388 }
4389 else
4390 t = build3 (BIT_FIELD_REF, vectype, new_temp,
4391 bitsize_int (prec)size_int_kind (prec, stk_bitsizetype), bitsize_int (l * prec)size_int_kind (l * prec, stk_bitsizetype));
4392 new_stmt = gimple_build_assign (make_ssa_name (vectype), t);
4393 vect_finish_stmt_generation (vinfo, stmt_info, new_stmt, gsi);
4394
4395 if (j == 0 && l == 0)
4396 *vec_stmt = new_stmt;
4397 STMT_VINFO_VEC_STMTS (stmt_info)(stmt_info)->vec_stmts.safe_push (new_stmt);
4398 }
4399
4400 if (ratype)
4401 vect_clobber_variable (vinfo, stmt_info, gsi, new_temp);
4402 continue;
4403 }
4404 else if (!multiple_p (nunits, simd_clone_subparts (vectype)))
4405 {
4406 unsigned int k = (simd_clone_subparts (vectype)
4407 / simd_clone_subparts (rtype));
4408 gcc_assert ((k & (k - 1)) == 0)((void)(!((k & (k - 1)) == 0) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 4408, __FUNCTION__), 0 : 0))
;
4409 if ((j & (k - 1)) == 0)
4410 vec_alloc (ret_ctor_elts, k);
4411 if (ratype)
4412 {
4413 unsigned int m, o;
4414 o = vector_unroll_factor (nunits,(exact_div (nunits, simd_clone_subparts (rtype)).to_constant (
))
4415 simd_clone_subparts (rtype))(exact_div (nunits, simd_clone_subparts (rtype)).to_constant (
))
;
4416 for (m = 0; m < o; m++)
4417 {
4418 tree tem = build4 (ARRAY_REF, rtype, new_temp,
4419 size_int (m)size_int_kind (m, stk_sizetype), NULL_TREE(tree) nullptr, NULL_TREE(tree) nullptr);
4420 new_stmt = gimple_build_assign (make_ssa_name (rtype),
4421 tem);
4422 vect_finish_stmt_generation (vinfo, stmt_info,
4423 new_stmt, gsi);
4424 CONSTRUCTOR_APPEND_ELT (ret_ctor_elts, NULL_TREE,do { constructor_elt _ce___ = {(tree) nullptr, gimple_assign_lhs
(new_stmt)}; vec_safe_push ((ret_ctor_elts), _ce___); } while
(0)
4425 gimple_assign_lhs (new_stmt))do { constructor_elt _ce___ = {(tree) nullptr, gimple_assign_lhs
(new_stmt)}; vec_safe_push ((ret_ctor_elts), _ce___); } while
(0)
;
4426 }
4427 vect_clobber_variable (vinfo, stmt_info, gsi, new_temp);
4428 }
4429 else
4430 CONSTRUCTOR_APPEND_ELT (ret_ctor_elts, NULL_TREE, new_temp)do { constructor_elt _ce___ = {(tree) nullptr, new_temp}; vec_safe_push
((ret_ctor_elts), _ce___); } while (0)
;
4431 if ((j & (k - 1)) != k - 1)
4432 continue;
4433 vec_oprnd0 = build_constructor (vectype, ret_ctor_elts);
4434 new_stmt
4435 = gimple_build_assign (make_ssa_name (vec_dest), vec_oprnd0);
4436 vect_finish_stmt_generation (vinfo, stmt_info, new_stmt, gsi);
4437
4438 if ((unsigned) j == k - 1)
4439 *vec_stmt = new_stmt;
4440 STMT_VINFO_VEC_STMTS (stmt_info)(stmt_info)->vec_stmts.safe_push (new_stmt);
4441 continue;
4442 }
4443 else if (ratype)
4444 {
4445 tree t = build_fold_addr_expr (new_temp)build_fold_addr_expr_loc (((location_t) 0), (new_temp));
4446 t = build2 (MEM_REF, vectype, t,
4447 build_int_cst (TREE_TYPE (t)((contains_struct_check ((t), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 4447, __FUNCTION__))->typed.type)
, 0));
4448 new_stmt = gimple_build_assign (make_ssa_name (vec_dest), t);
4449 vect_finish_stmt_generation (vinfo, stmt_info, new_stmt, gsi);
4450 vect_clobber_variable (vinfo, stmt_info, gsi, new_temp);
4451 }
4452 else if (!useless_type_conversion_p (vectype, rtype))
4453 {
4454 vec_oprnd0 = build1 (VIEW_CONVERT_EXPR, vectype, new_temp);
4455 new_stmt
4456 = gimple_build_assign (make_ssa_name (vec_dest), vec_oprnd0);
4457 vect_finish_stmt_generation (vinfo, stmt_info, new_stmt, gsi);
4458 }
4459 }
4460
4461 if (j == 0)
4462 *vec_stmt = new_stmt;
4463 STMT_VINFO_VEC_STMTS (stmt_info)(stmt_info)->vec_stmts.safe_push (new_stmt);
4464 }
4465
4466 for (i = 0; i < nargs; ++i)
4467 {
4468 vec<tree> oprndsi = vec_oprnds[i];
4469 oprndsi.release ();
4470 }
4471 vargs.release ();
4472
4473 /* The call in STMT might prevent it from being removed in dce.
4474 We however cannot remove it here, due to the way the ssa name
4475 it defines is mapped to the new definition. So just replace
4476 rhs of the statement with something harmless. */
4477
4478 if (slp_node)
4479 return true;
4480
4481 gimple *new_stmt;
4482 if (scalar_dest)
4483 {
4484 type = TREE_TYPE (scalar_dest)((contains_struct_check ((scalar_dest), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 4484, __FUNCTION__))->typed.type)
;
4485 lhs = gimple_call_lhs (vect_orig_stmt (stmt_info)->stmt);
4486 new_stmt = gimple_build_assign (lhs, build_zero_cst (type));
4487 }
4488 else
4489 new_stmt = gimple_build_nop ();
4490 vinfo->replace_stmt (gsi, vect_orig_stmt (stmt_info), new_stmt);
4491 unlink_stmt_vdef (stmt);
4492
4493 return true;
4494}
4495
4496
/* Function vect_gen_widened_results_half

   Create a vector stmt whose code is CODE, whose arity is OP_TYPE, whose
   result variable is VEC_DEST, and whose operands are VEC_OPRND0 and
   VEC_OPRND1 (VEC_OPRND1 is ignored unless CODE is a binary operation).
   The new vector stmt is to be inserted at GSI.
4505
4506static gimple *
4507vect_gen_widened_results_half (vec_info *vinfo, enum tree_code code,
4508 tree vec_oprnd0, tree vec_oprnd1, int op_type,
4509 tree vec_dest, gimple_stmt_iterator *gsi,
4510 stmt_vec_info stmt_info)
4511{
4512 gimple *new_stmt;
4513 tree new_temp;
4514
4515 /* Generate half of the widened result: */
4516 gcc_assert (op_type == TREE_CODE_LENGTH (code))((void)(!(op_type == tree_code_length[(int) (code)]) ? fancy_abort
("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 4516, __FUNCTION__), 0 : 0))
;
4517 if (op_type != binary_op)
4518 vec_oprnd1 = NULLnullptr;
4519 new_stmt = gimple_build_assign (vec_dest, code, vec_oprnd0, vec_oprnd1);
4520 new_temp = make_ssa_name (vec_dest, new_stmt);
4521 gimple_assign_set_lhs (new_stmt, new_temp);
4522 vect_finish_stmt_generation (vinfo, stmt_info, new_stmt, gsi);
4523
4524 return new_stmt;
4525}
4526
4527
4528/* Create vectorized demotion statements for vector operands from VEC_OPRNDS.
4529 For multi-step conversions store the resulting vectors and call the function
4530 recursively. */
4531
4532static void
4533vect_create_vectorized_demotion_stmts (vec_info *vinfo, vec<tree> *vec_oprnds,
4534 int multi_step_cvt,
4535 stmt_vec_info stmt_info,
4536 vec<tree> &vec_dsts,
4537 gimple_stmt_iterator *gsi,
4538 slp_tree slp_node, enum tree_code code)
4539{
4540 unsigned int i;
4541 tree vop0, vop1, new_tmp, vec_dest;
4542
 /* VEC_DSTS is used as a stack of destination vars, one per conversion
 step; pop the destination for this step (it is pushed back below). */
4543 vec_dest = vec_dsts.pop ();
4544
 /* Each demotion statement packs a pair of input vectors into one
 narrower result, hence the loop advances by two. */
4545 for (i = 0; i < vec_oprnds->length (); i += 2)
4546 {
4547 /* Create demotion operation. */
4548 vop0 = (*vec_oprnds)[i];
4549 vop1 = (*vec_oprnds)[i + 1];
4550 gassign *new_stmt = gimple_build_assign (vec_dest, code, vop0, vop1);
4551 new_tmp = make_ssa_name (vec_dest, new_stmt);
4552 gimple_assign_set_lhs (new_stmt, new_tmp);
4553 vect_finish_stmt_generation (vinfo, stmt_info, new_stmt, gsi);
4554
4555 if (multi_step_cvt)
4556 /* Store the resulting vector for next recursive call. */
4557 (*vec_oprnds)[i/2] = new_tmp;
4558 else
4559 {
4560 /* This is the last step of the conversion sequence. Store the
4561 vectors in SLP_NODE or in vector info of the scalar statement
4562 (or in STMT_VINFO_RELATED_STMT chain). */
4563 if (slp_node)
4564 SLP_TREE_VEC_STMTS (slp_node)(slp_node)->vec_stmts.quick_push (new_stmt);
4565 else
4566 STMT_VINFO_VEC_STMTS (stmt_info)(stmt_info)->vec_stmts.safe_push (new_stmt);
4567 }
4568 }
4569
4570 /* For multi-step demotion operations we first generate demotion operations
4571 from the source type to the intermediate types, and then combine the
4572 results (stored in VEC_OPRNDS) in demotion operation to the destination
4573 type. */
4574 if (multi_step_cvt)
4575 {
4576 /* At each level of recursion we have half of the operands we had at the
4577 previous level. */
4578 vec_oprnds->truncate ((i+1)/2);
4579 vect_create_vectorized_demotion_stmts (vinfo, vec_oprnds,
4580 multi_step_cvt - 1,
4581 stmt_info, vec_dsts, gsi,
4582 slp_node, VEC_PACK_TRUNC_EXPR);
4583 }
4584
 /* Push the destination back so VEC_DSTS is unchanged for the caller
 (and for sibling recursive invocations). */
4585 vec_dsts.quick_push (vec_dest);
4586}
4587
4588
4589/* Create vectorized promotion statements for vector operands from VEC_OPRNDS0
4590 and VEC_OPRNDS1, for a binary operation associated with scalar statement
4591 STMT_INFO. For multi-step conversions store the resulting vectors and
4592 call the function recursively. The widened results are returned to the
4593 caller by replacing *VEC_OPRNDS0 (the old vector is released). */
4592
4594static void
4595vect_create_vectorized_promotion_stmts (vec_info *vinfo,
4596 vec<tree> *vec_oprnds0,
4597 vec<tree> *vec_oprnds1,
4598 stmt_vec_info stmt_info, tree vec_dest,
4599 gimple_stmt_iterator *gsi,
4600 enum tree_code code1,
4601 enum tree_code code2, int op_type)
4602{
4603 int i;
4604 tree vop0, vop1, new_tmp1, new_tmp2;
4605 gimple *new_stmt1, *new_stmt2;
4606 vec<tree> vec_tmp = vNULL;
4607
 /* Each input vector produces two widened halves, so reserve twice the
 number of inputs. */
4608 vec_tmp.create (vec_oprnds0->length () * 2);
4609 FOR_EACH_VEC_ELT (*vec_oprnds0, i, vop0)for (i = 0; (*vec_oprnds0).iterate ((i), &(vop0)); ++(i))
4610 {
4611 if (op_type == binary_op)
4612 vop1 = (*vec_oprnds1)[i];
4613 else
4614 vop1 = NULL_TREE(tree) nullptr;
4615
4616 /* Generate the two halves of promotion operation. */
4617 new_stmt1 = vect_gen_widened_results_half (vinfo, code1, vop0, vop1,
4618 op_type, vec_dest, gsi,
4619 stmt_info);
4620 new_stmt2 = vect_gen_widened_results_half (vinfo, code2, vop0, vop1,
4621 op_type, vec_dest, gsi,
4622 stmt_info);
 /* The generated statement may be a call or an assignment; fetch the
 defined value accordingly. */
4623 if (is_gimple_call (new_stmt1))
4624 {
4625 new_tmp1 = gimple_call_lhs (new_stmt1);
4626 new_tmp2 = gimple_call_lhs (new_stmt2);
4627 }
4628 else
4629 {
4630 new_tmp1 = gimple_assign_lhs (new_stmt1);
4631 new_tmp2 = gimple_assign_lhs (new_stmt2);
4632 }
4633
4634 /* Store the results for the next step. */
4635 vec_tmp.quick_push (new_tmp1);
4636 vec_tmp.quick_push (new_tmp2);
4637 }
4638
 /* Hand the widened vectors back through *VEC_OPRNDS0. */
4639 vec_oprnds0->release ();
4640 *vec_oprnds0 = vec_tmp;
4641}
4642
4643/* Create vectorized promotion stmts for widening stmts using only half the
4644 potential vector size for input. The widened results replace
4645 *VEC_OPRNDS0 (the old vector is released). */
4645static void
4646vect_create_half_widening_stmts (vec_info *vinfo,
4647 vec<tree> *vec_oprnds0,
4648 vec<tree> *vec_oprnds1,
4649 stmt_vec_info stmt_info, tree vec_dest,
4650 gimple_stmt_iterator *gsi,
4651 enum tree_code code1,
4652 int op_type)
4653{
4654 int i;
4655 tree vop0, vop1;
4656 gimple *new_stmt1;
4657 gimple *new_stmt2;
4658 gimple *new_stmt3;
4659 vec<tree> vec_tmp = vNULL;
4660
 /* Unlike full promotion, each input yields exactly one result vector. */
4661 vec_tmp.create (vec_oprnds0->length ());
4662 FOR_EACH_VEC_ELT (*vec_oprnds0, i, vop0)for (i = 0; (*vec_oprnds0).iterate ((i), &(vop0)); ++(i))
4663 {
4664 tree new_tmp1, new_tmp2, new_tmp3, out_type;
4665
4666 gcc_assert (op_type == binary_op)((void)(!(op_type == binary_op) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 4666, __FUNCTION__), 0 : 0))
;
4667 vop1 = (*vec_oprnds1)[i];
4668
4669 /* Widen the first vector input. */
4670 out_type = TREE_TYPE (vec_dest)((contains_struct_check ((vec_dest), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 4670, __FUNCTION__))->typed.type)
;
4671 new_tmp1 = make_ssa_name (out_type);
4672 new_stmt1 = gimple_build_assign (new_tmp1, NOP_EXPR, vop0);
4673 vect_finish_stmt_generation (vinfo, stmt_info, new_stmt1, gsi);
 /* Only a vector second operand needs explicit widening; a non-vector
 operand is used in the operation as-is. */
4674 if (VECTOR_TYPE_P (TREE_TYPE (vop1))(((enum tree_code) (((contains_struct_check ((vop1), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 4674, __FUNCTION__))->typed.type))->base.code) == VECTOR_TYPE
)
)
4675 {
4676 /* Widen the second vector input. */
4677 new_tmp2 = make_ssa_name (out_type);
4678 new_stmt2 = gimple_build_assign (new_tmp2, NOP_EXPR, vop1);
4679 vect_finish_stmt_generation (vinfo, stmt_info, new_stmt2, gsi);
4680 /* Perform the operation. With both vector inputs widened. */
4681 new_stmt3 = gimple_build_assign (vec_dest, code1, new_tmp1, new_tmp2);
4682 }
4683 else
4684 {
4685 /* Perform the operation. With the single vector input widened. */
4686 new_stmt3 = gimple_build_assign (vec_dest, code1, new_tmp1, vop1);
4687 }
4688
4689 new_tmp3 = make_ssa_name (vec_dest, new_stmt3);
4690 gimple_assign_set_lhs (new_stmt3, new_tmp3);
4691 vect_finish_stmt_generation (vinfo, stmt_info, new_stmt3, gsi);
4692
4693 /* Store the results for the next step. */
4694 vec_tmp.quick_push (new_tmp3);
4695 }
4696
 /* Hand the result vectors back through *VEC_OPRNDS0. */
4697 vec_oprnds0->release ();
4698 *vec_oprnds0 = vec_tmp;
4699}
4700
4701
4702/* Check if STMT_INFO performs a conversion operation that can be vectorized.
4703 If VEC_STMT is also passed, vectorize STMT_INFO: create a vectorized
4704 stmt to replace it, put it in VEC_STMT, and insert it at GSI.
4705 Return true if STMT_INFO is vectorizable in this way. */
4706
4707static bool
4708vectorizable_conversion (vec_info *vinfo,
4709 stmt_vec_info stmt_info, gimple_stmt_iterator *gsi,
4710 gimple **vec_stmt, slp_tree slp_node,
4711 stmt_vector_for_cost *cost_vec)
4712{
4713 tree vec_dest;
4714 tree scalar_dest;
4715 tree op0, op1 = NULL_TREE(tree) nullptr;
4716 loop_vec_info loop_vinfo = dyn_cast <loop_vec_info> (vinfo);
4717 enum tree_code code, code1 = ERROR_MARK, code2 = ERROR_MARK;
4718 enum tree_code codecvt1 = ERROR_MARK, codecvt2 = ERROR_MARK;
4719 tree new_temp;
4720 enum vect_def_type dt[2] = {vect_unknown_def_type, vect_unknown_def_type};
4721 int ndts = 2;
4722 poly_uint64 nunits_in;
4723 poly_uint64 nunits_out;
4724 tree vectype_out, vectype_in;
4725 int ncopies, i;
4726 tree lhs_type, rhs_type;
4727 enum { NARROW, NONE, WIDEN } modifier;
4728 vec<tree> vec_oprnds0 = vNULL;
4729 vec<tree> vec_oprnds1 = vNULL;
4730 tree vop0;
4731 bb_vec_info bb_vinfo = dyn_cast <bb_vec_info> (vinfo);
4732 int multi_step_cvt = 0;
4733 vec<tree> interm_types = vNULL;
4734 tree intermediate_type, cvt_type = NULL_TREE(tree) nullptr;
4735 int op_type;
4736 unsigned short fltsz;
4737
4738 /* Is STMT a vectorizable conversion? */
4739
4740 if (!STMT_VINFO_RELEVANT_P (stmt_info)((stmt_info)->relevant != vect_unused_in_scope) && !bb_vinfo)
4741 return false;
4742
4743 if (STMT_VINFO_DEF_TYPE (stmt_info)(stmt_info)->def_type != vect_internal_def
4744 && ! vec_stmt)
4745 return false;
4746
4747 gassign *stmt = dyn_cast <gassign *> (stmt_info->stmt);
4748 if (!stmt)
4749 return false;
4750
4751 if (TREE_CODE (gimple_assign_lhs (stmt))((enum tree_code) (gimple_assign_lhs (stmt))->base.code) != SSA_NAME)
4752 return false;
4753
 /* Only conversion codes and the widening arithmetic codes are handled
 here; everything else is some other vectorizable_* case. */
4754 code = gimple_assign_rhs_code (stmt);
4755 if (!CONVERT_EXPR_CODE_P (code)((code) == NOP_EXPR || (code) == CONVERT_EXPR)
4756 && code != FIX_TRUNC_EXPR
4757 && code != FLOAT_EXPR
4758 && code != WIDEN_PLUS_EXPR
4759 && code != WIDEN_MINUS_EXPR
4760 && code != WIDEN_MULT_EXPR
4761 && code != WIDEN_LSHIFT_EXPR)
4762 return false;
4763
4764 bool widen_arith = (code == WIDEN_PLUS_EXPR
4765 || code == WIDEN_MINUS_EXPR
4766 || code == WIDEN_MULT_EXPR
4767 || code == WIDEN_LSHIFT_EXPR);
4768 op_type = TREE_CODE_LENGTH (code)tree_code_length[(int) (code)];
4769
4770 /* Check types of lhs and rhs. */
4771 scalar_dest = gimple_assign_lhs (stmt);
4772 lhs_type = TREE_TYPE (scalar_dest)((contains_struct_check ((scalar_dest), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 4772, __FUNCTION__))->typed.type)
;
4773 vectype_out = STMT_VINFO_VECTYPE (stmt_info)(stmt_info)->vectype;
4774
4775 /* Check the operands of the operation. */
4776 slp_tree slp_op0, slp_op1 = NULLnullptr;
4777 if (!vect_is_simple_use (vinfo, stmt_info, slp_node,
4778 0, &op0, &slp_op0, &dt[0], &vectype_in))
4779 {
4780 if (dump_enabled_p ())
4781 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
4782 "use not simple.\n");
4783 return false;
4784 }
4785
 /* Except for explicit int<->float conversions, both sides must be
 integral or both floating-point. */
4786 rhs_type = TREE_TYPE (op0)((contains_struct_check ((op0), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 4786, __FUNCTION__))->typed.type)
;
4787 if ((code != FIX_TRUNC_EXPR && code != FLOAT_EXPR)
4788 && !((INTEGRAL_TYPE_P (lhs_type)(((enum tree_code) (lhs_type)->base.code) == ENUMERAL_TYPE
|| ((enum tree_code) (lhs_type)->base.code) == BOOLEAN_TYPE
|| ((enum tree_code) (lhs_type)->base.code) == INTEGER_TYPE
)
4789 && INTEGRAL_TYPE_P (rhs_type)(((enum tree_code) (rhs_type)->base.code) == ENUMERAL_TYPE
|| ((enum tree_code) (rhs_type)->base.code) == BOOLEAN_TYPE
|| ((enum tree_code) (rhs_type)->base.code) == INTEGER_TYPE
)
)
4790 || (SCALAR_FLOAT_TYPE_P (lhs_type)(((enum tree_code) (lhs_type)->base.code) == REAL_TYPE)
4791 && SCALAR_FLOAT_TYPE_P (rhs_type)(((enum tree_code) (rhs_type)->base.code) == REAL_TYPE))))
4792 return false;
4793
4794 if (!VECTOR_BOOLEAN_TYPE_P (vectype_out)(((enum tree_code) (vectype_out)->base.code) == VECTOR_TYPE
&& ((enum tree_code) (((contains_struct_check ((vectype_out
), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 4794, __FUNCTION__))->typed.type))->base.code) == BOOLEAN_TYPE
)
4795 && ((INTEGRAL_TYPE_P (lhs_type)(((enum tree_code) (lhs_type)->base.code) == ENUMERAL_TYPE
|| ((enum tree_code) (lhs_type)->base.code) == BOOLEAN_TYPE
|| ((enum tree_code) (lhs_type)->base.code) == INTEGER_TYPE
)
4796 && !type_has_mode_precision_p (lhs_type))
4797 || (INTEGRAL_TYPE_P (rhs_type)(((enum tree_code) (rhs_type)->base.code) == ENUMERAL_TYPE
|| ((enum tree_code) (rhs_type)->base.code) == BOOLEAN_TYPE
|| ((enum tree_code) (rhs_type)->base.code) == INTEGER_TYPE
)
4798 && !type_has_mode_precision_p (rhs_type))))
4799 {
4800 if (dump_enabled_p ())
4801 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
4802 "type conversion to/from bit-precision unsupported."
4803 "\n");
4804 return false;
4805 }
4806
4807 if (op_type == binary_op)
4808 {
4809 gcc_assert (code == WIDEN_MULT_EXPR || code == WIDEN_LSHIFT_EXPR((void)(!(code == WIDEN_MULT_EXPR || code == WIDEN_LSHIFT_EXPR
|| code == WIDEN_PLUS_EXPR || code == WIDEN_MINUS_EXPR) ? fancy_abort
("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 4810, __FUNCTION__), 0 : 0))
4810 || code == WIDEN_PLUS_EXPR || code == WIDEN_MINUS_EXPR)((void)(!(code == WIDEN_MULT_EXPR || code == WIDEN_LSHIFT_EXPR
|| code == WIDEN_PLUS_EXPR || code == WIDEN_MINUS_EXPR) ? fancy_abort
("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 4810, __FUNCTION__), 0 : 0))
;
4811
4812 op1 = gimple_assign_rhs2 (stmt);
4813 tree vectype1_in;
4814 if (!vect_is_simple_use (vinfo, stmt_info, slp_node, 1,
4815 &op1, &slp_op1, &dt[1], &vectype1_in))
4816 {
4817 if (dump_enabled_p ())
4818 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
4819 "use not simple.\n");
4820 return false;
4821 }
4822 /* For WIDEN_MULT_EXPR, if OP0 is a constant, use the type of
4823 OP1. */
4824 if (!vectype_in)
4825 vectype_in = vectype1_in;
4826 }
4827
4828 /* If op0 is an external or constant def, infer the vector type
4829 from the scalar type. */
4830 if (!vectype_in)
4831 vectype_in = get_vectype_for_scalar_type (vinfo, rhs_type, slp_node);
4832 if (vec_stmt)
4833 gcc_assert (vectype_in)((void)(!(vectype_in) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 4833, __FUNCTION__), 0 : 0))
;
4834 if (!vectype_in)
4835 {
4836 if (dump_enabled_p ())
4837 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
4838 "no vectype for scalar type %T\n", rhs_type);
4839
4840 return false;
4841 }
4842
4843 if (VECTOR_BOOLEAN_TYPE_P (vectype_out)(((enum tree_code) (vectype_out)->base.code) == VECTOR_TYPE
&& ((enum tree_code) (((contains_struct_check ((vectype_out
), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 4843, __FUNCTION__))->typed.type))->base.code) == BOOLEAN_TYPE
)
4844 && !VECTOR_BOOLEAN_TYPE_P (vectype_in)(((enum tree_code) (vectype_in)->base.code) == VECTOR_TYPE
&& ((enum tree_code) (((contains_struct_check ((vectype_in
), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 4844, __FUNCTION__))->typed.type))->base.code) == BOOLEAN_TYPE
)
)
4845 {
4846 if (dump_enabled_p ())
4847 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
4848 "can't convert between boolean and non "
4849 "boolean vectors %T\n", rhs_type);
4850
4851 return false;
4852 }
4853
 /* Classify the conversion by the relative lane counts of the input
 and output vector types: equal lanes is NONE (or WIDEN for the
 widening arithmetic codes), more output lanes is NARROW, fewer is
 WIDEN. */
4854 nunits_in = TYPE_VECTOR_SUBPARTS (vectype_in);
4855 nunits_out = TYPE_VECTOR_SUBPARTS (vectype_out);
4856 if (known_eq (nunits_out, nunits_in)(!maybe_ne (nunits_out, nunits_in)))
4857 if (widen_arith)
4858 modifier = WIDEN;
4859 else
4860 modifier = NONE;
4861 else if (multiple_p (nunits_out, nunits_in))
4862 modifier = NARROW;
4863 else
4864 {
4865 gcc_checking_assert (multiple_p (nunits_in, nunits_out))((void)(!(multiple_p (nunits_in, nunits_out)) ? fancy_abort (
"/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 4865, __FUNCTION__), 0 : 0))
;
4866 modifier = WIDEN;
4867 }
4868
4869 /* Multiple types in SLP are handled by creating the appropriate number of
4870 vectorized stmts for each SLP node. Hence, NCOPIES is always 1 in
4871 case of SLP. */
4872 if (slp_node)
4873 ncopies = 1;
4874 else if (modifier == NARROW)
4875 ncopies = vect_get_num_copies (loop_vinfo, vectype_out);
4876 else
4877 ncopies = vect_get_num_copies (loop_vinfo, vectype_in);
4878
4879 /* Sanity check: make sure that at least one copy of the vectorized stmt
4880 needs to be generated. */
4881 gcc_assert (ncopies >= 1)((void)(!(ncopies >= 1) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 4881, __FUNCTION__), 0 : 0))
;
4882
4883 bool found_mode = false;
4884 scalar_mode lhs_mode = SCALAR_TYPE_MODE (lhs_type)(as_a <scalar_mode> ((tree_class_check ((lhs_type), (tcc_type
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 4884, __FUNCTION__))->type_common.mode))
;
4885 scalar_mode rhs_mode = SCALAR_TYPE_MODE (rhs_type)(as_a <scalar_mode> ((tree_class_check ((rhs_type), (tcc_type
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 4885, __FUNCTION__))->type_common.mode))
;
4886 opt_scalar_mode rhs_mode_iter;
4887
4888 /* Supportable by target? */
4889 switch (modifier)
4890 {
4891 case NONE:
4892 if (code != FIX_TRUNC_EXPR
4893 && code != FLOAT_EXPR
4894 && !CONVERT_EXPR_CODE_P (code)((code) == NOP_EXPR || (code) == CONVERT_EXPR))
4895 return false;
4896 if (supportable_convert_operation (code, vectype_out, vectype_in, &code1))
4897 break;
4898 /* FALLTHRU */
4899 unsupported:
4900 if (dump_enabled_p ())
4901 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
4902 "conversion not supported by target.\n");
4903 return false;
4904
4905 case WIDEN:
4906 if (known_eq (nunits_in, nunits_out)(!maybe_ne (nunits_in, nunits_out)))
4907 {
4908 if (!supportable_half_widening_operation (code, vectype_out,
4909 vectype_in, &code1))
4910 goto unsupported;
4911 gcc_assert (!(multi_step_cvt && op_type == binary_op))((void)(!(!(multi_step_cvt && op_type == binary_op)) ?
fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 4911, __FUNCTION__), 0 : 0))
;
4912 break;
4913 }
4914 if (supportable_widening_operation (vinfo, code, stmt_info,
4915 vectype_out, vectype_in, &code1,
4916 &code2, &multi_step_cvt,
4917 &interm_types))
4918 {
4919 /* Binary widening operation can only be supported directly by the
4920 architecture. */
4921 gcc_assert (!(multi_step_cvt && op_type == binary_op))((void)(!(!(multi_step_cvt && op_type == binary_op)) ?
fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 4921, __FUNCTION__), 0 : 0))
;
4922 break;
4923 }
4924
 /* Direct widening failed. For int->float widening, try to find an
 intermediate integer mode to widen to first, then convert from
 that mode to the float type (codecvt1/codecvt2). */
4925 if (code != FLOAT_EXPR
4926 || GET_MODE_SIZE (lhs_mode) <= GET_MODE_SIZE (rhs_mode))
4927 goto unsupported;
4928
4929 fltsz = GET_MODE_SIZE (lhs_mode);
4930 FOR_EACH_2XWIDER_MODE (rhs_mode_iter, rhs_mode)for ((rhs_mode_iter) = (rhs_mode), mode_iterator::get_2xwider
(&(rhs_mode_iter)); mode_iterator::iterate_p (&(rhs_mode_iter
)); mode_iterator::get_2xwider (&(rhs_mode_iter)))
4931 {
4932 rhs_mode = rhs_mode_iter.require ();
4933 if (GET_MODE_SIZE (rhs_mode) > fltsz)
4934 break;
4935
4936 cvt_type
4937 = build_nonstandard_integer_type (GET_MODE_BITSIZE (rhs_mode), 0);
4938 cvt_type = get_same_sized_vectype (cvt_type, vectype_in);
4939 if (cvt_type == NULL_TREE(tree) nullptr)
4940 goto unsupported;
4941
4942 if (GET_MODE_SIZE (rhs_mode) == fltsz)
4943 {
4944 if (!supportable_convert_operation (code, vectype_out,
4945 cvt_type, &codecvt1))
4946 goto unsupported;
4947 }
4948 else if (!supportable_widening_operation (vinfo, code, stmt_info,
4949 vectype_out, cvt_type,
4950 &codecvt1, &codecvt2,
4951 &multi_step_cvt,
4952 &interm_types))
4953 continue;
4954 else
4955 gcc_assert (multi_step_cvt == 0)((void)(!(multi_step_cvt == 0) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 4955, __FUNCTION__), 0 : 0))
;
4956
4957 if (supportable_widening_operation (vinfo, NOP_EXPR, stmt_info,
4958 cvt_type,
4959 vectype_in, &code1, &code2,
4960 &multi_step_cvt, &interm_types))
4961 {
4962 found_mode = true;
4963 break;
4964 }
4965 }
4966
4967 if (!found_mode)
4968 goto unsupported;
4969
4970 if (GET_MODE_SIZE (rhs_mode) == fltsz)
4971 codecvt2 = ERROR_MARK;
4972 else
4973 {
4974 multi_step_cvt++;
4975 interm_types.safe_push (cvt_type);
4976 cvt_type = NULL_TREE(tree) nullptr;
4977 }
4978 break;
4979
4980 case NARROW:
4981 gcc_assert (op_type == unary_op)((void)(!(op_type == unary_op) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 4981, __FUNCTION__), 0 : 0))
;
4982 if (supportable_narrowing_operation (code, vectype_out, vectype_in,
4983 &code1, &multi_step_cvt,
4984 &interm_types))
4985 break;
4986
 /* Direct narrowing failed. For float->int, try converting to a
 same-sized integer type first (codecvt1), then narrowing that. */
4987 if (code != FIX_TRUNC_EXPR
4988 || GET_MODE_SIZE (lhs_mode) >= GET_MODE_SIZE (rhs_mode))
4989 goto unsupported;
4990
4991 cvt_type
4992 = build_nonstandard_integer_type (GET_MODE_BITSIZE (rhs_mode), 0);
4993 cvt_type = get_same_sized_vectype (cvt_type, vectype_in);
4994 if (cvt_type == NULL_TREE(tree) nullptr)
4995 goto unsupported;
4996 if (!supportable_convert_operation (code, cvt_type, vectype_in,
4997 &codecvt1))
4998 goto unsupported;
4999 if (supportable_narrowing_operation (NOP_EXPR, vectype_out, cvt_type,
5000 &code1, &multi_step_cvt,
5001 &interm_types))
5002 break;
5003 goto unsupported;
5004
5005 default:
5006 gcc_unreachable ()(fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 5006, __FUNCTION__))
;
5007 }
5008
5009 if (!vec_stmt) /* transformation not required. */
5010 {
5011 if (slp_node
5012 && (!vect_maybe_update_slp_op_vectype (slp_op0, vectype_in)
5013 || !vect_maybe_update_slp_op_vectype (slp_op1, vectype_in)))
5014 {
5015 if (dump_enabled_p ())
5016 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
5017 "incompatible vector types for invariants\n");
5018 return false;
5019 }
5020 DUMP_VECT_SCOPE ("vectorizable_conversion")auto_dump_scope scope ("vectorizable_conversion", vect_location
)
;
5021 if (modifier == NONE)
5022 {
5023 STMT_VINFO_TYPE (stmt_info)(stmt_info)->type = type_conversion_vec_info_type;
5024 vect_model_simple_cost (vinfo, stmt_info, ncopies, dt, ndts, slp_node,
5025 cost_vec);
5026 }
5027 else if (modifier == NARROW)
5028 {
5029 STMT_VINFO_TYPE (stmt_info)(stmt_info)->type = type_demotion_vec_info_type;
5030 /* The final packing step produces one vector result per copy. */
5031 unsigned int nvectors
5032 = (slp_node ? SLP_TREE_NUMBER_OF_VEC_STMTS (slp_node)(slp_node)->vec_stmts_size : ncopies);
5033 vect_model_promotion_demotion_cost (stmt_info, dt, nvectors,
5034 multi_step_cvt, cost_vec,
5035 widen_arith);
5036 }
5037 else
5038 {
5039 STMT_VINFO_TYPE (stmt_info)(stmt_info)->type = type_promotion_vec_info_type;
5040 /* The initial unpacking step produces two vector results
5041 per copy. MULTI_STEP_CVT is 0 for a single conversion,
5042 so >> MULTI_STEP_CVT divides by 2^(number of steps - 1). */
5043 unsigned int nvectors
5044 = (slp_node
5045 ? SLP_TREE_NUMBER_OF_VEC_STMTS (slp_node)(slp_node)->vec_stmts_size >> multi_step_cvt
5046 : ncopies * 2);
5047 vect_model_promotion_demotion_cost (stmt_info, dt, nvectors,
5048 multi_step_cvt, cost_vec,
5049 widen_arith);
5050 }
5051 interm_types.release ();
5052 return true;
5053 }
5054
5055 /* Transform. */
5056 if (dump_enabled_p ())
5057 dump_printf_loc (MSG_NOTE, vect_location,
5058 "transform conversion. ncopies = %d.\n", ncopies);
5059
 /* Fold a constant operand to the other operand's type so both vector
 operands end up with the same element type. */
5060 if (op_type == binary_op)
5061 {
5062 if (CONSTANT_CLASS_P (op0)(tree_code_type[(int) (((enum tree_code) (op0)->base.code)
)] == tcc_constant)
)
5063 op0 = fold_convert (TREE_TYPE (op1), op0)fold_convert_loc (((location_t) 0), ((contains_struct_check (
(op1), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 5063, __FUNCTION__))->typed.type), op0)
;
5064 else if (CONSTANT_CLASS_P (op1)(tree_code_type[(int) (((enum tree_code) (op1)->base.code)
)] == tcc_constant)
)
5065 op1 = fold_convert (TREE_TYPE (op0), op1)fold_convert_loc (((location_t) 0), ((contains_struct_check (
(op0), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 5065, __FUNCTION__))->typed.type), op1)
;
5066 }
5067
5068 /* In case of multi-step conversion, we first generate conversion operations
5069 to the intermediate types, and then from that types to the final one.
5070 We create vector destinations for the intermediate type (TYPES) received
5071 from supportable_*_operation, and store them in the correct order
5072 for future use in vect_create_vectorized_*_stmts (). */
5073 auto_vec<tree> vec_dsts (multi_step_cvt + 1);
5074 vec_dest = vect_create_destination_var (scalar_dest,
5075 (cvt_type && modifier == WIDEN)
5076 ? cvt_type : vectype_out);
5077 vec_dsts.quick_push (vec_dest);
5078
5079 if (multi_step_cvt)
5080 {
5081 for (i = interm_types.length () - 1;
5082 interm_types.iterate (i, &intermediate_type); i--)
5083 {
5084 vec_dest = vect_create_destination_var (scalar_dest,
5085 intermediate_type);
5086 vec_dsts.quick_push (vec_dest);
5087 }
5088 }
5089
 /* Extra destination for the codecvt1 conversion step, when one was
 selected above. */
5090 if (cvt_type)
5091 vec_dest = vect_create_destination_var (scalar_dest,
5092 modifier == WIDEN
5093 ? vectype_out : cvt_type);
5094
 /* For non-SLP NARROW, each output copy consumes 2^(multi_step_cvt+1)
 input vectors. */
5095 int ninputs = 1;
5096 if (!slp_node)
5097 {
5098 if (modifier == WIDEN)
5099 ;
5100 else if (modifier == NARROW)
5101 {
5102 if (multi_step_cvt)
5103 ninputs = vect_pow2 (multi_step_cvt);
5104 ninputs *= 2;
5105 }
5106 }
5107
5108 switch (modifier)
5109 {
5110 case NONE:
5111 vect_get_vec_defs (vinfo, stmt_info, slp_node, ncopies,
5112 op0, &vec_oprnds0);
5113 FOR_EACH_VEC_ELT (vec_oprnds0, i, vop0)for (i = 0; (vec_oprnds0).iterate ((i), &(vop0)); ++(i))
5114 {
5115 /* Arguments are ready, create the new vector stmt. */
5116 gcc_assert (TREE_CODE_LENGTH (code1) == unary_op)((void)(!(tree_code_length[(int) (code1)] == unary_op) ? fancy_abort
("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 5116, __FUNCTION__), 0 : 0))
;
5117 gassign *new_stmt = gimple_build_assign (vec_dest, code1, vop0);
5118 new_temp = make_ssa_name (vec_dest, new_stmt);
5119 gimple_assign_set_lhs (new_stmt, new_temp);
5120 vect_finish_stmt_generation (vinfo, stmt_info, new_stmt, gsi);
5121
5122 if (slp_node)
5123 SLP_TREE_VEC_STMTS (slp_node)(slp_node)->vec_stmts.quick_push (new_stmt);
5124 else
5125 STMT_VINFO_VEC_STMTS (stmt_info)(stmt_info)->vec_stmts.safe_push (new_stmt);
5126 }
5127 break;
5128
5129 case WIDEN:
5130 /* In case the vectorization factor (VF) is bigger than the number
5131 of elements that we can fit in a vectype (nunits), we have to
5132 generate more than one vector stmt - i.e - we need to "unroll"
5133 the vector stmt by a factor VF/nunits. */
5134 vect_get_vec_defs (vinfo, stmt_info, slp_node, ncopies * ninputs,
5135 op0, &vec_oprnds0,
5136 code == WIDEN_LSHIFT_EXPR ? NULL_TREE(tree) nullptr : op1,
5137 &vec_oprnds1);
 /* For WIDEN_LSHIFT_EXPR the shift amount stays scalar; replicate it
 once per input vector instead of creating vector defs. */
5138 if (code == WIDEN_LSHIFT_EXPR)
5139 {
5140 int oprnds_size = vec_oprnds0.length ();
5141 vec_oprnds1.create (oprnds_size);
5142 for (i = 0; i < oprnds_size; ++i)
5143 vec_oprnds1.quick_push (op1);
5144 }
5145 /* Arguments are ready. Create the new vector stmts. */
5146 for (i = multi_step_cvt; i >= 0; i--)
5147 {
5148 tree this_dest = vec_dsts[i];
5149 enum tree_code c1 = code1, c2 = code2;
5150 if (i == 0 && codecvt2 != ERROR_MARK)
5151 {
5152 c1 = codecvt1;
5153 c2 = codecvt2;
5154 }
5155 if (known_eq (nunits_out, nunits_in)(!maybe_ne (nunits_out, nunits_in)))
5156 vect_create_half_widening_stmts (vinfo, &vec_oprnds0,
5157 &vec_oprnds1, stmt_info,
5158 this_dest, gsi,
5159 c1, op_type);
5160 else
5161 vect_create_vectorized_promotion_stmts (vinfo, &vec_oprnds0,
5162 &vec_oprnds1, stmt_info,
5163 this_dest, gsi,
5164 c1, c2, op_type);
5165 }
5166
5167 FOR_EACH_VEC_ELT (vec_oprnds0, i, vop0)for (i = 0; (vec_oprnds0).iterate ((i), &(vop0)); ++(i))
5168 {
5169 gimple *new_stmt;
5170 if (cvt_type)
5171 {
5172 gcc_assert (TREE_CODE_LENGTH (codecvt1) == unary_op)((void)(!(tree_code_length[(int) (codecvt1)] == unary_op) ? fancy_abort
("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 5172, __FUNCTION__), 0 : 0))
;
5173 new_temp = make_ssa_name (vec_dest);
5174 new_stmt = gimple_build_assign (new_temp, codecvt1, vop0);
5175 vect_finish_stmt_generation (vinfo, stmt_info, new_stmt, gsi);
5176 }
5177 else
5178 new_stmt = SSA_NAME_DEF_STMT (vop0)(tree_check ((vop0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 5178, __FUNCTION__, (SSA_NAME)))->ssa_name.def_stmt
;
5179
5180 if (slp_node)
5181 SLP_TREE_VEC_STMTS (slp_node)(slp_node)->vec_stmts.quick_push (new_stmt);
5182 else
5183 STMT_VINFO_VEC_STMTS (stmt_info)(stmt_info)->vec_stmts.safe_push (new_stmt);
5184 }
5185 break;
5186
5187 case NARROW:
5188 /* In case the vectorization factor (VF) is bigger than the number
5189 of elements that we can fit in a vectype (nunits), we have to
5190 generate more than one vector stmt - i.e - we need to "unroll"
5191 the vector stmt by a factor VF/nunits. */
5192 vect_get_vec_defs (vinfo, stmt_info, slp_node, ncopies * ninputs,
5193 op0, &vec_oprnds0);
5194 /* Arguments are ready. Create the new vector stmts. */
 /* First apply the codecvt1 conversion (e.g. float->int) in place,
 then emit the demotion chain. */
5195 if (cvt_type)
5196 FOR_EACH_VEC_ELT (vec_oprnds0, i, vop0)for (i = 0; (vec_oprnds0).iterate ((i), &(vop0)); ++(i))
5197 {
5198 gcc_assert (TREE_CODE_LENGTH (codecvt1) == unary_op)((void)(!(tree_code_length[(int) (codecvt1)] == unary_op) ? fancy_abort
("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 5198, __FUNCTION__), 0 : 0))
;
5199 new_temp = make_ssa_name (vec_dest);
5200 gassign *new_stmt
5201 = gimple_build_assign (new_temp, codecvt1, vop0);
5202 vect_finish_stmt_generation (vinfo, stmt_info, new_stmt, gsi);
5203 vec_oprnds0[i] = new_temp;
5204 }
5205
5206 vect_create_vectorized_demotion_stmts (vinfo, &vec_oprnds0,
5207 multi_step_cvt,
5208 stmt_info, vec_dsts, gsi,
5209 slp_node, code1);
5210 break;
5211 }
5212 if (!slp_node)
5213 *vec_stmt = STMT_VINFO_VEC_STMTS (stmt_info)(stmt_info)->vec_stmts[0];
5214
5215 vec_oprnds0.release ();
5216 vec_oprnds1.release ();
5217 interm_types.release ();
5218
5219 return true;
5220}
5221
5222/* Return true if we can assume from the scalar form of STMT_INFO that
5223 neither the scalar nor the vector forms will generate code. STMT_INFO
5224 is known not to involve a data reference. */
5225
5226bool
5227vect_nop_conversion_p (stmt_vec_info stmt_info)
5228{
 /* Only plain assignments can be no-op conversions. */
5229 gassign *stmt = dyn_cast <gassign *> (stmt_info->stmt);
5230 if (!stmt)
5231 return false;
5232
5233 tree lhs = gimple_assign_lhs (stmt);
5234 tree_code code = gimple_assign_rhs_code (stmt);
5235 tree rhs = gimple_assign_rhs1 (stmt);
5236
 /* Copies and VIEW_CONVERT_EXPRs never generate code. */
5237 if (code == SSA_NAME || code == VIEW_CONVERT_EXPR)
5238 return true;
5239
 /* NOP/CONVERT generate no code when the types agree closely enough
 (as determined by tree_nop_conversion_p). */
5240 if (CONVERT_EXPR_CODE_P (code)((code) == NOP_EXPR || (code) == CONVERT_EXPR))
5241 return tree_nop_conversion_p (TREE_TYPE (lhs)((contains_struct_check ((lhs), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 5241, __FUNCTION__))->typed.type)
, TREE_TYPE (rhs)((contains_struct_check ((rhs), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 5241, __FUNCTION__))->typed.type)
);
5242
5243 return false;
5244}
5245
5246/* Function vectorizable_assignment.
5247
5248 Check if STMT_INFO performs an assignment (copy) that can be vectorized.
5249 If VEC_STMT is also passed, vectorize the STMT_INFO: create a vectorized
5250 stmt to replace it, put it in VEC_STMT, and insert it at GSI.
5251 Return true if STMT_INFO is vectorizable in this way. */
5252
5253static bool
5254vectorizable_assignment (vec_info *vinfo,
5255 stmt_vec_info stmt_info, gimple_stmt_iterator *gsi,
5256 gimple **vec_stmt, slp_tree slp_node,
5257 stmt_vector_for_cost *cost_vec)
5258{
5259 tree vec_dest;
5260 tree scalar_dest;
5261 tree op;
5262 loop_vec_info loop_vinfo = dyn_cast <loop_vec_info> (vinfo);
5263 tree new_temp;
5264 enum vect_def_type dt[1] = {vect_unknown_def_type};
5265 int ndts = 1;
5266 int ncopies;
5267 int i;
5268 vec<tree> vec_oprnds = vNULL;
5269 tree vop;
5270 bb_vec_info bb_vinfo = dyn_cast <bb_vec_info> (vinfo);
5271 enum tree_code code;
5272 tree vectype_in;
5273
/* In loop vectorization only relevant stmts are vectorized; basic-block
   (SLP) vectorization considers every stmt.  */
5274 if (!STMT_VINFO_RELEVANT_P (stmt_info)((stmt_info)->relevant != vect_unused_in_scope) && !bb_vinfo)
5275 return false;
5276
5277 if (STMT_VINFO_DEF_TYPE (stmt_info)(stmt_info)->def_type != vect_internal_def
5278 && ! vec_stmt)
5279 return false;
5280
5281 /* Is vectorizable assignment? */
5282 gassign *stmt = dyn_cast <gassign *> (stmt_info->stmt);
5283 if (!stmt)
5284 return false;
5285
5286 scalar_dest = gimple_assign_lhs (stmt);
5287 if (TREE_CODE (scalar_dest)((enum tree_code) (scalar_dest)->base.code) != SSA_NAME)
5288 return false;
5289
/* Statements with a data reference (loads/stores) are not handled here.  */
5290 if (STMT_VINFO_DATA_REF (stmt_info)((stmt_info)->dr_aux.dr + 0))
5291 return false;
5292
/* Accept plain copies, PAREN_EXPR and NOP/CONVERT conversions only.  */
5293 code = gimple_assign_rhs_code (stmt);
5294 if (!(gimple_assign_single_p (stmt)
5295 || code == PAREN_EXPR
5296 || CONVERT_EXPR_CODE_P (code)((code) == NOP_EXPR || (code) == CONVERT_EXPR)))
5297 return false;
5298
5299 tree vectype = STMT_VINFO_VECTYPE (stmt_info)(stmt_info)->vectype;
5300 poly_uint64 nunits = TYPE_VECTOR_SUBPARTS (vectype);
5301
5302 /* Multiple types in SLP are handled by creating the appropriate number of
5303 vectorized stmts for each SLP node. Hence, NCOPIES is always 1 in
5304 case of SLP. */
5305 if (slp_node)
5306 ncopies = 1;
5307 else
5308 ncopies = vect_get_num_copies (loop_vinfo, vectype);
5309
5310 gcc_assert (ncopies >= 1)((void)(!(ncopies >= 1) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 5310, __FUNCTION__), 0 : 0))
;
5311
/* Classify the single operand and obtain its vector type (inferred from
   the scalar type for external/constant defs).  */
5312 slp_tree slp_op;
5313 if (!vect_is_simple_use (vinfo, stmt_info, slp_node, 0, &op, &slp_op,
5314 &dt[0], &vectype_in))
5315 {
5316 if (dump_enabled_p ())
5317 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
5318 "use not simple.\n");
5319 return false;
5320 }
5321 if (!vectype_in)
5322 vectype_in = get_vectype_for_scalar_type (vinfo, TREE_TYPE (op)((contains_struct_check ((op), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 5322, __FUNCTION__))->typed.type)
, slp_node);
5323
5324 /* We can handle NOP_EXPR conversions that do not change the number
5325 of elements or the vector size. */
5326 if ((CONVERT_EXPR_CODE_P (code)((code) == NOP_EXPR || (code) == CONVERT_EXPR)
5327 || code == VIEW_CONVERT_EXPR)
5328 && (!vectype_in
5329 || maybe_ne (TYPE_VECTOR_SUBPARTS (vectype_in), nunits)
5330 || maybe_ne (GET_MODE_SIZE (TYPE_MODE (vectype)((((enum tree_code) ((tree_class_check ((vectype), (tcc_type)
, "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 5330, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(vectype) : (vectype)->type_common.mode)
),
5331 GET_MODE_SIZE (TYPE_MODE (vectype_in)((((enum tree_code) ((tree_class_check ((vectype_in), (tcc_type
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 5331, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(vectype_in) : (vectype_in)->type_common.mode)
))))
5332 return false;
5333
/* Mask (boolean) vectors have a different representation from data
   vectors; converting a non-boolean input to a boolean result is
   rejected.  */
5334 if (VECTOR_BOOLEAN_TYPE_P (vectype)(((enum tree_code) (vectype)->base.code) == VECTOR_TYPE &&
((enum tree_code) (((contains_struct_check ((vectype), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 5334, __FUNCTION__))->typed.type))->base.code) == BOOLEAN_TYPE
)
5335 && !VECTOR_BOOLEAN_TYPE_P (vectype_in)(((enum tree_code) (vectype_in)->base.code) == VECTOR_TYPE
&& ((enum tree_code) (((contains_struct_check ((vectype_in
), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 5335, __FUNCTION__))->typed.type))->base.code) == BOOLEAN_TYPE
)
)
5336 {
5337 if (dump_enabled_p ())
5338 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
5339 "can't convert between boolean and non "
5340 "boolean vectors %T\n", TREE_TYPE (op)((contains_struct_check ((op), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 5340, __FUNCTION__))->typed.type)
);
5341
5342 return false;
5343 }
5344
5345 /* We do not handle bit-precision changes. */
5346 if ((CONVERT_EXPR_CODE_P (code)((code) == NOP_EXPR || (code) == CONVERT_EXPR)
5347 || code == VIEW_CONVERT_EXPR)
5348 && INTEGRAL_TYPE_P (TREE_TYPE (scalar_dest))(((enum tree_code) (((contains_struct_check ((scalar_dest), (
TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 5348, __FUNCTION__))->typed.type))->base.code) == ENUMERAL_TYPE
|| ((enum tree_code) (((contains_struct_check ((scalar_dest)
, (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 5348, __FUNCTION__))->typed.type))->base.code) == BOOLEAN_TYPE
|| ((enum tree_code) (((contains_struct_check ((scalar_dest)
, (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 5348, __FUNCTION__))->typed.type))->base.code) == INTEGER_TYPE
)
5349 && (!type_has_mode_precision_p (TREE_TYPE (scalar_dest)((contains_struct_check ((scalar_dest), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 5349, __FUNCTION__))->typed.type)
)
5350 || !type_has_mode_precision_p (TREE_TYPE (op)((contains_struct_check ((op), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 5350, __FUNCTION__))->typed.type)
))
5351 /* But a conversion that does not change the bit-pattern is ok. */
5352 && !((TYPE_PRECISION (TREE_TYPE (scalar_dest))((tree_class_check ((((contains_struct_check ((scalar_dest), (
TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 5352, __FUNCTION__))->typed.type)), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 5352, __FUNCTION__))->type_common.precision)
5353 > TYPE_PRECISION (TREE_TYPE (op))((tree_class_check ((((contains_struct_check ((op), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 5353, __FUNCTION__))->typed.type)), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 5353, __FUNCTION__))->type_common.precision)
)
5354 && TYPE_UNSIGNED (TREE_TYPE (op))((tree_class_check ((((contains_struct_check ((op), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 5354, __FUNCTION__))->typed.type)), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 5354, __FUNCTION__))->base.u.bits.unsigned_flag)
))
5355 {
5356 if (dump_enabled_p ())
5357 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
5358 "type conversion to/from bit-precision "
5359 "unsupported.\n");
5360 return false;
5361 }
5362
/* Analysis phase: record the stmt type and its cost, no code is
   generated until vec_stmt is supplied.  */
5363 if (!vec_stmt) /* transformation not required. */
5364 {
5365 if (slp_node
5366 && !vect_maybe_update_slp_op_vectype (slp_op, vectype_in))
5367 {
5368 if (dump_enabled_p ())
5369 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
5370 "incompatible vector types for invariants\n");
5371 return false;
5372 }
5373 STMT_VINFO_TYPE (stmt_info)(stmt_info)->type = assignment_vec_info_type;
5374 DUMP_VECT_SCOPE ("vectorizable_assignment")auto_dump_scope scope ("vectorizable_assignment", vect_location
)
;
/* No-op conversions generate no code, so cost nothing for them.  */
5375 if (!vect_nop_conversion_p (stmt_info))
5376 vect_model_simple_cost (vinfo, stmt_info, ncopies, dt, ndts, slp_node,
5377 cost_vec);
5378 return true;
5379 }
5380
5381 /* Transform. */
5382 if (dump_enabled_p ())
5383 dump_printf_loc (MSG_NOTE, vect_location, "transform assignment.\n");
5384
5385 /* Handle def. */
5386 vec_dest = vect_create_destination_var (scalar_dest, vectype);
5387
5388 /* Handle use. */
5389 vect_get_vec_defs (vinfo, stmt_info, slp_node, ncopies, op, &vec_oprnds);
5390
5391 /* Arguments are ready. create the new vector stmt. */
5392 FOR_EACH_VEC_ELT (vec_oprnds, i, vop)for (i = 0; (vec_oprnds).iterate ((i), &(vop)); ++(i))
5393 {
/* Conversions were vetted above as bit-preserving, so a
   VIEW_CONVERT_EXPR to the destination vector type suffices.  */
5394 if (CONVERT_EXPR_CODE_P (code)((code) == NOP_EXPR || (code) == CONVERT_EXPR)
5395 || code == VIEW_CONVERT_EXPR)
5396 vop = build1 (VIEW_CONVERT_EXPR, vectype, vop);
5397 gassign *new_stmt = gimple_build_assign (vec_dest, vop);
5398 new_temp = make_ssa_name (vec_dest, new_stmt);
5399 gimple_assign_set_lhs (new_stmt, new_temp);
5400 vect_finish_stmt_generation (vinfo, stmt_info, new_stmt, gsi);
5401 if (slp_node)
5402 SLP_TREE_VEC_STMTS (slp_node)(slp_node)->vec_stmts.quick_push (new_stmt);
5403 else
5404 STMT_VINFO_VEC_STMTS (stmt_info)(stmt_info)->vec_stmts.safe_push (new_stmt);
5405 }
5406 if (!slp_node)
5407 *vec_stmt = STMT_VINFO_VEC_STMTS (stmt_info)(stmt_info)->vec_stmts[0];
5408
5409 vec_oprnds.release ();
5410 return true;
5411}
5412
5413
5414/* Return TRUE if CODE (a shift operation) is supported for SCALAR_TYPE
5415 either as shift by a scalar or by a vector. */
5416
5417bool
5418vect_supportable_shift (vec_info *vinfo, enum tree_code code, tree scalar_type)
5419{
5420
5421 machine_mode vec_mode;
5422 optab optab;
5423 int icode;
5424 tree vectype;
5425
/* Without a vector type for the scalar the shift cannot be vectorized.  */
5426 vectype = get_vectype_for_scalar_type (vinfo, scalar_type);
5427 if (!vectype)
5428 return false;
5429
/* Prefer the vector-shifted-by-scalar optab; fall back to the
   vector-shifted-by-vector form if the scalar one is unsupported.  */
5430 optab = optab_for_tree_code (code, vectype, optab_scalar);
5431 if (!optab
5432 || optab_handler (optab, TYPE_MODE (vectype)((((enum tree_code) ((tree_class_check ((vectype), (tcc_type)
, "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 5432, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(vectype) : (vectype)->type_common.mode)
) == CODE_FOR_nothing)
5433 {
5434 optab = optab_for_tree_code (code, vectype, optab_vector);
5435 if (!optab
5436 || (optab_handler (optab, TYPE_MODE (vectype)((((enum tree_code) ((tree_class_check ((vectype), (tcc_type)
, "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 5436, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(vectype) : (vectype)->type_common.mode)
)
5437 == CODE_FOR_nothing))
5438 return false;
5439 }
5440
/* Final check: the chosen optab must have an insn for the vector mode.  */
5441 vec_mode = TYPE_MODE (vectype)((((enum tree_code) ((tree_class_check ((vectype), (tcc_type)
, "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 5441, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(vectype) : (vectype)->type_common.mode)
;
5442 icode = (int) optab_handler (optab, vec_mode);
5443 if (icode == CODE_FOR_nothing)
5444 return false;
5445
5446 return true;
5447}
5448
5449
5450/* Function vectorizable_shift.
5451
5452 Check if STMT_INFO performs a shift operation that can be vectorized.
5453 If VEC_STMT is also passed, vectorize the STMT_INFO: create a vectorized
5454 stmt to replace it, put it in VEC_STMT, and insert it at GSI.
5455 Return true if STMT_INFO is vectorizable in this way. */
5456
5457static bool
5458vectorizable_shift (vec_info *vinfo,
5459 stmt_vec_info stmt_info, gimple_stmt_iterator *gsi,
5460 gimple **vec_stmt, slp_tree slp_node,
5461 stmt_vector_for_cost *cost_vec)
5462{
5463 tree vec_dest;
5464 tree scalar_dest;
5465 tree op0, op1 = NULLnullptr;
5466 tree vec_oprnd1 = NULL_TREE(tree) nullptr;
5467 tree vectype;
5468 loop_vec_info loop_vinfo = dyn_cast <loop_vec_info> (vinfo);
5469 enum tree_code code;
5470 machine_mode vec_mode;
5471 tree new_temp;
5472 optab optab;
5473 int icode;
5474 machine_mode optab_op2_mode;
5475 enum vect_def_type dt[2] = {vect_unknown_def_type, vect_unknown_def_type};
5476 int ndts = 2;
5477 poly_uint64 nunits_in;
5478 poly_uint64 nunits_out;
5479 tree vectype_out;
5480 tree op1_vectype;
5481 int ncopies;
5482 int i;
5483 vec<tree> vec_oprnds0 = vNULL;
5484 vec<tree> vec_oprnds1 = vNULL;
5485 tree vop0, vop1;
5486 unsigned int k;
5487 bool scalar_shift_arg = true;
5488 bb_vec_info bb_vinfo = dyn_cast <bb_vec_info> (vinfo);
5489 bool incompatible_op1_vectype_p = false;
5490
5491 if (!STMT_VINFO_RELEVANT_P (stmt_info)((stmt_info)->relevant != vect_unused_in_scope) && !bb_vinfo)
5492 return false;
5493
5494 if (STMT_VINFO_DEF_TYPE (stmt_info)(stmt_info)->def_type != vect_internal_def
5495 && STMT_VINFO_DEF_TYPE (stmt_info)(stmt_info)->def_type != vect_nested_cycle
5496 && ! vec_stmt)
5497 return false;
5498
5499 /* Is STMT a vectorizable binary/unary operation? */
5500 gassign *stmt = dyn_cast <gassign *> (stmt_info->stmt);
5501 if (!stmt)
5502 return false;
5503
5504 if (TREE_CODE (gimple_assign_lhs (stmt))((enum tree_code) (gimple_assign_lhs (stmt))->base.code) != SSA_NAME)
5505 return false;
5506
5507 code = gimple_assign_rhs_code (stmt);
5508
5509 if (!(code == LSHIFT_EXPR || code == RSHIFT_EXPR || code == LROTATE_EXPR
5510 || code == RROTATE_EXPR))
5511 return false;
5512
5513 scalar_dest = gimple_assign_lhs (stmt);
5514 vectype_out = STMT_VINFO_VECTYPE (stmt_info)(stmt_info)->vectype;
5515 if (!type_has_mode_precision_p (TREE_TYPE (scalar_dest)((contains_struct_check ((scalar_dest), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 5515, __FUNCTION__))->typed.type)
))
5516 {
5517 if (dump_enabled_p ())
5518 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
5519 "bit-precision shifts not supported.\n");
5520 return false;
5521 }
5522
5523 slp_tree slp_op0;
5524 if (!vect_is_simple_use (vinfo, stmt_info, slp_node,
5525 0, &op0, &slp_op0, &dt[0], &vectype))
5526 {
5527 if (dump_enabled_p ())
5528 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
5529 "use not simple.\n");
5530 return false;
5531 }
5532 /* If op0 is an external or constant def, infer the vector type
5533 from the scalar type. */
5534 if (!vectype)
5535 vectype = get_vectype_for_scalar_type (vinfo, TREE_TYPE (op0)((contains_struct_check ((op0), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 5535, __FUNCTION__))->typed.type)
, slp_node);
5536 if (vec_stmt)
5537 gcc_assert (vectype)((void)(!(vectype) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 5537, __FUNCTION__), 0 : 0))
;
5538 if (!vectype)
5539 {
5540 if (dump_enabled_p ())
5541 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
5542 "no vectype for scalar type\n");
5543 return false;
5544 }
5545
5546 nunits_out = TYPE_VECTOR_SUBPARTS (vectype_out);
5547 nunits_in = TYPE_VECTOR_SUBPARTS (vectype);
5548 if (maybe_ne (nunits_out, nunits_in))
5549 return false;
5550
5551 stmt_vec_info op1_def_stmt_info;
5552 slp_tree slp_op1;
5553 if (!vect_is_simple_use (vinfo, stmt_info, slp_node, 1, &op1, &slp_op1,
5554 &dt[1], &op1_vectype, &op1_def_stmt_info))
5555 {
5556 if (dump_enabled_p ())
5557 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
5558 "use not simple.\n");
5559 return false;
5560 }
5561
5562 /* Multiple types in SLP are handled by creating the appropriate number of
5563 vectorized stmts for each SLP node. Hence, NCOPIES is always 1 in
5564 case of SLP. */
5565 if (slp_node)
5566 ncopies = 1;
5567 else
5568 ncopies = vect_get_num_copies (loop_vinfo, vectype);
5569
5570 gcc_assert (ncopies >= 1)((void)(!(ncopies >= 1) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 5570, __FUNCTION__), 0 : 0))
;
5571
5572 /* Determine whether the shift amount is a vector, or scalar. If the
5573 shift/rotate amount is a vector, use the vector/vector shift optabs. */
5574
5575 if ((dt[1] == vect_internal_def
5576 || dt[1] == vect_induction_def
5577 || dt[1] == vect_nested_cycle)
5578 && !slp_node)
5579 scalar_shift_arg = false;
5580 else if (dt[1] == vect_constant_def
5581 || dt[1] == vect_external_def
5582 || dt[1] == vect_internal_def)
5583 {
5584 /* In SLP, need to check whether the shift count is the same,
5585 in loops if it is a constant or invariant, it is always
5586 a scalar shift. */
5587 if (slp_node)
5588 {
5589 vec<stmt_vec_info> stmts = SLP_TREE_SCALAR_STMTS (slp_node)(slp_node)->stmts;
5590 stmt_vec_info slpstmt_info;
5591
5592 FOR_EACH_VEC_ELT (stmts, k, slpstmt_info)for (k = 0; (stmts).iterate ((k), &(slpstmt_info)); ++(k)
)
5593 {
5594 gassign *slpstmt = as_a <gassign *> (slpstmt_info->stmt);
5595 if (!operand_equal_p (gimple_assign_rhs2 (slpstmt), op1, 0))
5596 scalar_shift_arg = false;
5597 }
5598
5599 /* For internal SLP defs we have to make sure we see scalar stmts
5600 for all vector elements.
5601 ??? For different vectors we could resort to a different
5602 scalar shift operand but code-generation below simply always
5603 takes the first. */
5604 if (dt[1] == vect_internal_def
5605 && maybe_ne (nunits_out * SLP_TREE_NUMBER_OF_VEC_STMTS (slp_node)(slp_node)->vec_stmts_size,
5606 stmts.length ()))
5607 scalar_shift_arg = false;
5608 }
5609
5610 /* If the shift amount is computed by a pattern stmt we cannot
5611 use the scalar amount directly thus give up and use a vector
5612 shift. */
5613 if (op1_def_stmt_info && is_pattern_stmt_p (op1_def_stmt_info))
5614 scalar_shift_arg = false;
5615 }
5616 else
5617 {
5618 if (dump_enabled_p ())
5619 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
5620 "operand mode requires invariant argument.\n");
5621 return false;
5622 }
5623
5624 /* Vector shifted by vector. */
5625 bool was_scalar_shift_arg = scalar_shift_arg;
5626 if (!scalar_shift_arg)
5627 {
5628 optab = optab_for_tree_code (code, vectype, optab_vector);
5629 if (dump_enabled_p ())
5630 dump_printf_loc (MSG_NOTE, vect_location,
5631 "vector/vector shift/rotate found.\n");
5632
5633 if (!op1_vectype)
5634 op1_vectype = get_vectype_for_scalar_type (vinfo, TREE_TYPE (op1)((contains_struct_check ((op1), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 5634, __FUNCTION__))->typed.type)
,
5635 slp_op1);
5636 incompatible_op1_vectype_p
5637 = (op1_vectype == NULL_TREE(tree) nullptr
5638 || maybe_ne (TYPE_VECTOR_SUBPARTS (op1_vectype),
5639 TYPE_VECTOR_SUBPARTS (vectype))
5640 || TYPE_MODE (op1_vectype)((((enum tree_code) ((tree_class_check ((op1_vectype), (tcc_type
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 5640, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(op1_vectype) : (op1_vectype)->type_common.mode)
!= TYPE_MODE (vectype)((((enum tree_code) ((tree_class_check ((vectype), (tcc_type)
, "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 5640, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(vectype) : (vectype)->type_common.mode)
);
5641 if (incompatible_op1_vectype_p
5642 && (!slp_node
5643 || SLP_TREE_DEF_TYPE (slp_op1)(slp_op1)->def_type != vect_constant_def
5644 || slp_op1->refcnt != 1))
5645 {
5646 if (dump_enabled_p ())
5647 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
5648 "unusable type for last operand in"
5649 " vector/vector shift/rotate.\n");
5650 return false;
5651 }
5652 }
5653 /* See if the machine has a vector shifted by scalar insn and if not
5654 then see if it has a vector shifted by vector insn. */
5655 else
5656 {
5657 optab = optab_for_tree_code (code, vectype, optab_scalar);
5658 if (optab
5659 && optab_handler (optab, TYPE_MODE (vectype)((((enum tree_code) ((tree_class_check ((vectype), (tcc_type)
, "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 5659, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(vectype) : (vectype)->type_common.mode)
) != CODE_FOR_nothing)
5660 {
5661 if (dump_enabled_p ())
5662 dump_printf_loc (MSG_NOTE, vect_location,
5663 "vector/scalar shift/rotate found.\n");
5664 }
5665 else
5666 {
5667 optab = optab_for_tree_code (code, vectype, optab_vector);
5668 if (optab
5669 && (optab_handler (optab, TYPE_MODE (vectype)((((enum tree_code) ((tree_class_check ((vectype), (tcc_type)
, "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 5669, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(vectype) : (vectype)->type_common.mode)
)
5670 != CODE_FOR_nothing))
5671 {
5672 scalar_shift_arg = false;
5673
5674 if (dump_enabled_p ())
5675 dump_printf_loc (MSG_NOTE, vect_location,
5676 "vector/vector shift/rotate found.\n");
5677
5678 if (!op1_vectype)
5679 op1_vectype = get_vectype_for_scalar_type (vinfo,
5680 TREE_TYPE (op1)((contains_struct_check ((op1), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 5680, __FUNCTION__))->typed.type)
,
5681 slp_op1);
5682
5683 /* Unlike the other binary operators, shifts/rotates have
5684 the rhs being int, instead of the same type as the lhs,
5685 so make sure the scalar is the right type if we are
5686 dealing with vectors of long long/long/short/char. */
5687 incompatible_op1_vectype_p
5688 = (!op1_vectype
5689 || !tree_nop_conversion_p (TREE_TYPE (vectype)((contains_struct_check ((vectype), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 5689, __FUNCTION__))->typed.type)
,
5690 TREE_TYPE (op1)((contains_struct_check ((op1), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 5690, __FUNCTION__))->typed.type)
));
5691 if (incompatible_op1_vectype_p
5692 && dt[1] == vect_internal_def)
5693 {
5694 if (dump_enabled_p ())
5695 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
5696 "unusable type for last operand in"
5697 " vector/vector shift/rotate.\n");
5698 return false;
5699 }
5700 }
5701 }
5702 }
5703
5704 /* Supportable by target? */
5705 if (!optab)
5706 {
5707 if (dump_enabled_p ())
5708 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
5709 "no optab.\n");
5710 return false;
5711 }
5712 vec_mode = TYPE_MODE (vectype)((((enum tree_code) ((tree_class_check ((vectype), (tcc_type)
, "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 5712, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(vectype) : (vectype)->type_common.mode)
;
5713 icode = (int) optab_handler (optab, vec_mode);
5714 if (icode == CODE_FOR_nothing)
5715 {
5716 if (dump_enabled_p ())
5717 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
5718 "op not supported by target.\n");
5719 return false;
5720 }
5721 /* vector lowering cannot optimize vector shifts using word arithmetic. */
5722 if (vect_emulated_vector_p (vectype))
5723 return false;
5724
5725 if (!vec_stmt) /* transformation not required. */
5726 {
5727 if (slp_node
5728 && (!vect_maybe_update_slp_op_vectype (slp_op0, vectype)
5729 || ((!scalar_shift_arg || dt[1] == vect_internal_def)
5730 && (!incompatible_op1_vectype_p
5731 || dt[1] == vect_constant_def)
5732 && !vect_maybe_update_slp_op_vectype
5733 (slp_op1,
5734 incompatible_op1_vectype_p ? vectype : op1_vectype))))
5735 {
5736 if (dump_enabled_p ())
5737 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
5738 "incompatible vector types for invariants\n");
5739 return false;
5740 }
5741 /* Now adjust the constant shift amount in place. */
5742 if (slp_node
5743 && incompatible_op1_vectype_p
5744 && dt[1] == vect_constant_def)
5745 {
5746 for (unsigned i = 0;
5747 i < SLP_TREE_SCALAR_OPS (slp_op1)(slp_op1)->ops.length (); ++i)
5748 {
5749 SLP_TREE_SCALAR_OPS (slp_op1)(slp_op1)->ops[i]
5750 = fold_convert (TREE_TYPE (vectype),fold_convert_loc (((location_t) 0), ((contains_struct_check (
(vectype), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 5750, __FUNCTION__))->typed.type), (slp_op1)->ops[i])
5751 SLP_TREE_SCALAR_OPS (slp_op1)[i])fold_convert_loc (((location_t) 0), ((contains_struct_check (
(vectype), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 5750, __FUNCTION__))->typed.type), (slp_op1)->ops[i])
;
5752 gcc_assert ((TREE_CODE (SLP_TREE_SCALAR_OPS (slp_op1)[i])((void)(!((((enum tree_code) ((slp_op1)->ops[i])->base.
code) == INTEGER_CST)) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 5753, __FUNCTION__), 0 : 0))
5753 == INTEGER_CST))((void)(!((((enum tree_code) ((slp_op1)->ops[i])->base.
code) == INTEGER_CST)) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 5753, __FUNCTION__), 0 : 0))
;
5754 }
5755 }
5756 STMT_VINFO_TYPE (stmt_info)(stmt_info)->type = shift_vec_info_type;
5757 DUMP_VECT_SCOPE ("vectorizable_shift")auto_dump_scope scope ("vectorizable_shift", vect_location);
5758 vect_model_simple_cost (vinfo, stmt_info, ncopies, dt,
5759 scalar_shift_arg ? 1 : ndts, slp_node, cost_vec);
5760 return true;
5761 }
5762
5763 /* Transform. */
5764
5765 if (dump_enabled_p ())
5766 dump_printf_loc (MSG_NOTE, vect_location,
5767 "transform binary/unary operation.\n");
5768
5769 if (incompatible_op1_vectype_p && !slp_node)
5770 {
5771 gcc_assert (!scalar_shift_arg && was_scalar_shift_arg)((void)(!(!scalar_shift_arg && was_scalar_shift_arg) ?
fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 5771, __FUNCTION__), 0 : 0))
;
5772 op1 = fold_convert (TREE_TYPE (vectype), op1)fold_convert_loc (((location_t) 0), ((contains_struct_check (
(vectype), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 5772, __FUNCTION__))->typed.type), op1)
;
5773 if (dt[1] != vect_constant_def)
5774 op1 = vect_init_vector (vinfo, stmt_info, op1,
5775 TREE_TYPE (vectype)((contains_struct_check ((vectype), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 5775, __FUNCTION__))->typed.type)
, NULLnullptr);
5776 }
5777
5778 /* Handle def. */
5779 vec_dest = vect_create_destination_var (scalar_dest, vectype);
5780
5781 if (scalar_shift_arg && dt[1] != vect_internal_def)
5782 {
5783 /* Vector shl and shr insn patterns can be defined with scalar
5784 operand 2 (shift operand). In this case, use constant or loop
5785 invariant op1 directly, without extending it to vector mode
5786 first. */
5787 optab_op2_mode = insn_data[icode].operand[2].mode;
5788 if (!VECTOR_MODE_P (optab_op2_mode)(((enum mode_class) mode_class[optab_op2_mode]) == MODE_VECTOR_BOOL
|| ((enum mode_class) mode_class[optab_op2_mode]) == MODE_VECTOR_INT
|| ((enum mode_class) mode_class[optab_op2_mode]) == MODE_VECTOR_FLOAT
|| ((enum mode_class) mode_class[optab_op2_mode]) == MODE_VECTOR_FRACT
|| ((enum mode_class) mode_class[optab_op2_mode]) == MODE_VECTOR_UFRACT
|| ((enum mode_class) mode_class[optab_op2_mode]) == MODE_VECTOR_ACCUM
|| ((enum mode_class) mode_class[optab_op2_mode]) == MODE_VECTOR_UACCUM
)
)
5789 {
5790 if (dump_enabled_p ())
5791 dump_printf_loc (MSG_NOTE, vect_location,
5792 "operand 1 using scalar mode.\n");
5793 vec_oprnd1 = op1;
5794 vec_oprnds1.create (slp_node ? slp_node->vec_stmts_size : ncopies);
5795 vec_oprnds1.quick_push (vec_oprnd1);
5796 /* Store vec_oprnd1 for every vector stmt to be created.
5797 We check during the analysis that all the shift arguments
5798 are the same.
5799 TODO: Allow different constants for different vector
5800 stmts generated for an SLP instance. */
5801 for (k = 0;
5802 k < (slp_node ? slp_node->vec_stmts_size - 1 : ncopies - 1); k++)
5803 vec_oprnds1.quick_push (vec_oprnd1);
5804 }
5805 }
5806 else if (!scalar_shift_arg && slp_node && incompatible_op1_vectype_p)
5807 {
5808 if (was_scalar_shift_arg)
5809 {
5810 /* If the argument was the same in all lanes create
5811 the correctly typed vector shift amount directly. */
5812 op1 = fold_convert (TREE_TYPE (vectype), op1)fold_convert_loc (((location_t) 0), ((contains_struct_check (
(vectype), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 5812, __FUNCTION__))->typed.type), op1)
;
5813 op1 = vect_init_vector (vinfo, stmt_info, op1, TREE_TYPE (vectype)((contains_struct_check ((vectype), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 5813, __FUNCTION__))->typed.type)
,
5814 !loop_vinfo ? gsi : NULLnullptr);
5815 vec_oprnd1 = vect_init_vector (vinfo, stmt_info, op1, vectype,
5816 !loop_vinfo ? gsi : NULLnullptr);
5817 vec_oprnds1.create (slp_node->vec_stmts_size);
5818 for (k = 0; k < slp_node->vec_stmts_size; k++)
5819 vec_oprnds1.quick_push (vec_oprnd1);
5820 }
5821 else if (dt[1] == vect_constant_def)
5822 /* The constant shift amount has been adjusted in place. */
5823 ;
5824 else
5825 gcc_assert (TYPE_MODE (op1_vectype) == TYPE_MODE (vectype))((void)(!(((((enum tree_code) ((tree_class_check ((op1_vectype
), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 5825, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(op1_vectype) : (op1_vectype)->type_common.mode) == ((((enum
tree_code) ((tree_class_check ((vectype), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 5825, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(vectype) : (vectype)->type_common.mode)) ? fancy_abort (
"/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 5825, __FUNCTION__), 0 : 0))
;
5826 }
5827
5828 /* vec_oprnd1 is available if operand 1 should be of a scalar-type
5829 (a special case for certain kind of vector shifts); otherwise,
5830 operand 1 should be of a vector type (the usual case). */
5831 vect_get_vec_defs (vinfo, stmt_info, slp_node, ncopies,
5832 op0, &vec_oprnds0,
5833 vec_oprnd1 ? NULL_TREE(tree) nullptr : op1, &vec_oprnds1);
5834
5835 /* Arguments are ready. Create the new vector stmt. */
5836 FOR_EACH_VEC_ELT (vec_oprnds0, i, vop0)for (i = 0; (vec_oprnds0).iterate ((i), &(vop0)); ++(i))
5837 {
5838 /* For internal defs where we need to use a scalar shift arg
5839 extract the first lane. */
5840 if (scalar_shift_arg && dt[1] == vect_internal_def)
5841 {
5842 vop1 = vec_oprnds1[0];
5843 new_temp = make_ssa_name (TREE_TYPE (TREE_TYPE (vop1))((contains_struct_check ((((contains_struct_check ((vop1), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 5843, __FUNCTION__))->typed.type)), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 5843, __FUNCTION__))->typed.type)
);
5844 gassign *n