Bug Summary

File: build/gcc/omp-expand.c
Warning: line 261, column 7
Called C++ object pointer is null
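In short: on the path the analyzer reconstructs, the vec<tree, va_gc> *ws_args pointer in get_ws_args_for is still treated as possibly null when quick_push is called on it at line 261, because the analyzer does not conclude that vec_alloc always allocates the vector. The pattern, condensed from the annotated source below:

    /* Condensed from get_ws_args_for (omp-expand.c, lines 234-261); see the
       annotated path below for the full context.  */
    vec<tree, va_gc> *ws_args;
    /* ... */
    vec_alloc (ws_args, 3 + (fd.chunk_size != 0));  /* steps 69-78: chunk_size assumed null,
                                                       vec_alloc entered and left without the
                                                       analyzer proving ws_args != NULL  */
    t = fold_convert_loc (loc, long_integer_type_node, n1);
    ws_args->quick_push (t);                        /* step 79: "Called C++ object pointer
                                                       is null"  */

Since vec_alloc with a non-zero element count reserves storage and sets the pointer, this is most likely a false positive stemming from how the analyzer models the va_gc allocation path rather than a genuine null dereference.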

Annotated Source Code

clang -cc1 -cc1 -triple x86_64-unknown-linux-gnu -analyze -disable-free -disable-llvm-verifier -discard-value-names -main-file-name omp-expand.c -analyzer-store=region -analyzer-opt-analyze-nested-blocks -analyzer-checker=core -analyzer-checker=apiModeling -analyzer-checker=unix -analyzer-checker=deadcode -analyzer-checker=cplusplus -analyzer-checker=security.insecureAPI.UncheckedReturn -analyzer-checker=security.insecureAPI.getpw -analyzer-checker=security.insecureAPI.gets -analyzer-checker=security.insecureAPI.mktemp -analyzer-checker=security.insecureAPI.mkstemp -analyzer-checker=security.insecureAPI.vfork -analyzer-checker=nullability.NullPassedToNonnull -analyzer-checker=nullability.NullReturnedFromNonnull -analyzer-output plist -w -setup-static-analyzer -analyzer-config-compatibility-mode=true -mrelocation-model static -mframe-pointer=none -fmath-errno -fno-rounding-math -mconstructor-aliases -munwind-tables -target-cpu x86-64 -tune-cpu generic -debugger-tuning=gdb -fcoverage-compilation-dir=/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/objdir/gcc -resource-dir /usr/lib64/clang/13.0.0 -D IN_GCC -D HAVE_CONFIG_H -I . -I . -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/. -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../include -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libcpp/include -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libcody -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libdecnumber -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libdecnumber/bid -I ../libdecnumber -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libbacktrace -internal-isystem /usr/bin/../lib64/gcc/x86_64-suse-linux/11/../../../../include/c++/11 -internal-isystem /usr/bin/../lib64/gcc/x86_64-suse-linux/11/../../../../include/c++/11/x86_64-suse-linux -internal-isystem /usr/bin/../lib64/gcc/x86_64-suse-linux/11/../../../../include/c++/11/backward -internal-isystem /usr/lib64/clang/13.0.0/include -internal-isystem /usr/local/include -internal-isystem /usr/bin/../lib64/gcc/x86_64-suse-linux/11/../../../../x86_64-suse-linux/include -internal-externc-isystem /include -internal-externc-isystem /usr/include -O2 -Wno-narrowing -Wwrite-strings -Wno-error=format-diag -Wno-long-long -Wno-variadic-macros -Wno-overlength-strings -fdeprecated-macro -fdebug-compilation-dir=/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/objdir/gcc -ferror-limit 19 -fno-rtti -fgnuc-version=4.2.1 -vectorize-loops -vectorize-slp -analyzer-output=plist-html -analyzer-config silence-checkers=core.NullDereference -faddrsig -D__GCC_HAVE_DWARF2_CFI_ASM=1 -o /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/objdir/clang-static-analyzer/2021-11-20-133755-20252-1/report-NuKClD.plist -x c++ /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c

/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c

1/* Expansion pass for OMP directives. Outlines regions of certain OMP
2 directives to separate functions, converts others into explicit calls to the
3 runtime library (libgomp) and so forth.
4
5Copyright (C) 2005-2021 Free Software Foundation, Inc.
6
7This file is part of GCC.
8
9GCC is free software; you can redistribute it and/or modify it under
10the terms of the GNU General Public License as published by the Free
11Software Foundation; either version 3, or (at your option) any later
12version.
13
14GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15WARRANTY; without even the implied warranty of MERCHANTABILITY or
16FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17for more details.
18
19You should have received a copy of the GNU General Public License
20along with GCC; see the file COPYING3. If not see
21<http://www.gnu.org/licenses/>. */
22
23#include "config.h"
24#include "system.h"
25#include "coretypes.h"
26#include "memmodel.h"
27#include "backend.h"
28#include "target.h"
29#include "rtl.h"
30#include "tree.h"
31#include "gimple.h"
32#include "cfghooks.h"
33#include "tree-pass.h"
34#include "ssa.h"
35#include "optabs.h"
36#include "cgraph.h"
37#include "pretty-print.h"
38#include "diagnostic-core.h"
39#include "fold-const.h"
40#include "stor-layout.h"
41#include "cfganal.h"
42#include "internal-fn.h"
43#include "gimplify.h"
44#include "gimple-iterator.h"
45#include "gimplify-me.h"
46#include "gimple-walk.h"
47#include "tree-cfg.h"
48#include "tree-into-ssa.h"
49#include "tree-ssa.h"
50#include "splay-tree.h"
51#include "cfgloop.h"
52#include "omp-general.h"
53#include "omp-offload.h"
54#include "tree-cfgcleanup.h"
55#include "alloc-pool.h"
56#include "symbol-summary.h"
57#include "gomp-constants.h"
58#include "gimple-pretty-print.h"
59#include "stringpool.h"
60#include "attribs.h"
61#include "tree-eh.h"
62#include "opts.h"
63
64/* OMP region information. Every parallel and workshare
65 directive is enclosed between two markers, the OMP_* directive
66 and a corresponding GIMPLE_OMP_RETURN statement. */
67
68struct omp_region
69{
70 /* The enclosing region. */
71 struct omp_region *outer;
72
73 /* First child region. */
74 struct omp_region *inner;
75
76 /* Next peer region. */
77 struct omp_region *next;
78
79 /* Block containing the omp directive as its last stmt. */
80 basic_block entry;
81
82 /* Block containing the GIMPLE_OMP_RETURN as its last stmt. */
83 basic_block exit;
84
85 /* Block containing the GIMPLE_OMP_CONTINUE as its last stmt. */
86 basic_block cont;
87
88 /* If this is a combined parallel+workshare region, this is a list
89 of additional arguments needed by the combined parallel+workshare
90 library call. */
91 vec<tree, va_gc> *ws_args;
92
93 /* The code for the omp directive of this region. */
94 enum gimple_code type;
95
96 /* Schedule kind, only used for GIMPLE_OMP_FOR type regions. */
97 enum omp_clause_schedule_kind sched_kind;
98
99 /* Schedule modifiers. */
100 unsigned char sched_modifiers;
101
102 /* True if this is a combined parallel+workshare region. */
103 bool is_combined_parallel;
104
105 /* Copy of fd.lastprivate_conditional != 0. */
106 bool has_lastprivate_conditional;
107
108 /* The ordered stmt if type is GIMPLE_OMP_ORDERED and it has
109 a depend clause. */
110 gomp_ordered *ord_stmt;
111};
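The outer/inner/next pointers give the usual first-child/next-sibling encoding of the region tree rooted at root_omp_region. A minimal traversal sketch (not part of omp-expand.c, just an illustration of how dump_omp_region and free_omp_region_1 below walk the tree):

    static void
    walk_omp_regions (struct omp_region *region, int depth)
    {
      for (; region; region = region->next)            /* peer regions  */
        {
          fprintf (stderr, "%*sregion type %d\n", depth, "", (int) region->type);
          walk_omp_regions (region->inner, depth + 2);  /* nested regions  */
        }
    }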
112
113static struct omp_region *root_omp_region;
114static bool omp_any_child_fn_dumped;
115
116static void expand_omp_build_assign (gimple_stmt_iterator *, tree, tree,
117 bool = false);
118static gphi *find_phi_with_arg_on_edge (tree, edge);
119static void expand_omp (struct omp_region *region);
120
121/* Return true if REGION is a combined parallel+workshare region. */
122
123static inline bool
124is_combined_parallel (struct omp_region *region)
125{
126 return region->is_combined_parallel;
127}
128
129/* Given two blocks PAR_ENTRY_BB and WS_ENTRY_BB such that WS_ENTRY_BB
130 is the immediate dominator of PAR_ENTRY_BB, return true if there
131 are no data dependencies that would prevent expanding the parallel
132 directive at PAR_ENTRY_BB as a combined parallel+workshare region.
133
134 When expanding a combined parallel+workshare region, the call to
135 the child function may need additional arguments in the case of
136 GIMPLE_OMP_FOR regions. In some cases, these arguments are
137 computed out of variables passed in from the parent to the child
138 via 'struct .omp_data_s'. For instance:
139
140 #pragma omp parallel for schedule (guided, i * 4)
141 for (j ...)
142
143 Is lowered into:
144
145 # BLOCK 2 (PAR_ENTRY_BB)
146 .omp_data_o.i = i;
147 #pragma omp parallel [child fn: bar.omp_fn.0 ( ..., D.1598)
148
149 # BLOCK 3 (WS_ENTRY_BB)
150 .omp_data_i = &.omp_data_o;
151 D.1667 = .omp_data_i->i;
152 D.1598 = D.1667 * 4;
153 #pragma omp for schedule (guided, D.1598)
154
155 When we outline the parallel region, the call to the child function
156 'bar.omp_fn.0' will need the value D.1598 in its argument list, but
157 that value is computed *after* the call site. So, in principle we
158 cannot do the transformation.
159
160 To see whether the code in WS_ENTRY_BB blocks the combined
161 parallel+workshare call, we collect all the variables used in the
162 GIMPLE_OMP_FOR header check whether they appear on the LHS of any
163 statement in WS_ENTRY_BB. If so, then we cannot emit the combined
164 call.
165
166 FIXME. If we had the SSA form built at this point, we could merely
167 hoist the code in block 3 into block 2 and be done with it. But at
168 this point we don't have dataflow information and though we could
169 hack something up here, it is really not worth the aggravation. */
170
171static bool
172workshare_safe_to_combine_p (basic_block ws_entry_bb)
173{
174 struct omp_for_data fd;
175 gimple *ws_stmt = last_stmt (ws_entry_bb);
176
177 if (gimple_code (ws_stmt) == GIMPLE_OMP_SECTIONS)
    [29] Assuming the condition is true
    [30] Taking true branch
178 return true;
    [31] Returning the value 1, which participates in a condition later
179
180 gcc_assert (gimple_code (ws_stmt) == GIMPLE_OMP_FOR);
181 if (gimple_omp_for_kind (ws_stmt) != GF_OMP_FOR_KIND_FOR)
182 return false;
183
184 omp_extract_for_data (as_a <gomp_for *> (ws_stmt), &fd, NULL);
185
186 if (fd.collapse > 1 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
187 return false;
188 if (fd.iter_type != long_integer_type_node)
189 return false;
190
191 /* FIXME. We give up too easily here. If any of these arguments
192 are not constants, they will likely involve variables that have
193 been mapped into fields of .omp_data_s for sharing with the child
194 function. With appropriate data flow, it would be possible to
195 see through this. */
196 if (!is_gimple_min_invariant (fd.loop.n1)
197 || !is_gimple_min_invariant (fd.loop.n2)
198 || !is_gimple_min_invariant (fd.loop.step)
199 || (fd.chunk_size && !is_gimple_min_invariant (fd.chunk_size)))
200 return false;
201
202 return true;
203}
204
205/* Adjust CHUNK_SIZE from SCHEDULE clause, depending on simd modifier
206 presence (SIMD_SCHEDULE). */
207
208static tree
209omp_adjust_chunk_size (tree chunk_size, bool simd_schedule)
210{
211 if (!simd_schedule || integer_zerop (chunk_size))
212 return chunk_size;
213
214 poly_uint64 vf = omp_max_vf ();
215 if (known_eq (vf, 1U))
216 return chunk_size;
217
218 tree type = TREE_TYPE (chunk_size);
219 chunk_size = fold_build2 (PLUS_EXPR, type, chunk_size,
220 build_int_cst (type, vf - 1));
221 return fold_build2 (BIT_AND_EXPR, type, chunk_size,
222 build_int_cst (type, -vf));
223}
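For a non-zero chunk size with the simd modifier, the function above rounds the chunk size up to a multiple of the maximum vectorization factor using the usual (x + vf - 1) & -vf idiom. The same arithmetic on plain integers, purely as an illustration (assuming vf is a power of two, as vectorization factors normally are):

    /* Illustrative only: chunk 5, vf 4 -> 8;  chunk 8, vf 4 -> 8.  */
    static unsigned
    round_chunk_up (unsigned chunk_size, unsigned vf)
    {
      return (chunk_size + vf - 1) & -vf;
    }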
224
225 /* Collect additional arguments needed to emit a combined
226 parallel+workshare call. WS_STMT is the workshare directive being
227 expanded. */
228
229 static vec<tree, va_gc> *
230 get_ws_args_for (gimple *par_stmt, gimple *ws_stmt)
231 {
232 tree t;
233 location_t loc = gimple_location (ws_stmt);
234 vec<tree, va_gc> *ws_args;
235
236 if (gomp_for *for_stmt = dyn_cast <gomp_for *> (ws_stmt))
    [50] Calling 'dyn_cast<gomp_for *, gimple>'
    [59] Returning from 'dyn_cast<gomp_for *, gimple>'
    [59.1] 'for_stmt' is non-null
    [60] Taking true branch
237 {
238 struct omp_for_data fd;
239 tree n1, n2;
240
241 omp_extract_for_data (for_stmt, &fd, NULL);
242 n1 = fd.loop.n1;
243 n2 = fd.loop.n2;
244
245 if (gimple_omp_for_combined_into_p (for_stmt))
    [61] Calling 'gimple_omp_for_combined_into_p'
    [67] Returning from 'gimple_omp_for_combined_into_p'
    [68] Taking false branch
246 {
247 tree innerc
248 = omp_find_clause (gimple_omp_parallel_clauses (par_stmt),
249 OMP_CLAUSE__LOOPTEMP_);
250 gcc_assert (innerc);
251 n1 = OMP_CLAUSE_DECL (innerc);
252 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
253 OMP_CLAUSE__LOOPTEMP_);
254 gcc_assert (innerc);
255 n2 = OMP_CLAUSE_DECL (innerc);
256 }
257
258 vec_alloc (ws_args, 3 + (fd.chunk_size != 0));
    [69] Assuming field 'chunk_size' is equal to null
    [70] Calling 'vec_alloc<tree_node *, va_gc>'
    [78] Returning from 'vec_alloc<tree_node *, va_gc>'
259
260 t = fold_convert_loc (loc, long_integer_type_node, n1);
261 ws_args->quick_push (t);
    [79] Called C++ object pointer is null
262
263 t = fold_convert_loc (loc, long_integer_type_node, n2);
264 ws_args->quick_push (t);
265
266 t = fold_convert_loc (loc, long_integer_type_node, fd.loop.step);
267 ws_args->quick_push (t);
268
269 if (fd.chunk_size)
270 {
271 t = fold_convert_loc (loc, long_integer_type_node, fd.chunk_size);
272 t = omp_adjust_chunk_size (t, fd.simd_schedule);
273 ws_args->quick_push (t);
274 }
275
276 return ws_args;
277 }
278 else if (gimple_code (ws_stmt) == GIMPLE_OMP_SECTIONS)
279 {
280 /* Number of sections is equal to the number of edges from the
281 GIMPLE_OMP_SECTIONS_SWITCH statement, except for the one to
282 the exit of the sections region. */
283 basic_block bb = single_succ (gimple_bb (ws_stmt));
284 t = build_int_cst (unsigned_type_node, EDGE_COUNT (bb->succs) - 1);
285 vec_alloc (ws_args, 1);
286 ws_args->quick_push (t);
287 return ws_args;
288 }
289
290 gcc_unreachable ();
291 }
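On the path above, fd.chunk_size is assumed to be null (step 69), vec_alloc is entered and left (steps 70/78) without the analyzer concluding that ws_args was assigned a non-null vector, and the quick_push at line 261 is therefore flagged (step 79). In the real code, vec_alloc with a count of at least 3 always allocates, so this looks like a limitation of the analyzer's modelling of the va_gc allocation path. Purely as an illustration (not a proposed patch), making the invariant explicit after the allocation would cut the null path the analyzer follows:

    /* Illustrative only: an explicit assertion after the allocation makes the
       non-null invariant visible to the analyzer.  */
    vec_alloc (ws_args, 3 + (fd.chunk_size != 0));
    gcc_assert (ws_args != NULL);
    ws_args->quick_push (t);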
292
293 /* Discover whether REGION is a combined parallel+workshare region. */
294
295 static void
296 determine_parallel_type (struct omp_region *region)
297 {
298 basic_block par_entry_bb, par_exit_bb;
299 basic_block ws_entry_bb, ws_exit_bb;
300
301 if (region == NULL || region->inner == NULL
    [18] Assuming the condition is false
    [22] Taking false branch
302 || region->exit == NULL || region->inner->exit == NULL
    [19] Assuming the condition is false
    [20] Assuming the condition is false
303 || region->inner->cont == NULL)
    [21] Assuming the condition is false
304 return;
305
306 /* We only support parallel+for and parallel+sections. */
307 if (region->type != GIMPLE_OMP_PARALLEL
    [22.1] Field 'type' is equal to GIMPLE_OMP_PARALLEL
308 || (region->inner->type != GIMPLE_OMP_FOR
    [23] Assuming field 'type' is equal to GIMPLE_OMP_FOR
309 && region->inner->type != GIMPLE_OMP_SECTIONS))
310 return;
311
312 /* Check for perfect nesting PAR_ENTRY_BB -> WS_ENTRY_BB and
313 WS_EXIT_BB -> PAR_EXIT_BB. */
314 par_entry_bb = region->entry;
315 par_exit_bb = region->exit;
316 ws_entry_bb = region->inner->entry;
317 ws_exit_bb = region->inner->exit;
318
319 /* Give up for task reductions on the parallel, while it is implementable,
320 adding another big set of APIs or slowing down the normal paths is
321 not acceptable. */
322 tree pclauses = gimple_omp_parallel_clauses (last_stmt (par_entry_bb));
323 if (omp_find_clause (pclauses, OMP_CLAUSE__REDUCTEMP_))
    [24] Assuming the condition is false
    [25] Taking false branch
324 return;
325
326 if (single_succ (par_entry_bb) == ws_entry_bb
    [26] Assuming the condition is true
    [42] Taking true branch
327 && single_succ (ws_exit_bb) == par_exit_bb
    [27] Assuming the condition is true
328 && workshare_safe_to_combine_p (ws_entry_bb)
    [28] Calling 'workshare_safe_to_combine_p'
    [32] Returning from 'workshare_safe_to_combine_p'
329 && (gimple_omp_parallel_combined_p (last_stmt (par_entry_bb))
    [33] Calling 'gimple_omp_parallel_combined_p'
    [39] Returning from 'gimple_omp_parallel_combined_p'
330 || (last_and_only_stmt (ws_entry_bb)
    [40] Assuming the condition is true
331 && last_and_only_stmt (par_exit_bb))))
    [41] Assuming the condition is true
332 {
333 gimple *par_stmt = last_stmt (par_entry_bb);
334 gimple *ws_stmt = last_stmt (ws_entry_bb);
335
336 if (region->inner->type == GIMPLE_OMP_FOR)
    [42.1] Field 'type' is equal to GIMPLE_OMP_FOR
    [43] Taking true branch
337 {
338 /* If this is a combined parallel loop, we need to determine
339 whether or not to use the combined library calls. There
340 are two cases where we do not apply the transformation:
341 static loops and any kind of ordered loop. In the first
342 case, we already open code the loop so there is no need
343 to do anything else. In the latter case, the combined
344 parallel loop call would still need extra synchronization
345 to implement ordered semantics, so there would not be any
346 gain in using the combined call. */
347 tree clauses = gimple_omp_for_clauses (ws_stmt);
348 tree c = omp_find_clause (clauses, OMP_CLAUSE_SCHEDULE);
349 if (c == NULL
    [44] Assuming the condition is false
350 || ((OMP_CLAUSE_SCHEDULE_KIND (c) & OMP_CLAUSE_SCHEDULE_MASK)
    [45] Assuming the condition is false
351 == OMP_CLAUSE_SCHEDULE_STATIC)
352 || omp_find_clause (clauses, OMP_CLAUSE_ORDERED)
    [46] Assuming the condition is false
353 || omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_)
    [47] Assuming the condition is false
354 || ((c = omp_find_clause (clauses, OMP_CLAUSE__CONDTEMP_))
    [48] Assuming 'c' is null
355 && POINTER_TYPE_P (TREE_TYPE (OMP_CLAUSE_DECL (c)))))
356 return;
357 }
358 else if (region->inner->type == GIMPLE_OMP_SECTIONS
359 && (omp_find_clause (gimple_omp_sections_clauses (ws_stmt),
360 OMP_CLAUSE__REDUCTEMP_)
361 || omp_find_clause (gimple_omp_sections_clauses (ws_stmt),
362 OMP_CLAUSE__CONDTEMP_)))
363 return;
364
365 region->is_combined_parallel = true;
366 region->inner->is_combined_parallel = true;
367 region->ws_args = get_ws_args_for (par_stmt, ws_stmt);
    [49] Calling 'get_ws_args_for'
368 }
369 }
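For reference, determine_parallel_type takes this path for a combined construct whose inner loop has a non-static, non-ordered schedule; hypothetical user code in the spirit of the example in the comment before workshare_safe_to_combine_p:

    #pragma omp parallel for schedule (guided, chunk)
    for (int i = 0; i < n; i++)
      work (i);

With such a schedule, the region and its inner loop are marked is_combined_parallel and get_ws_args_for collects the loop bounds, step, and chunk size that the combined GOMP_parallel_loop_* library call will need.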
370
371/* Debugging dumps for parallel regions. */
372void dump_omp_region (FILE *, struct omp_region *, int);
373void debug_omp_region (struct omp_region *);
374void debug_all_omp_regions (void);
375
376/* Dump the parallel region tree rooted at REGION. */
377
378void
379dump_omp_region (FILE *file, struct omp_region *region, int indent)
380{
381 fprintf (file, "%*sbb %d: %s\n", indent, "", region->entry->index,
382 gimple_code_name[region->type]);
383
384 if (region->inner)
385 dump_omp_region (file, region->inner, indent + 4);
386
387 if (region->cont)
388 {
389 fprintf (file, "%*sbb %d: GIMPLE_OMP_CONTINUE\n", indent, "",
390 region->cont->index);
391 }
392
393 if (region->exit)
394 fprintf (file, "%*sbb %d: GIMPLE_OMP_RETURN\n", indent, "",
395 region->exit->index);
396 else
397 fprintf (file, "%*s[no exit marker]\n", indent, "");
398
399 if (region->next)
400 dump_omp_region (file, region->next, indent);
401}
402
403 DEBUG_FUNCTION void
404 debug_omp_region (struct omp_region *region)
405 {
406 dump_omp_region (stderr, region, 0);
407 }
408
409 DEBUG_FUNCTION void
410 debug_all_omp_regions (void)
411 {
412 dump_omp_region (stderr, root_omp_region, 0);
413 }
414
415/* Create a new parallel region starting at STMT inside region PARENT. */
416
417static struct omp_region *
418new_omp_region (basic_block bb, enum gimple_code type,
419 struct omp_region *parent)
420{
421 struct omp_region *region = XCNEW (struct omp_region);
422
423 region->outer = parent;
424 region->entry = bb;
425 region->type = type;
426
427 if (parent)
428 {
429 /* This is a nested region. Add it to the list of inner
430 regions in PARENT. */
431 region->next = parent->inner;
432 parent->inner = region;
433 }
434 else
435 {
436 /* This is a toplevel region. Add it to the list of toplevel
437 regions in ROOT_OMP_REGION. */
438 region->next = root_omp_region;
439 root_omp_region = region;
440 }
441
442 return region;
443}
444
445/* Release the memory associated with the region tree rooted at REGION. */
446
447static void
448free_omp_region_1 (struct omp_region *region)
449{
450 struct omp_region *i, *n;
451
452 for (i = region->inner; i ; i = n)
453 {
454 n = i->next;
455 free_omp_region_1 (i);
456 }
457
458 free (region);
459}
460
461/* Release the memory for the entire omp region tree. */
462
463void
464omp_free_regions (void)
465{
466 struct omp_region *r, *n;
467 for (r = root_omp_region; r ; r = n)
468 {
469 n = r->next;
470 free_omp_region_1 (r);
471 }
472 root_omp_region = NULL;
473}
474
475/* A convenience function to build an empty GIMPLE_COND with just the
476 condition. */
477
478static gcond *
479gimple_build_cond_empty (tree cond)
480{
481 enum tree_code pred_code;
482 tree lhs, rhs;
483
484 gimple_cond_get_ops_from_tree (cond, &pred_code, &lhs, &rhs);
485 return gimple_build_cond (pred_code, lhs, rhs, NULL_TREE, NULL_TREE);
486}
487
488/* Change DECL_CONTEXT of CHILD_FNDECL to that of the parent function.
489 Add CHILD_FNDECL to decl chain of the supercontext of the block
490 ENTRY_BLOCK - this is the block which originally contained the
491 code from which CHILD_FNDECL was created.
492
493 Together, these actions ensure that the debug info for the outlined
494 function will be emitted with the correct lexical scope. */
495
496static void
497adjust_context_and_scope (struct omp_region *region, tree entry_block,
498 tree child_fndecl)
499{
500 tree parent_fndecl = NULL_TREE;
501 gimple *entry_stmt;
502 /* OMP expansion expands inner regions before outer ones, so if
503 we e.g. have explicit task region nested in parallel region, when
504 expanding the task region current_function_decl will be the original
505 source function, but we actually want to use as context the child
506 function of the parallel. */
507 for (region = region->outer;
508 region && parent_fndecl == NULL_TREE; region = region->outer)
509 switch (region->type)
510 {
511 case GIMPLE_OMP_PARALLEL:
512 case GIMPLE_OMP_TASK:
513 case GIMPLE_OMP_TEAMS:
514 entry_stmt = last_stmt (region->entry);
515 parent_fndecl = gimple_omp_taskreg_child_fn (entry_stmt);
516 break;
517 case GIMPLE_OMP_TARGET:
518 entry_stmt = last_stmt (region->entry);
519 parent_fndecl
520 = gimple_omp_target_child_fn (as_a <gomp_target *> (entry_stmt));
521 break;
522 default:
523 break;
524 }
525
526 if (parent_fndecl == NULL_TREE)
527 parent_fndecl = current_function_decl;
528 DECL_CONTEXT (child_fndecl) = parent_fndecl;
529
530 if (entry_block != NULL_TREE && TREE_CODE (entry_block) == BLOCK)
531 {
532 tree b = BLOCK_SUPERCONTEXT (entry_block);
533 if (TREE_CODE (b) == BLOCK)
534 {
535 DECL_CHAIN (child_fndecl) = BLOCK_VARS (b);
536 BLOCK_VARS (b) = child_fndecl;
537 }
538 }
539}
540
541/* Build the function calls to GOMP_parallel etc to actually
542 generate the parallel operation. REGION is the parallel region
543 being expanded. BB is the block where to insert the code. WS_ARGS
544 will be set if this is a call to a combined parallel+workshare
545 construct, it contains the list of additional arguments needed by
546 the workshare construct. */
547
548static void
549expand_parallel_call (struct omp_region *region, basic_block bb,
550 gomp_parallel *entry_stmt,
551 vec<tree, va_gc> *ws_args)
552{
553 tree t, t1, t2, val, cond, c, clauses, flags;
554 gimple_stmt_iterator gsi;
555 gimple *stmt;
556 enum built_in_function start_ix;
557 int start_ix2;
558 location_t clause_loc;
559 vec<tree, va_gc> *args;
560
561 clauses = gimple_omp_parallel_clauses (entry_stmt);
562
563 /* Determine what flavor of GOMP_parallel we will be
564 emitting. */
565 start_ix = BUILT_IN_GOMP_PARALLEL;
566 tree rtmp = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
567 if (rtmp)
568 start_ix = BUILT_IN_GOMP_PARALLEL_REDUCTIONS;
569 else if (is_combined_parallel (region))
570 {
571 switch (region->inner->type)
572 {
573 case GIMPLE_OMP_FOR:
574 gcc_assert (region->inner->sched_kind != OMP_CLAUSE_SCHEDULE_AUTO)((void)(!(region->inner->sched_kind != OMP_CLAUSE_SCHEDULE_AUTO
) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 574, __FUNCTION__), 0 : 0))
;
575 switch (region->inner->sched_kind)
576 {
577 case OMP_CLAUSE_SCHEDULE_RUNTIME:
578 /* For lastprivate(conditional:), our implementation
579 requires monotonic behavior. */
580 if (region->inner->has_lastprivate_conditional != 0)
581 start_ix2 = 3;
582 else if ((region->inner->sched_modifiers
583 & OMP_CLAUSE_SCHEDULE_NONMONOTONIC) != 0)
584 start_ix2 = 6;
585 else if ((region->inner->sched_modifiers
586 & OMP_CLAUSE_SCHEDULE_MONOTONIC) == 0)
587 start_ix2 = 7;
588 else
589 start_ix2 = 3;
590 break;
591 case OMP_CLAUSE_SCHEDULE_DYNAMIC:
592 case OMP_CLAUSE_SCHEDULE_GUIDED:
593 if ((region->inner->sched_modifiers
594 & OMP_CLAUSE_SCHEDULE_MONOTONIC) == 0
595 && !region->inner->has_lastprivate_conditional)
596 {
597 start_ix2 = 3 + region->inner->sched_kind;
598 break;
599 }
600 /* FALLTHRU */
601 default:
602 start_ix2 = region->inner->sched_kind;
603 break;
604 }
605 start_ix2 += (int) BUILT_IN_GOMP_PARALLEL_LOOP_STATIC;
606 start_ix = (enum built_in_function) start_ix2;
607 break;
608 case GIMPLE_OMP_SECTIONS:
609 start_ix = BUILT_IN_GOMP_PARALLEL_SECTIONS;
610 break;
611 default:
612 gcc_unreachable ()(fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 612, __FUNCTION__))
;
613 }
614 }
615
616 /* By default, the value of NUM_THREADS is zero (selected at run time)
617 and there is no conditional. */
618 cond = NULL_TREE(tree) nullptr;
619 val = build_int_cst (unsigned_type_nodeinteger_types[itk_unsigned_int], 0);
620 flags = build_int_cst (unsigned_type_nodeinteger_types[itk_unsigned_int], 0);
621
622 c = omp_find_clause (clauses, OMP_CLAUSE_IF);
623 if (c)
624 cond = OMP_CLAUSE_IF_EXPR (c)(*(omp_clause_elt_check (((omp_clause_subcode_check ((c), (OMP_CLAUSE_IF
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 624, __FUNCTION__))), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 624, __FUNCTION__)))
;
625
626 c = omp_find_clause (clauses, OMP_CLAUSE_NUM_THREADS);
627 if (c)
628 {
629 val = OMP_CLAUSE_NUM_THREADS_EXPR (c)(*(omp_clause_elt_check (((omp_clause_subcode_check ((c), (OMP_CLAUSE_NUM_THREADS
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 629, __FUNCTION__))), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 629, __FUNCTION__)))
;
630 clause_loc = OMP_CLAUSE_LOCATION (c)((tree_check ((c), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 630, __FUNCTION__, (OMP_CLAUSE))))->omp_clause.locus
;
631 }
632 else
633 clause_loc = gimple_location (entry_stmt);
634
635 c = omp_find_clause (clauses, OMP_CLAUSE_PROC_BIND);
636 if (c)
637 flags = build_int_cst (unsigned_type_nodeinteger_types[itk_unsigned_int], OMP_CLAUSE_PROC_BIND_KIND (c)((omp_clause_subcode_check ((c), (OMP_CLAUSE_PROC_BIND), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 637, __FUNCTION__))->omp_clause.subcode.proc_bind_kind)
);
638
639 /* Ensure 'val' is of the correct type. */
640 val = fold_convert_loc (clause_loc, unsigned_type_nodeinteger_types[itk_unsigned_int], val);
641
642 /* If we found the clause 'if (cond)', build either
643 (cond != 0) or (cond ? val : 1u). */
644 if (cond)
645 {
646 cond = gimple_boolify (cond);
647
648 if (integer_zerop (val))
649 val = fold_build2_loc (clause_loc,
650 EQ_EXPR, unsigned_type_nodeinteger_types[itk_unsigned_int], cond,
651 build_int_cst (TREE_TYPE (cond)((contains_struct_check ((cond), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 651, __FUNCTION__))->typed.type)
, 0));
652 else
653 {
654 basic_block cond_bb, then_bb, else_bb;
655 edge e, e_then, e_else;
656 tree tmp_then, tmp_else, tmp_join, tmp_var;
657
658 tmp_var = create_tmp_var (TREE_TYPE (val)((contains_struct_check ((val), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 658, __FUNCTION__))->typed.type)
);
659 if (gimple_in_ssa_p (cfun(cfun + 0)))
660 {
661 tmp_then = make_ssa_name (tmp_var);
662 tmp_else = make_ssa_name (tmp_var);
663 tmp_join = make_ssa_name (tmp_var);
664 }
665 else
666 {
667 tmp_then = tmp_var;
668 tmp_else = tmp_var;
669 tmp_join = tmp_var;
670 }
671
672 e = split_block_after_labels (bb);
673 cond_bb = e->src;
674 bb = e->dest;
675 remove_edge (e);
676
677 then_bb = create_empty_bb (cond_bb);
678 else_bb = create_empty_bb (then_bb);
679 set_immediate_dominator (CDI_DOMINATORS, then_bb, cond_bb);
680 set_immediate_dominator (CDI_DOMINATORS, else_bb, cond_bb);
681
682 stmt = gimple_build_cond_empty (cond);
683 gsi = gsi_start_bb (cond_bb);
684 gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
685
686 gsi = gsi_start_bb (then_bb);
687 expand_omp_build_assign (&gsi, tmp_then, val, true);
688
689 gsi = gsi_start_bb (else_bb);
690 expand_omp_build_assign (&gsi, tmp_else,
691 build_int_cst (unsigned_type_nodeinteger_types[itk_unsigned_int], 1),
692 true);
693
694 make_edge (cond_bb, then_bb, EDGE_TRUE_VALUE);
695 make_edge (cond_bb, else_bb, EDGE_FALSE_VALUE);
696 add_bb_to_loop (then_bb, cond_bb->loop_father);
697 add_bb_to_loop (else_bb, cond_bb->loop_father);
698 e_then = make_edge (then_bb, bb, EDGE_FALLTHRU);
699 e_else = make_edge (else_bb, bb, EDGE_FALLTHRU);
700
701 if (gimple_in_ssa_p (cfun(cfun + 0)))
702 {
703 gphi *phi = create_phi_node (tmp_join, bb);
704 add_phi_arg (phi, tmp_then, e_then, UNKNOWN_LOCATION((location_t) 0));
705 add_phi_arg (phi, tmp_else, e_else, UNKNOWN_LOCATION((location_t) 0));
706 }
707
708 val = tmp_join;
709 }
710
711 gsi = gsi_start_bb (bb);
712 val = force_gimple_operand_gsi (&gsi, val, true, NULL_TREE(tree) nullptr,
713 false, GSI_CONTINUE_LINKING);
714 }
715
716 gsi = gsi_last_nondebug_bb (bb);
717 t = gimple_omp_parallel_data_arg (entry_stmt);
718 if (t == NULLnullptr)
719 t1 = null_pointer_nodeglobal_trees[TI_NULL_POINTER];
720 else
721 t1 = build_fold_addr_expr (t)build_fold_addr_expr_loc (((location_t) 0), (t));
722 tree child_fndecl = gimple_omp_parallel_child_fn (entry_stmt);
723 t2 = build_fold_addr_expr (child_fndecl)build_fold_addr_expr_loc (((location_t) 0), (child_fndecl));
724
725 vec_alloc (args, 4 + vec_safe_length (ws_args));
726 args->quick_push (t2);
727 args->quick_push (t1);
728 args->quick_push (val);
729 if (ws_args)
730 args->splice (*ws_args);
731 args->quick_push (flags);
732
733 t = build_call_expr_loc_vec (UNKNOWN_LOCATION((location_t) 0),
734 builtin_decl_explicit (start_ix), args);
735
736 if (rtmp)
737 {
738 tree type = TREE_TYPE (OMP_CLAUSE_DECL (rtmp))((contains_struct_check (((*(omp_clause_elt_check (((omp_clause_range_check
(((tree_check ((rtmp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 738, __FUNCTION__, (OMP_CLAUSE)))), (OMP_CLAUSE_PRIVATE), (
OMP_CLAUSE__SCANTEMP_), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 738, __FUNCTION__))), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 738, __FUNCTION__)))), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 738, __FUNCTION__))->typed.type)
;
739 t = build2 (MODIFY_EXPR, type, OMP_CLAUSE_DECL (rtmp)(*(omp_clause_elt_check (((omp_clause_range_check (((tree_check
((rtmp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 739, __FUNCTION__, (OMP_CLAUSE)))), (OMP_CLAUSE_PRIVATE), (
OMP_CLAUSE__SCANTEMP_), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 739, __FUNCTION__))), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 739, __FUNCTION__)))
,
740 fold_convert (type,fold_convert_loc (((location_t) 0), type, fold_convert_loc ((
(location_t) 0), global_trees[TI_POINTER_SIZED_TYPE], t))
741 fold_convert (pointer_sized_int_node, t))fold_convert_loc (((location_t) 0), type, fold_convert_loc ((
(location_t) 0), global_trees[TI_POINTER_SIZED_TYPE], t))
);
742 }
743 force_gimple_operand_gsi (&gsi, t, true, NULL_TREE(tree) nullptr,
744 false, GSI_CONTINUE_LINKING);
745}
746
747/* Build the function call to GOMP_task to actually
748 generate the task operation. BB is the block where to insert the code. */
749
750static void
751expand_task_call (struct omp_region *region, basic_block bb,
752 gomp_task *entry_stmt)
753{
754 tree t1, t2, t3;
755 gimple_stmt_iterator gsi;
756 location_t loc = gimple_location (entry_stmt);
757
758 tree clauses = gimple_omp_task_clauses (entry_stmt);
759
760 tree ifc = omp_find_clause (clauses, OMP_CLAUSE_IF);
761 tree untied = omp_find_clause (clauses, OMP_CLAUSE_UNTIED);
762 tree mergeable = omp_find_clause (clauses, OMP_CLAUSE_MERGEABLE);
763 tree depend = omp_find_clause (clauses, OMP_CLAUSE_DEPEND);
764 tree finalc = omp_find_clause (clauses, OMP_CLAUSE_FINAL);
765 tree priority = omp_find_clause (clauses, OMP_CLAUSE_PRIORITY);
766 tree detach = omp_find_clause (clauses, OMP_CLAUSE_DETACH);
767
768 unsigned int iflags
769 = (untied ? GOMP_TASK_FLAG_UNTIED(1 << 0) : 0)
770 | (mergeable ? GOMP_TASK_FLAG_MERGEABLE(1 << 2) : 0)
771 | (depend ? GOMP_TASK_FLAG_DEPEND(1 << 3) : 0);
772
773 bool taskloop_p = gimple_omp_task_taskloop_p (entry_stmt);
774 tree startvar = NULL_TREE(tree) nullptr, endvar = NULL_TREE(tree) nullptr, step = NULL_TREE(tree) nullptr;
775 tree num_tasks = NULL_TREE(tree) nullptr;
776 bool ull = false;
777 if (taskloop_p)
778 {
779 gimple *g = last_stmt (region->outer->entry);
780 gcc_assert (gimple_code (g) == GIMPLE_OMP_FOR((void)(!(gimple_code (g) == GIMPLE_OMP_FOR && gimple_omp_for_kind
(g) == GF_OMP_FOR_KIND_TASKLOOP) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 781, __FUNCTION__), 0 : 0))
781 && gimple_omp_for_kind (g) == GF_OMP_FOR_KIND_TASKLOOP)((void)(!(gimple_code (g) == GIMPLE_OMP_FOR && gimple_omp_for_kind
(g) == GF_OMP_FOR_KIND_TASKLOOP) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 781, __FUNCTION__), 0 : 0))
;
782 struct omp_for_data fd;
783 omp_extract_for_data (as_a <gomp_for *> (g), &fd, NULLnullptr);
784 startvar = omp_find_clause (clauses, OMP_CLAUSE__LOOPTEMP_);
785 endvar = omp_find_clause (OMP_CLAUSE_CHAIN (startvar)((contains_struct_check (((tree_check ((startvar), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 785, __FUNCTION__, (OMP_CLAUSE)))), (TS_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 785, __FUNCTION__))->common.chain)
,
786 OMP_CLAUSE__LOOPTEMP_);
787 startvar = OMP_CLAUSE_DECL (startvar)(*(omp_clause_elt_check (((omp_clause_range_check (((tree_check
((startvar), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 787, __FUNCTION__, (OMP_CLAUSE)))), (OMP_CLAUSE_PRIVATE), (
OMP_CLAUSE__SCANTEMP_), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 787, __FUNCTION__))), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 787, __FUNCTION__)))
;
788 endvar = OMP_CLAUSE_DECL (endvar)(*(omp_clause_elt_check (((omp_clause_range_check (((tree_check
((endvar), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 788, __FUNCTION__, (OMP_CLAUSE)))), (OMP_CLAUSE_PRIVATE), (
OMP_CLAUSE__SCANTEMP_), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 788, __FUNCTION__))), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 788, __FUNCTION__)))
;
789 step = fold_convert_loc (loc, fd.iter_type, fd.loop.step);
790 if (fd.loop.cond_code == LT_EXPR)
791 iflags |= GOMP_TASK_FLAG_UP(1 << 8);
792 tree tclauses = gimple_omp_for_clauses (g);
793 num_tasks = omp_find_clause (tclauses, OMP_CLAUSE_NUM_TASKS);
794 if (num_tasks)
795 {
796 if (OMP_CLAUSE_NUM_TASKS_STRICT (num_tasks)(((omp_clause_subcode_check ((num_tasks), (OMP_CLAUSE_NUM_TASKS
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 796, __FUNCTION__)))->base.private_flag)
)
797 iflags |= GOMP_TASK_FLAG_STRICT(1 << 14);
798 num_tasks = OMP_CLAUSE_NUM_TASKS_EXPR (num_tasks)(*(omp_clause_elt_check (((omp_clause_subcode_check ((num_tasks
), (OMP_CLAUSE_NUM_TASKS), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 798, __FUNCTION__))), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 798, __FUNCTION__)))
;
799 }
800 else
801 {
802 num_tasks = omp_find_clause (tclauses, OMP_CLAUSE_GRAINSIZE);
803 if (num_tasks)
804 {
805 iflags |= GOMP_TASK_FLAG_GRAINSIZE(1 << 9);
806 if (OMP_CLAUSE_GRAINSIZE_STRICT (num_tasks)(((omp_clause_subcode_check ((num_tasks), (OMP_CLAUSE_GRAINSIZE
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 806, __FUNCTION__)))->base.private_flag)
)
807 iflags |= GOMP_TASK_FLAG_STRICT(1 << 14);
808 num_tasks = OMP_CLAUSE_GRAINSIZE_EXPR (num_tasks)(*(omp_clause_elt_check (((omp_clause_subcode_check ((num_tasks
), (OMP_CLAUSE_GRAINSIZE), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 808, __FUNCTION__))), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 808, __FUNCTION__)))
;
809 }
810 else
811 num_tasks = integer_zero_nodeglobal_trees[TI_INTEGER_ZERO];
812 }
813 num_tasks = fold_convert_loc (loc, long_integer_type_nodeinteger_types[itk_long], num_tasks);
814 if (ifc == NULL_TREE(tree) nullptr)
815 iflags |= GOMP_TASK_FLAG_IF(1 << 10);
816 if (omp_find_clause (tclauses, OMP_CLAUSE_NOGROUP))
817 iflags |= GOMP_TASK_FLAG_NOGROUP(1 << 11);
818 ull = fd.iter_type == long_long_unsigned_type_nodeinteger_types[itk_unsigned_long_long];
819 if (omp_find_clause (clauses, OMP_CLAUSE_REDUCTION))
820 iflags |= GOMP_TASK_FLAG_REDUCTION(1 << 12);
821 }
822 else
823 {
824 if (priority)
825 iflags |= GOMP_TASK_FLAG_PRIORITY(1 << 4);
826 if (detach)
827 iflags |= GOMP_TASK_FLAG_DETACH(1 << 13);
828 }
829
830 tree flags = build_int_cst (unsigned_type_nodeinteger_types[itk_unsigned_int], iflags);
831
832 tree cond = boolean_true_nodeglobal_trees[TI_BOOLEAN_TRUE];
833 if (ifc)
834 {
835 if (taskloop_p)
836 {
837 tree t = gimple_boolify (OMP_CLAUSE_IF_EXPR (ifc)(*(omp_clause_elt_check (((omp_clause_subcode_check ((ifc), (
OMP_CLAUSE_IF), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 837, __FUNCTION__))), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 837, __FUNCTION__)))
);
838 t = fold_build3_loc (loc, COND_EXPR, unsigned_type_nodeinteger_types[itk_unsigned_int], t,
839 build_int_cst (unsigned_type_nodeinteger_types[itk_unsigned_int],
840 GOMP_TASK_FLAG_IF(1 << 10)),
841 build_int_cst (unsigned_type_nodeinteger_types[itk_unsigned_int], 0));
842 flags = fold_build2_loc (loc, PLUS_EXPR, unsigned_type_nodeinteger_types[itk_unsigned_int],
843 flags, t);
844 }
845 else
846 cond = gimple_boolify (OMP_CLAUSE_IF_EXPR (ifc)(*(omp_clause_elt_check (((omp_clause_subcode_check ((ifc), (
OMP_CLAUSE_IF), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 846, __FUNCTION__))), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 846, __FUNCTION__)))
);
847 }
848
849 if (finalc)
850 {
851 tree t = gimple_boolify (OMP_CLAUSE_FINAL_EXPR (finalc)(*(omp_clause_elt_check (((omp_clause_subcode_check ((finalc)
, (OMP_CLAUSE_FINAL), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 851, __FUNCTION__))), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 851, __FUNCTION__)))
);
852 t = fold_build3_loc (loc, COND_EXPR, unsigned_type_nodeinteger_types[itk_unsigned_int], t,
853 build_int_cst (unsigned_type_nodeinteger_types[itk_unsigned_int],
854 GOMP_TASK_FLAG_FINAL(1 << 1)),
855 build_int_cst (unsigned_type_nodeinteger_types[itk_unsigned_int], 0));
856 flags = fold_build2_loc (loc, PLUS_EXPR, unsigned_type_nodeinteger_types[itk_unsigned_int], flags, t);
857 }
858 if (depend)
859 depend = OMP_CLAUSE_DECL (depend)(*(omp_clause_elt_check (((omp_clause_range_check (((tree_check
((depend), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 859, __FUNCTION__, (OMP_CLAUSE)))), (OMP_CLAUSE_PRIVATE), (
OMP_CLAUSE__SCANTEMP_), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 859, __FUNCTION__))), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 859, __FUNCTION__)))
;
860 else
861 depend = build_int_cst (ptr_type_nodeglobal_trees[TI_PTR_TYPE], 0);
862 if (priority)
863 priority = fold_convert (integer_type_node,fold_convert_loc (((location_t) 0), integer_types[itk_int], (
*(omp_clause_elt_check (((omp_clause_subcode_check ((priority
), (OMP_CLAUSE_PRIORITY), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 864, __FUNCTION__))), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 864, __FUNCTION__))))
864 OMP_CLAUSE_PRIORITY_EXPR (priority))fold_convert_loc (((location_t) 0), integer_types[itk_int], (
*(omp_clause_elt_check (((omp_clause_subcode_check ((priority
), (OMP_CLAUSE_PRIORITY), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 864, __FUNCTION__))), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 864, __FUNCTION__))))
;
865 else
866 priority = integer_zero_nodeglobal_trees[TI_INTEGER_ZERO];
867
868 gsi = gsi_last_nondebug_bb (bb);
869
870 detach = (detach
871 ? build_fold_addr_expr (OMP_CLAUSE_DECL (detach))build_fold_addr_expr_loc (((location_t) 0), ((*(omp_clause_elt_check
(((omp_clause_range_check (((tree_check ((detach), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 871, __FUNCTION__, (OMP_CLAUSE)))), (OMP_CLAUSE_PRIVATE), (
OMP_CLAUSE__SCANTEMP_), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 871, __FUNCTION__))), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 871, __FUNCTION__)))))
872 : null_pointer_nodeglobal_trees[TI_NULL_POINTER]);
873
874 tree t = gimple_omp_task_data_arg (entry_stmt);
875 if (t == NULLnullptr)
876 t2 = null_pointer_nodeglobal_trees[TI_NULL_POINTER];
877 else
878 t2 = build_fold_addr_expr_loc (loc, t);
879 t1 = build_fold_addr_expr_loc (loc, gimple_omp_task_child_fn (entry_stmt));
880 t = gimple_omp_task_copy_fn (entry_stmt);
881 if (t == NULLnullptr)
882 t3 = null_pointer_nodeglobal_trees[TI_NULL_POINTER];
883 else
884 t3 = build_fold_addr_expr_loc (loc, t);
885
886 if (taskloop_p)
887 t = build_call_expr (ull
888 ? builtin_decl_explicit (BUILT_IN_GOMP_TASKLOOP_ULL)
889 : builtin_decl_explicit (BUILT_IN_GOMP_TASKLOOP),
890 11, t1, t2, t3,
891 gimple_omp_task_arg_size (entry_stmt),
892 gimple_omp_task_arg_align (entry_stmt), flags,
893 num_tasks, priority, startvar, endvar, step);
894 else
895 t = build_call_expr (builtin_decl_explicit (BUILT_IN_GOMP_TASK),
896 10, t1, t2, t3,
897 gimple_omp_task_arg_size (entry_stmt),
898 gimple_omp_task_arg_align (entry_stmt), cond, flags,
899 depend, priority, detach);
900
901 force_gimple_operand_gsi (&gsi, t, true, NULL_TREE(tree) nullptr,
902 false, GSI_CONTINUE_LINKING);
903}
904
905/* Build the function call to GOMP_taskwait_depend to actually
906 generate the taskwait operation. BB is the block where to insert the
907 code. */
908
909static void
910expand_taskwait_call (basic_block bb, gomp_task *entry_stmt)
911{
912 tree clauses = gimple_omp_task_clauses (entry_stmt);
913 tree depend = omp_find_clause (clauses, OMP_CLAUSE_DEPEND);
914 if (depend == NULL_TREE(tree) nullptr)
915 return;
916
917 depend = OMP_CLAUSE_DECL (depend)(*(omp_clause_elt_check (((omp_clause_range_check (((tree_check
((depend), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 917, __FUNCTION__, (OMP_CLAUSE)))), (OMP_CLAUSE_PRIVATE), (
OMP_CLAUSE__SCANTEMP_), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 917, __FUNCTION__))), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 917, __FUNCTION__)))
;
918
919 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
920 tree t
921 = build_call_expr (builtin_decl_explicit (BUILT_IN_GOMP_TASKWAIT_DEPEND),
922 1, depend);
923
924 force_gimple_operand_gsi (&gsi, t, true, NULL_TREE(tree) nullptr,
925 false, GSI_CONTINUE_LINKING);
926}
927
928/* Build the function call to GOMP_teams_reg to actually
929 generate the host teams operation. REGION is the teams region
930 being expanded. BB is the block where to insert the code. */
931
932static void
933expand_teams_call (basic_block bb, gomp_teams *entry_stmt)
934{
935 tree clauses = gimple_omp_teams_clauses (entry_stmt);
936 tree num_teams = omp_find_clause (clauses, OMP_CLAUSE_NUM_TEAMS);
937 if (num_teams == NULL_TREE(tree) nullptr)
938 num_teams = build_int_cst (unsigned_type_nodeinteger_types[itk_unsigned_int], 0);
939 else
940 {
941 num_teams = OMP_CLAUSE_NUM_TEAMS_UPPER_EXPR (num_teams)(*(omp_clause_elt_check (((omp_clause_subcode_check ((num_teams
), (OMP_CLAUSE_NUM_TEAMS), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 941, __FUNCTION__))), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 941, __FUNCTION__)))
;
942 num_teams = fold_convert (unsigned_type_node, num_teams)fold_convert_loc (((location_t) 0), integer_types[itk_unsigned_int
], num_teams)
;
943 }
944 tree thread_limit = omp_find_clause (clauses, OMP_CLAUSE_THREAD_LIMIT);
945 if (thread_limit == NULL_TREE(tree) nullptr)
946 thread_limit = build_int_cst (unsigned_type_nodeinteger_types[itk_unsigned_int], 0);
947 else
948 {
949 thread_limit = OMP_CLAUSE_THREAD_LIMIT_EXPR (thread_limit)(*(omp_clause_elt_check (((omp_clause_subcode_check ((thread_limit
), (OMP_CLAUSE_THREAD_LIMIT), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 949, __FUNCTION__))), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 949, __FUNCTION__)))
;
950 thread_limit = fold_convert (unsigned_type_node, thread_limit)fold_convert_loc (((location_t) 0), integer_types[itk_unsigned_int
], thread_limit)
;
951 }
952
953 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
954 tree t = gimple_omp_teams_data_arg (entry_stmt), t1;
955 if (t == NULLnullptr)
956 t1 = null_pointer_nodeglobal_trees[TI_NULL_POINTER];
957 else
958 t1 = build_fold_addr_expr (t)build_fold_addr_expr_loc (((location_t) 0), (t));
959 tree child_fndecl = gimple_omp_teams_child_fn (entry_stmt);
960 tree t2 = build_fold_addr_expr (child_fndecl)build_fold_addr_expr_loc (((location_t) 0), (child_fndecl));
961
962 vec<tree, va_gc> *args;
963 vec_alloc (args, 5);
964 args->quick_push (t2);
965 args->quick_push (t1);
966 args->quick_push (num_teams);
967 args->quick_push (thread_limit);
968 /* For future extensibility. */
969 args->quick_push (build_zero_cst (unsigned_type_nodeinteger_types[itk_unsigned_int]));
970
971 t = build_call_expr_loc_vec (UNKNOWN_LOCATION((location_t) 0),
972 builtin_decl_explicit (BUILT_IN_GOMP_TEAMS_REG),
973 args);
974
975 force_gimple_operand_gsi (&gsi, t, true, NULL_TREE(tree) nullptr,
976 false, GSI_CONTINUE_LINKING);
977}
978
979/* Chain all the DECLs in LIST by their TREE_CHAIN fields. */
980
981static tree
982vec2chain (vec<tree, va_gc> *v)
983{
984 tree chain = NULL_TREE(tree) nullptr, t;
985 unsigned ix;
986
987 FOR_EACH_VEC_SAFE_ELT_REVERSE (v, ix, t)for (ix = vec_safe_length (v) - 1; vec_safe_iterate ((v), (ix
), &(t)); (ix)--)
988 {
989 DECL_CHAIN (t)(((contains_struct_check (((contains_struct_check ((t), (TS_DECL_MINIMAL
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 989, __FUNCTION__))), (TS_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 989, __FUNCTION__))->common.chain))
= chain;
990 chain = t;
991 }
992
993 return chain;
994}
995
996/* Remove barriers in REGION->EXIT's block. Note that this is only
997 valid for GIMPLE_OMP_PARALLEL regions. Since the end of a parallel region
998 is an implicit barrier, any workshare inside the GIMPLE_OMP_PARALLEL that
999 left a barrier at the end of the GIMPLE_OMP_PARALLEL region can now be
1000 removed. */
1001
1002static void
1003remove_exit_barrier (struct omp_region *region)
1004{
1005 gimple_stmt_iterator gsi;
1006 basic_block exit_bb;
1007 edge_iterator ei;
1008 edge e;
1009 gimple *stmt;
1010 int any_addressable_vars = -1;
1011
1012 exit_bb = region->exit;
1013
1014 /* If the parallel region doesn't return, we don't have REGION->EXIT
1015 block at all. */
1016 if (! exit_bb)
1017 return;
1018
1019 /* The last insn in the block will be the parallel's GIMPLE_OMP_RETURN. The
1020 workshare's GIMPLE_OMP_RETURN will be in a preceding block. The kinds of
1021 statements that can appear in between are extremely limited -- no
1022 memory operations at all. Here, we allow nothing at all, so the
1023 only thing we allow to precede this GIMPLE_OMP_RETURN is a label. */
1024 gsi = gsi_last_nondebug_bb (exit_bb);
1025  gcc_assert (gimple_code (gsi_stmt (gsi)) == GIMPLE_OMP_RETURN);
1026  gsi_prev_nondebug (&gsi);
1027  if (!gsi_end_p (gsi) && gimple_code (gsi_stmt (gsi)) != GIMPLE_LABEL)
1028    return;
1029
1030  FOR_EACH_EDGE (e, ei, exit_bb->preds)
1031    {
1032      gsi = gsi_last_nondebug_bb (e->src);
1033      if (gsi_end_p (gsi))
1034        continue;
1035      stmt = gsi_stmt (gsi);
1036      if (gimple_code (stmt) == GIMPLE_OMP_RETURN
1037          && !gimple_omp_return_nowait_p (stmt))
1038        {
1039          /* OpenMP 3.0 tasks unfortunately prevent this optimization
1040             in many cases.  If there could be tasks queued, the barrier
1041             might be needed to let the tasks run before some local
1042             variable of the parallel that the task uses as shared
1043             runs out of scope.  The task can be spawned either
1044             from within the current function (this would be easy to check)
1045             or from some function it calls and gets passed an address
1046             of such a variable.  */
1047          if (any_addressable_vars < 0)
1048            {
1049              gomp_parallel *parallel_stmt
1050                = as_a <gomp_parallel *> (last_stmt (region->entry));
1051              tree child_fun = gimple_omp_parallel_child_fn (parallel_stmt);
1052              tree local_decls, block, decl;
1053              unsigned ix;
1054
1055              any_addressable_vars = 0;
1056              FOR_EACH_LOCAL_DECL (DECL_STRUCT_FUNCTION (child_fun), ix, decl)
1057                if (TREE_ADDRESSABLE (decl))
1058                  {
1059                    any_addressable_vars = 1;
1060                    break;
1061                  }
1062              for (block = gimple_block (stmt);
1063                   !any_addressable_vars
1064                   && block
1065                   && TREE_CODE (block) == BLOCK;
1066                   block = BLOCK_SUPERCONTEXT (block))
1067                {
1068                  for (local_decls = BLOCK_VARS (block);
1069                       local_decls;
1070                       local_decls = DECL_CHAIN (local_decls))
1071                    if (TREE_ADDRESSABLE (local_decls))
1072                      {
1073                        any_addressable_vars = 1;
1074                        break;
1075                      }
1076                  if (block == gimple_block (parallel_stmt))
1077                    break;
1078                }
1079            }
1080          if (!any_addressable_vars)
1081            gimple_omp_return_set_nowait (stmt);
1082        }
1083    }
1084}
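/* Illustrative OpenMP source (user-level code, not output of this pass)
   showing the barrier this function elides: the worksharing loop's implicit
   barrier is immediately followed by the parallel region's own implicit
   barrier, so the inner one can be marked nowait -- unless queued tasks might
   still rely on it, as the comment above explains.  */

void
exit_barrier_example (int *a, int n)
{
  #pragma omp parallel
  {
    #pragma omp for          /* implicit barrier at the end of this loop ... */
    for (int i = 0; i < n; i++)
      a[i] = i;
  }                          /* ... is redundant next to the parallel
                                region's own implicit barrier here.          */
}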
1085
1086static void
1087remove_exit_barriers (struct omp_region *region)
1088{
1089 if (region->type == GIMPLE_OMP_PARALLEL)
1090 remove_exit_barrier (region);
1091
1092 if (region->inner)
1093 {
1094 region = region->inner;
1095 remove_exit_barriers (region);
1096 while (region->next)
1097 {
1098 region = region->next;
1099 remove_exit_barriers (region);
1100 }
1101 }
1102}
1103
1104/* Optimize omp_get_thread_num () and omp_get_num_threads ()
1105 calls. These can't be declared as const functions, but
1106 within one parallel body they are constant, so they can be
1107 transformed there into __builtin_omp_get_{thread_num,num_threads} ()
1108 which are declared const. Similarly for task body, except
1109 that in untied task omp_get_thread_num () can change at any task
1110 scheduling point. */
1111
1112static void
1113optimize_omp_library_calls (gimple *entry_stmt)
1114{
1115 basic_block bb;
1116 gimple_stmt_iterator gsi;
1117 tree thr_num_tree = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
1118  tree thr_num_id = DECL_ASSEMBLER_NAME (thr_num_tree);
1119  tree num_thr_tree = builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS);
1120  tree num_thr_id = DECL_ASSEMBLER_NAME (num_thr_tree);
1121  bool untied_task = (gimple_code (entry_stmt) == GIMPLE_OMP_TASK
1122                      && omp_find_clause (gimple_omp_task_clauses (entry_stmt),
1123                                          OMP_CLAUSE_UNTIED) != NULL);
1124
1125  FOR_EACH_BB_FN (bb, cfun)
1126    for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1127      {
1128        gimple *call = gsi_stmt (gsi);
1129        tree decl;
1130
1131        if (is_gimple_call (call)
1132            && (decl = gimple_call_fndecl (call))
1133            && DECL_EXTERNAL (decl)
1134            && TREE_PUBLIC (decl)
1135            && DECL_INITIAL (decl) == NULL)
1136          {
1137            tree built_in;
1138
1139            if (DECL_NAME (decl) == thr_num_id)
1140              {
1141                /* In #pragma omp task untied omp_get_thread_num () can change
1142                   during the execution of the task region.  */
1143                if (untied_task)
1144                  continue;
1145                built_in = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
1146              }
1147            else if (DECL_NAME (decl) == num_thr_id)
1148              built_in = builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS);
1149            else
1150              continue;
1151
1152            if (DECL_ASSEMBLER_NAME (decl) != DECL_ASSEMBLER_NAME (built_in)
1153                || gimple_call_num_args (call) != 0)
1154              continue;
1155
1156            if (flag_exceptions && !TREE_NOTHROW (decl))
1157              continue;
1158
1159            if (TREE_CODE (TREE_TYPE (decl)) != FUNCTION_TYPE
1160                || !types_compatible_p (TREE_TYPE (TREE_TYPE (decl)),
1161                                        TREE_TYPE (TREE_TYPE (built_in))))
1162              continue;
1163
1164            gimple_call_set_fndecl (call, built_in);
1165          }
1166      }
1167}
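/* Illustrative OpenMP source (user-level code, not output of this pass):
   inside a single parallel body the two queries below are constant, so once
   the calls are redirected to the const builtins the compiler is free to
   hoist them out of the loop instead of re-calling the library on every
   iteration.  */

#include <omp.h>

void
library_call_example (int *a, int n)
{
  #pragma omp parallel
  {
    #pragma omp for
    for (int i = 0; i < n; i++)
      /* Both calls return the same value throughout this parallel body.  */
      a[i] = omp_get_thread_num () + omp_get_num_threads ();
  }
}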
1168
1169/* Callback for expand_omp_build_assign. Return non-NULL if *tp needs to be
1170 regimplified. */
1171
1172static tree
1173expand_omp_regimplify_p (tree *tp, int *walk_subtrees, void *)
1174{
1175 tree t = *tp;
1176
1177 /* Any variable with DECL_VALUE_EXPR needs to be regimplified. */
1178  if (VAR_P (t) && DECL_HAS_VALUE_EXPR_P (t))
1179    return t;
1180
1181  if (TREE_CODE (t) == ADDR_EXPR)
1182    recompute_tree_invariant_for_addr_expr (t);
1183
1184  *walk_subtrees = !TYPE_P (t) && !DECL_P (t);
1185  return NULL_TREE;
1186}
1187
1188/* Prepend or append TO = FROM assignment before or after *GSI_P. */
1189
1190static void
1191expand_omp_build_assign (gimple_stmt_iterator *gsi_p, tree to, tree from,
1192 bool after)
1193{
1194  bool simple_p = DECL_P (to) && TREE_ADDRESSABLE (to);
1195  from = force_gimple_operand_gsi (gsi_p, from, simple_p, NULL_TREE,
1196                                   !after, after ? GSI_CONTINUE_LINKING
1197                                                 : GSI_SAME_STMT);
1198  gimple *stmt = gimple_build_assign (to, from);
1199  if (after)
1200    gsi_insert_after (gsi_p, stmt, GSI_CONTINUE_LINKING);
1201  else
1202    gsi_insert_before (gsi_p, stmt, GSI_SAME_STMT);
1203  if (walk_tree (&from, expand_omp_regimplify_p, NULL, NULL)
1204      || walk_tree (&to, expand_omp_regimplify_p, NULL, NULL))
1205    {
1206      gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
1207      gimple_regimplify_operands (stmt, &gsi);
1208    }
1209}
1210
1211/* Prepend or append LHS CODE RHS condition before or after *GSI_P. */
1212
1213static gcond *
1214expand_omp_build_cond (gimple_stmt_iterator *gsi_p, enum tree_code code,
1215 tree lhs, tree rhs, bool after = false)
1216{
1217  gcond *cond_stmt = gimple_build_cond (code, lhs, rhs, NULL_TREE, NULL_TREE);
1218  if (after)
1219    gsi_insert_after (gsi_p, cond_stmt, GSI_CONTINUE_LINKING);
1220  else
1221    gsi_insert_before (gsi_p, cond_stmt, GSI_SAME_STMT);
1222  if (walk_tree (gimple_cond_lhs_ptr (cond_stmt), expand_omp_regimplify_p,
1223                 NULL, NULL)
1224      || walk_tree (gimple_cond_rhs_ptr (cond_stmt), expand_omp_regimplify_p,
1225                    NULL, NULL))
1226    {
1227      gimple_stmt_iterator gsi = gsi_for_stmt (cond_stmt);
1228      gimple_regimplify_operands (cond_stmt, &gsi);
1229    }
1230  return cond_stmt;
1231}
1232
1233/* Expand the OpenMP parallel or task directive starting at REGION. */
1234
1235static void
1236expand_omp_taskreg (struct omp_region *region)
1237{
1238 basic_block entry_bb, exit_bb, new_bb;
1239 struct function *child_cfun;
1240 tree child_fn, block, t;
1241 gimple_stmt_iterator gsi;
1242 gimple *entry_stmt, *stmt;
1243 edge e;
1244 vec<tree, va_gc> *ws_args;
1245
1246 entry_stmt = last_stmt (region->entry);
1247 if (gimple_code (entry_stmt) == GIMPLE_OMP_TASK
1248 && gimple_omp_task_taskwait_p (entry_stmt))
1249 {
1250 new_bb = region->entry;
1251 gsi = gsi_last_nondebug_bb (region->entry);
1252      gcc_assert (gimple_code (gsi_stmt (gsi)) == GIMPLE_OMP_TASK);
1253      gsi_remove (&gsi, true);
1254      expand_taskwait_call (new_bb, as_a <gomp_task *> (entry_stmt));
1255      return;
1256    }
1257
1258  child_fn = gimple_omp_taskreg_child_fn (entry_stmt);
1259  child_cfun = DECL_STRUCT_FUNCTION (child_fn);
1260
1261  entry_bb = region->entry;
1262  if (gimple_code (entry_stmt) == GIMPLE_OMP_TASK)
1263    exit_bb = region->cont;
1264  else
1265    exit_bb = region->exit;
1266
1267  if (is_combined_parallel (region))
1268    ws_args = region->ws_args;
1269  else
1270    ws_args = NULL;
1271
1272  if (child_cfun->cfg)
1273    {
1274      /* Due to inlining, it may happen that we have already outlined
1275         the region, in which case all we need to do is make the
1276         sub-graph unreachable and emit the parallel call.  */
1277      edge entry_succ_e, exit_succ_e;
1278
1279      entry_succ_e = single_succ_edge (entry_bb);
1280
1281      gsi = gsi_last_nondebug_bb (entry_bb);
1282      gcc_assert (gimple_code (gsi_stmt (gsi)) == GIMPLE_OMP_PARALLEL
1283                  || gimple_code (gsi_stmt (gsi)) == GIMPLE_OMP_TASK
1284                  || gimple_code (gsi_stmt (gsi)) == GIMPLE_OMP_TEAMS);
1285      gsi_remove (&gsi, true);
1286
1287      new_bb = entry_bb;
1288      if (exit_bb)
1289        {
1290          exit_succ_e = single_succ_edge (exit_bb);
1291          make_edge (new_bb, exit_succ_e->dest, EDGE_FALLTHRU);
1292        }
1293      remove_edge_and_dominated_blocks (entry_succ_e);
1294    }
1295  else
1296    {
1297      unsigned srcidx, dstidx, num;
1298
1299      /* If the parallel region needs data sent from the parent
1300         function, then the very first statement (except possible
1301         tree profile counter updates) of the parallel body
1302         is a copy assignment .OMP_DATA_I = &.OMP_DATA_O.  Since
1303         &.OMP_DATA_O is passed as an argument to the child function,
1304         we need to replace it with the argument as seen by the child
1305         function.
1306
1307         In most cases, this will end up being the identity assignment
1308         .OMP_DATA_I = .OMP_DATA_I.  However, if the parallel body had
1309         a function call that has been inlined, the original PARM_DECL
1310         .OMP_DATA_I may have been converted into a different local
1311         variable, in which case we need to keep the assignment.  */
1312      if (gimple_omp_taskreg_data_arg (entry_stmt))
1313        {
1314          basic_block entry_succ_bb
1315            = single_succ_p (entry_bb) ? single_succ (entry_bb)
1316                                       : FALLTHRU_EDGE (entry_bb)->dest;
1317          tree arg;
1318          gimple *parcopy_stmt = NULL;
1319
1320          for (gsi = gsi_start_bb (entry_succ_bb); ; gsi_next (&gsi))
1321            {
1322              gimple *stmt;
1323
1324              gcc_assert (!gsi_end_p (gsi));
1325              stmt = gsi_stmt (gsi);
1326              if (gimple_code (stmt) != GIMPLE_ASSIGN)
1327                continue;
1328
1329              if (gimple_num_ops (stmt) == 2)
1330                {
1331                  tree arg = gimple_assign_rhs1 (stmt);
1332
1333                  /* We're ignoring the subcode because we're
1334                     effectively doing a STRIP_NOPS.  */
1335
1336                  if (TREE_CODE (arg) == ADDR_EXPR
1337                      && (TREE_OPERAND (arg, 0)
1338                          == gimple_omp_taskreg_data_arg (entry_stmt)))
1339                    {
1340                      parcopy_stmt = stmt;
1341                      break;
1342                    }
1343                }
1344            }
1345
1346          gcc_assert (parcopy_stmt != NULL);
1347          arg = DECL_ARGUMENTS (child_fn);
1348
1349          if (!gimple_in_ssa_p (cfun))
1350            {
1351              if (gimple_assign_lhs (parcopy_stmt) == arg)
1352                gsi_remove (&gsi, true);
1353              else
1354                {
1355                  /* ?? Is setting the subcode really necessary ??  */
1356                  gimple_omp_set_subcode (parcopy_stmt, TREE_CODE (arg));
1357                  gimple_assign_set_rhs1 (parcopy_stmt, arg);
1358                }
1359            }
1360          else
1361            {
1362              tree lhs = gimple_assign_lhs (parcopy_stmt);
1363              gcc_assert (SSA_NAME_VAR (lhs) == arg);
1364              /* We'd like to set the rhs to the default def in the child_fn,
1365                 but it's too early to create ssa names in the child_fn.
1366                 Instead, we set the rhs to the parm.  In
1367                 move_sese_region_to_fn, we introduce a default def for the
1368                 parm, map the parm to its default def, and once we encounter
1369                 this stmt, replace the parm with the default def.  */
1370              gimple_assign_set_rhs1 (parcopy_stmt, arg);
1371              update_stmt (parcopy_stmt);
1372            }
1373        }
1374
1375      /* Declare local variables needed in CHILD_CFUN.  */
1376      block = DECL_INITIAL (child_fn);
1377      BLOCK_VARS (block) = vec2chain (child_cfun->local_decls);
1378      /* The gimplifier could record temporaries in parallel/task block
1379         rather than in the containing function's local_decls chain,
1380         which would mean cgraph missed finalizing them.  Do it now.  */
1381      for (t = BLOCK_VARS (block); t; t = DECL_CHAIN (t))
1382        if (VAR_P (t) && TREE_STATIC (t) && !DECL_EXTERNAL (t))
1383          varpool_node::finalize_decl (t);
1384      DECL_SAVED_TREE (child_fn) = NULL;
1385      /* We'll create a CFG for child_fn, so no gimple body is needed.  */
1386      gimple_set_body (child_fn, NULL);
1387      TREE_USED (block) = 1;
1388
1389      /* Reset DECL_CONTEXT on function arguments.  */
1390      for (t = DECL_ARGUMENTS (child_fn); t; t = DECL_CHAIN (t))
1391        DECL_CONTEXT (t) = child_fn;
1392
1393      /* Split ENTRY_BB at GIMPLE_OMP_PARALLEL or GIMPLE_OMP_TASK,
1394         so that it can be moved to the child function.  */
1395      gsi = gsi_last_nondebug_bb (entry_bb);
1396      stmt = gsi_stmt (gsi);
1397      gcc_assert (stmt && (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
1398                           || gimple_code (stmt) == GIMPLE_OMP_TASK
1399                           || gimple_code (stmt) == GIMPLE_OMP_TEAMS));
1400      e = split_block (entry_bb, stmt);
1401      gsi_remove (&gsi, true);
1402      entry_bb = e->dest;
1403      edge e2 = NULL;
1404      if (gimple_code (entry_stmt) != GIMPLE_OMP_TASK)
1405        single_succ_edge (entry_bb)->flags = EDGE_FALLTHRU;
1406      else
1407        {
1408          e2 = make_edge (e->src, BRANCH_EDGE (entry_bb)->dest, EDGE_ABNORMAL);
1409          gcc_assert (e2->dest == region->exit);
1410          remove_edge (BRANCH_EDGE (entry_bb));
1411          set_immediate_dominator (CDI_DOMINATORS, e2->dest, e->src);
1412          gsi = gsi_last_nondebug_bb (region->exit);
1413          gcc_assert (!gsi_end_p (gsi)
1414                      && gimple_code (gsi_stmt (gsi)) == GIMPLE_OMP_RETURN);
1415          gsi_remove (&gsi, true);
1416        }
1417
1418      /* Convert GIMPLE_OMP_{RETURN,CONTINUE} into a RETURN_EXPR.  */
1419      if (exit_bb)
1420        {
1421          gsi = gsi_last_nondebug_bb (exit_bb);
1422          gcc_assert (!gsi_end_p (gsi)
1423                      && (gimple_code (gsi_stmt (gsi))
1424                          == (e2 ? GIMPLE_OMP_CONTINUE : GIMPLE_OMP_RETURN)));
1425          stmt = gimple_build_return (NULL);
1426          gsi_insert_after (&gsi, stmt, GSI_SAME_STMT);
1427          gsi_remove (&gsi, true);
1428        }
1429
1430      /* Move the parallel region into CHILD_CFUN.  */
1431
1432      if (gimple_in_ssa_p (cfun))
1433        {
1434          init_tree_ssa (child_cfun);
1435          init_ssa_operands (child_cfun);
1436          child_cfun->gimple_df->in_ssa_p = true;
1437          block = NULL_TREE;
1438        }
1439      else
1440        block = gimple_block (entry_stmt);
1441
1442      new_bb = move_sese_region_to_fn (child_cfun, entry_bb, exit_bb, block);
1443      if (exit_bb)
1444        single_succ_edge (new_bb)->flags = EDGE_FALLTHRU;
1445      if (e2)
1446        {
1447          basic_block dest_bb = e2->dest;
1448          if (!exit_bb)
1449            make_edge (new_bb, dest_bb, EDGE_FALLTHRU);
1450          remove_edge (e2);
1451          set_immediate_dominator (CDI_DOMINATORS, dest_bb, new_bb);
1452        }
1453      /* When the OMP expansion process cannot guarantee an up-to-date
1454         loop tree, arrange for the child function to fix up loops.  */
1455      if (loops_state_satisfies_p (LOOPS_NEED_FIXUP))
1456        child_cfun->x_current_loops->state |= LOOPS_NEED_FIXUP;
1457
1458      /* Remove non-local VAR_DECLs from child_cfun->local_decls list.  */
1459      num = vec_safe_length (child_cfun->local_decls);
1460      for (srcidx = 0, dstidx = 0; srcidx < num; srcidx++)
1461        {
1462          t = (*child_cfun->local_decls)[srcidx];
1463          if (DECL_CONTEXT (t) == cfun->decl)
1464            continue;
1465          if (srcidx != dstidx)
1466            (*child_cfun->local_decls)[dstidx] = t;
1467          dstidx++;
1468        }
1469      if (dstidx != num)
1470        vec_safe_truncate (child_cfun->local_decls, dstidx);
1471
1472      /* Inform the callgraph about the new function.  */
1473      child_cfun->curr_properties = cfun->curr_properties;
1474      child_cfun->has_simduid_loops |= cfun->has_simduid_loops;
1475      child_cfun->has_force_vectorize_loops |= cfun->has_force_vectorize_loops;
1476      cgraph_node *node = cgraph_node::get_create (child_fn);
1477      node->parallelized_function = 1;
1478      cgraph_node::add_new_function (child_fn, true);
1479
1480      bool need_asm = DECL_ASSEMBLER_NAME_SET_P (current_function_decl)
1481                      && !DECL_ASSEMBLER_NAME_SET_P (child_fn);
1482
1483      /* Fix the callgraph edges for child_cfun.  Those for cfun will be
1484         fixed in a following pass.  */
1485      push_cfun (child_cfun);
1486      if (need_asm)
1487        assign_assembler_name_if_needed (child_fn);
1488
1489      if (optimize)
1490        optimize_omp_library_calls (entry_stmt);
1491      update_max_bb_count ();
1492      cgraph_edge::rebuild_edges ();
1493
1494      /* Some EH regions might become dead, see PR34608.  If
1495         pass_cleanup_cfg isn't the first pass to happen with the
1496         new child, these dead EH edges might cause problems.
1497         Clean them up now.  */
1498      if (flag_exceptions)
1499        {
1500          basic_block bb;
1501          bool changed = false;
1502
1503          FOR_EACH_BB_FN (bb, cfun)
1504            changed |= gimple_purge_dead_eh_edges (bb);
1505          if (changed)
1506            cleanup_tree_cfg ();
1507        }
1508      if (gimple_in_ssa_p (cfun))
1509        update_ssa (TODO_update_ssa);
1510      if (flag_checking && !loops_state_satisfies_p (LOOPS_NEED_FIXUP))
1511        verify_loop_structure ();
1512      pop_cfun ();
1513
1514      if (dump_file && !gimple_in_ssa_p (cfun))
1515        {
1516          omp_any_child_fn_dumped = true;
1517          dump_function_header (dump_file, child_fn, dump_flags);
1518          dump_function_to_file (child_fn, dump_file, dump_flags);
1519        }
1520    }
1521
1522  adjust_context_and_scope (region, gimple_block (entry_stmt), child_fn);
1523
1524  if (gimple_code (entry_stmt) == GIMPLE_OMP_PARALLEL)
1525    expand_parallel_call (region, new_bb,
1526                          as_a <gomp_parallel *> (entry_stmt), ws_args);
1527  else if (gimple_code (entry_stmt) == GIMPLE_OMP_TEAMS)
1528    expand_teams_call (new_bb, as_a <gomp_teams *> (entry_stmt));
1529  else
1530    expand_task_call (region, new_bb, as_a <gomp_task *> (entry_stmt));
1531  if (gimple_in_ssa_p (cfun))
1532    update_ssa (TODO_update_ssa_only_virtuals);
1533}
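/* Rough shape of what the outlining above produces for a parallel construct,
   written by hand for illustration only: the child-function name, the
   .omp_data_s layout and the use of GOMP_parallel are assumptions sketching
   the idea, not literal output of this pass.

     Original:
       #pragma omp parallel shared (x)
         x++;                                                                */

struct omp_data_s_sketch { int *x; };

extern "C" void GOMP_parallel (void (*) (void *), void *, unsigned, unsigned);

static void
foo_omp_fn_sketch (void *p)               /* the outlined child function */
{
  omp_data_s_sketch *data = static_cast<omp_data_s_sketch *> (p);
  ++*data->x;
}

void
foo_expanded_sketch (int *x)
{
  omp_data_s_sketch data = { x };
  /* expand_parallel_call emits, roughly, a call like this into libgomp.  */
  GOMP_parallel (foo_omp_fn_sketch, &data, 0 /* num_threads: default */, 0);
}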
1534
1535/* Information about members of an OpenACC collapsed loop nest. */
1536
1537struct oacc_collapse
1538{
1539 tree base; /* Base value. */
1540 tree iters; /* Number of steps. */
1541 tree step; /* Step size. */
1542 tree tile; /* Tile increment (if tiled). */
1543 tree outer; /* Tile iterator var. */
1544};
1545
1546/* Helper for expand_oacc_for. Determine collapsed loop information.
1547 Fill in COUNTS array. Emit any initialization code before GSI.
1548 Return the calculated outer loop bound of BOUND_TYPE. */
1549
1550static tree
1551expand_oacc_collapse_init (const struct omp_for_data *fd,
1552 gimple_stmt_iterator *gsi,
1553 oacc_collapse *counts, tree diff_type,
1554 tree bound_type, location_t loc)
1555{
1556 tree tiling = fd->tiling;
1557 tree total = build_int_cst (bound_type, 1);
1558 int ix;
1559
1560  gcc_assert (integer_onep (fd->loop.step));
1561  gcc_assert (integer_zerop (fd->loop.n1));
1562
1563  /* When tiling, the first operand of the tile clause applies to the
1564     innermost loop, and we work outwards from there.  Seems
1565     backwards, but whatever.  */
1566  for (ix = fd->collapse; ix--;)
1567    {
1568      const omp_for_data_loop *loop = &fd->loops[ix];
1569
1570      tree iter_type = TREE_TYPE (loop->v);
1571      tree plus_type = iter_type;
1572
1573      gcc_assert (loop->cond_code == LT_EXPR || loop->cond_code == GT_EXPR);
1574
1575      if (POINTER_TYPE_P (iter_type))
1576        plus_type = sizetype;
1577
1578      if (tiling)
1579        {
1580          tree num = build_int_cst (integer_type_node, fd->collapse);
1581          tree loop_no = build_int_cst (integer_type_node, ix);
1582          tree tile = TREE_VALUE (tiling);
1583          gcall *call
1584            = gimple_build_call_internal (IFN_GOACC_TILE, 5, num, loop_no, tile,
1585                                          /* gwv-outer=*/integer_zero_node,
1586                                          /* gwv-inner=*/integer_zero_node);
1587
1588          counts[ix].outer = create_tmp_var (iter_type, ".outer");
1589          counts[ix].tile = create_tmp_var (diff_type, ".tile");
1590          gimple_call_set_lhs (call, counts[ix].tile);
1591          gimple_set_location (call, loc);
1592          gsi_insert_before (gsi, call, GSI_SAME_STMT);
1593
1594          tiling = TREE_CHAIN (tiling);
1595        }
1596      else
1597        {
1598          counts[ix].tile = NULL;
1599          counts[ix].outer = loop->v;
1600        }
1601
1602      tree b = loop->n1;
1603      tree e = loop->n2;
1604      tree s = loop->step;
1605      bool up = loop->cond_code == LT_EXPR;
1606      tree dir = build_int_cst (diff_type, up ? +1 : -1);
1607      bool negating;
1608      tree expr;
1609
1610      b = force_gimple_operand_gsi (gsi, b, true, NULL_TREE,
1611                                    true, GSI_SAME_STMT);
1612      e = force_gimple_operand_gsi (gsi, e, true, NULL_TREE,
1613                                    true, GSI_SAME_STMT);
1614
1615      /* Convert the step, avoiding possible unsigned->signed overflow.  */
1616      negating = !up && TYPE_UNSIGNED (TREE_TYPE (s));
1617      if (negating)
1618        s = fold_build1 (NEGATE_EXPR, TREE_TYPE (s), s);
1619      s = fold_convert (diff_type, s);
1620      if (negating)
1621        s = fold_build1 (NEGATE_EXPR, diff_type, s);
1622      s = force_gimple_operand_gsi (gsi, s, true, NULL_TREE,
1623                                    true, GSI_SAME_STMT);
1624
1625      /* Determine the range, avoiding possible unsigned->signed overflow.  */
1626      negating = !up && TYPE_UNSIGNED (iter_type);
1627      expr = fold_build2 (MINUS_EXPR, plus_type,
1628                          fold_convert (plus_type, negating ? b : e),
1629                          fold_convert (plus_type, negating ? e : b));
1630      expr = fold_convert (diff_type, expr);
1631      if (negating)
1632        expr = fold_build1 (NEGATE_EXPR, diff_type, expr);
1633      tree range = force_gimple_operand_gsi
1634        (gsi, expr, true, NULL_TREE, true, GSI_SAME_STMT);
1635
1636      /* Determine number of iterations.  */
1637      expr = fold_build2 (MINUS_EXPR, diff_type, range, dir);
1638      expr = fold_build2 (PLUS_EXPR, diff_type, expr, s);
1639      expr = fold_build2 (TRUNC_DIV_EXPR, diff_type, expr, s);
1640
1641      tree iters = force_gimple_operand_gsi (gsi, expr, true, NULL_TREE,
1642                                             true, GSI_SAME_STMT);
1643
1644      counts[ix].base = b;
1645      counts[ix].iters = iters;
1646      counts[ix].step = s;
1647
1648      total = fold_build2 (MULT_EXPR, bound_type, total,
1649                           fold_convert (bound_type, iters));
1650    }
1651
1652  return total;
1653}
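/* Quick self-contained check (an illustration, not part of GCC) of the
   iteration-count formula built above for an upward-counting loop, i.e.
   cond_code LT_EXPR and dir = +1:
       iters = ((e - b) - dir + s) / s      using truncating division,
   which is the ceiling of (e - b) / s.  The outer bound returned by the
   function is the product of these per-loop counts.  */

#include <cassert>

static long
oacc_iters_sketch (long b, long e, long s)
{
  long range = e - b;
  long dir = +1;
  return (range - dir + s) / s;    /* TRUNC_DIV_EXPR in the code above */
}

inline int
check_oacc_iters_sketch ()
{
  assert (oacc_iters_sketch (0, 10, 3) == 4);   /* v = 0, 3, 6, 9 */
  assert (oacc_iters_sketch (5, 6, 10) == 1);   /* v = 5          */
  return 0;
}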
1654
1655/* Emit initializers for collapsed loop members. INNER is true if
1656 this is for the element loop of a TILE. IVAR is the outer
1657 loop iteration variable, from which collapsed loop iteration values
1658 are calculated. COUNTS array has been initialized by
1659 expand_oacc_collapse_init.  */
1660
1661static void
1662expand_oacc_collapse_vars (const struct omp_for_data *fd, bool inner,
1663 gimple_stmt_iterator *gsi,
1664 const oacc_collapse *counts, tree ivar,
1665 tree diff_type)
1666{
1667  tree ivar_type = TREE_TYPE (ivar);
1668
1669  /* The most rapidly changing iteration variable is the innermost
1670     one.  */
1671  for (int ix = fd->collapse; ix--;)
1672    {
1673      const omp_for_data_loop *loop = &fd->loops[ix];
1674      const oacc_collapse *collapse = &counts[ix];
1675      tree v = inner ? loop->v : collapse->outer;
1676      tree iter_type = TREE_TYPE (v);
1677      tree plus_type = iter_type;
1678      enum tree_code plus_code = PLUS_EXPR;
1679      tree expr;
1680
1681      if (POINTER_TYPE_P (iter_type))
1682        {
1683          plus_code = POINTER_PLUS_EXPR;
1684          plus_type = sizetype;
1685        }
1686
1687      expr = ivar;
1688      if (ix)
1689        {
1690          tree mod = fold_convert (ivar_type, collapse->iters);
1691          ivar = fold_build2 (TRUNC_DIV_EXPR, ivar_type, expr, mod);
1692          expr = fold_build2 (TRUNC_MOD_EXPR, ivar_type, expr, mod);
1693          ivar = force_gimple_operand_gsi (gsi, ivar, true, NULL_TREE,
1694                                           true, GSI_SAME_STMT);
1695        }
1696
1697      expr = fold_build2 (MULT_EXPR, diff_type, fold_convert (diff_type, expr),
1698                          fold_convert (diff_type, collapse->step));
1699      expr = fold_build2 (plus_code, iter_type,
1700                          inner ? collapse->outer : collapse->base,
1701                          fold_convert (plus_type, expr));
1702      expr = force_gimple_operand_gsi (gsi, expr, false, NULL_TREE,
1703                                       true, GSI_SAME_STMT);
1704      gassign *ass = gimple_build_assign (v, expr);
1705      gsi_insert_before (gsi, ass, GSI_SAME_STMT);
1706    }
1707}
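/* Illustration (not GCC code) of the div/mod recovery performed above for a
   collapse(2) nest with zero bases and unit steps: the innermost index is the
   remainder by that loop's iteration count, and what is left after the
   division feeds the next-outer loop.  */

static void
collapse_vars_sketch (long ivar, long inner_iters, long *outer_v, long *inner_v)
{
  *inner_v = ivar % inner_iters;   /* TRUNC_MOD_EXPR for the innermost loop */
  ivar = ivar / inner_iters;       /* TRUNC_DIV_EXPR peels that loop off    */
  *outer_v = ivar;                 /* outermost index is what remains       */
}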
1708
1709/* Helper function for expand_omp_{for_*,simd}. If this is the outermost
1710 of the combined collapse > 1 loop constructs, generate code like:
1711 if (__builtin_expect (N32 cond3 N31, 0)) goto ZERO_ITER_BB;
1712 if (cond3 is <)
1713 adj = STEP3 - 1;
1714 else
1715 adj = STEP3 + 1;
1716 count3 = (adj + N32 - N31) / STEP3;
1717 if (__builtin_expect (N22 cond2 N21, 0)) goto ZERO_ITER_BB;
1718 if (cond2 is <)
1719 adj = STEP2 - 1;
1720 else
1721 adj = STEP2 + 1;
1722 count2 = (adj + N22 - N21) / STEP2;
1723 if (__builtin_expect (N12 cond1 N11, 0)) goto ZERO_ITER_BB;
1724 if (cond1 is <)
1725 adj = STEP1 - 1;
1726 else
1727 adj = STEP1 + 1;
1728 count1 = (adj + N12 - N11) / STEP1;
1729 count = count1 * count2 * count3;
1730 Furthermore, if ZERO_ITER_BB is NULL, create a BB which does:
1731 count = 0;
1732 and set ZERO_ITER_BB to that bb. If this isn't the outermost
1733 of the combined loop constructs, just initialize COUNTS array
1734 from the _looptemp_ clauses. For loop nests with non-rectangular
1735 loops, do this only for the rectangular loops. Then pick
1736 the loops which reference outer vars in their bound expressions
1737 and the loops which they refer to and for this sub-nest compute
1738 number of iterations. For triangular loops use Faulhaber's formula,
1739 otherwise as a fallback, compute by iterating the loops.
1740 If e.g. the sub-nest is
1741 for (I = N11; I COND1 N12; I += STEP1)
1742 for (J = M21 * I + N21; J COND2 M22 * I + N22; J += STEP2)
1743 for (K = M31 * J + N31; K COND3 M32 * J + N32; K += STEP3)
1744 do:
1745 COUNT = 0;
1746 for (tmpi = N11; tmpi COND1 N12; tmpi += STEP1)
1747 for (tmpj = M21 * tmpi + N21;
1748 tmpj COND2 M22 * tmpi + N22; tmpj += STEP2)
1749 {
1750 int tmpk1 = M31 * tmpj + N31;
1751 int tmpk2 = M32 * tmpj + N32;
1752 if (tmpk1 COND3 tmpk2)
1753 {
1754 if (COND3 is <)
1755 adj = STEP3 - 1;
1756 else
1757 adj = STEP3 + 1;
1758 COUNT += (adj + tmpk2 - tmpk1) / STEP3;
1759 }
1760 }
1761 and finally multiply the counts of the rectangular loops not
1762 in the sub-nest with COUNT. Also, as counts[fd->last_nonrect]
1763 store number of iterations of the loops from fd->first_nonrect
1764 to fd->last_nonrect inclusive, i.e. the above COUNT multiplied
1765 by the counts of rectangular loops not referenced in any non-rectangular
1766 loops sandwiched in between those. */
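/* Throwaway check (illustration only, under the simplest assumptions: unit
   steps and an inner bound equal to the outer index) that the closed form
   referred to above -- Faulhaber's formula for 0 + 1 + ... + (n-1) -- matches
   the fallback of iterating the loops.  */

#include <cassert>

static long
tri_count_by_iterating (long n)
{
  long count = 0;
  for (long i = 0; i < n; i++)
    for (long j = 0; j < i; j++)
      count++;
  return count;
}

static long
tri_count_closed_form (long n)
{
  return n * (n - 1) / 2;
}

inline int
check_tri_count ()
{
  for (long n = 0; n < 64; n++)
    assert (tri_count_by_iterating (n) == tri_count_closed_form (n));
  return 0;
}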
1767
1768/* NOTE: It *could* be better to moosh all of the BBs together,
1769 creating one larger BB with all the computation and the unexpected
1770 jump at the end. I.e.
1771
1772 bool zero3, zero2, zero1, zero;
1773
1774 zero3 = N32 c3 N31;
1775 count3 = (N32 - N31) /[cl] STEP3;
1776 zero2 = N22 c2 N21;
1777 count2 = (N22 - N21) /[cl] STEP2;
1778 zero1 = N12 c1 N11;
1779 count1 = (N12 - N11) /[cl] STEP1;
1780 zero = zero3 || zero2 || zero1;
1781 count = count1 * count2 * count3;
1782 if (__builtin_expect(zero, false)) goto zero_iter_bb;
1783
1784 After all, we expect the zero=false, and thus we expect to have to
1785 evaluate all of the comparison expressions, so short-circuiting
1786 oughtn't be a win. Since the condition isn't protecting a
1787 denominator, we're not concerned about divide-by-zero, so we can
1788 fully evaluate count even if a numerator turned out to be wrong.
1789
1790 It seems like putting this all together would create much better
1791 scheduling opportunities, and less pressure on the chip's branch
1792 predictor. */
1793
1794static void
1795expand_omp_for_init_counts (struct omp_for_data *fd, gimple_stmt_iterator *gsi,
1796 basic_block &entry_bb, tree *counts,
1797 basic_block &zero_iter1_bb, int &first_zero_iter1,
1798 basic_block &zero_iter2_bb, int &first_zero_iter2,
1799 basic_block &l2_dom_bb)
1800{
1801 tree t, type = TREE_TYPE (fd->loop.v)((contains_struct_check ((fd->loop.v), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 1801, __FUNCTION__))->typed.type)
;
1802 edge e, ne;
1803 int i;
1804
1805 /* Collapsed loops need work for expansion into SSA form. */
1806 gcc_assert (!gimple_in_ssa_p (cfun))((void)(!(!gimple_in_ssa_p ((cfun + 0))) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 1806, __FUNCTION__), 0 : 0))
;
1807
1808 if (gimple_omp_for_combined_into_p (fd->for_stmt)
1809 && TREE_CODE (fd->loop.n2)((enum tree_code) (fd->loop.n2)->base.code) != INTEGER_CST)
1810 {
1811 gcc_assert (fd->ordered == 0)((void)(!(fd->ordered == 0) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 1811, __FUNCTION__), 0 : 0))
;
1812 /* First two _looptemp_ clauses are for istart/iend, counts[0]
1813 isn't supposed to be handled, as the inner loop doesn't
1814 use it. */
1815 tree innerc = omp_find_clause (gimple_omp_for_clauses (fd->for_stmt),
1816 OMP_CLAUSE__LOOPTEMP_);
1817 gcc_assert (innerc)((void)(!(innerc) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 1817, __FUNCTION__), 0 : 0))
;
1818 for (i = 0; i < fd->collapse; i++)
1819 {
1820 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc)((contains_struct_check (((tree_check ((innerc), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 1820, __FUNCTION__, (OMP_CLAUSE)))), (TS_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 1820, __FUNCTION__))->common.chain)
,
1821 OMP_CLAUSE__LOOPTEMP_);
1822 gcc_assert (innerc)((void)(!(innerc) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 1822, __FUNCTION__), 0 : 0))
;
1823 if (i)
1824 counts[i] = OMP_CLAUSE_DECL (innerc)(*(omp_clause_elt_check (((omp_clause_range_check (((tree_check
((innerc), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 1824, __FUNCTION__, (OMP_CLAUSE)))), (OMP_CLAUSE_PRIVATE), (
OMP_CLAUSE__SCANTEMP_), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 1824, __FUNCTION__))), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 1824, __FUNCTION__)))
;
1825 else
1826 counts[0] = NULL_TREE(tree) nullptr;
1827 }
1828 if (fd->non_rect
1829 && fd->last_nonrect == fd->first_nonrect + 1
1830 && !TYPE_UNSIGNED (TREE_TYPE (fd->loops[fd->last_nonrect].v))((tree_class_check ((((contains_struct_check ((fd->loops[fd
->last_nonrect].v), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 1830, __FUNCTION__))->typed.type)), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 1830, __FUNCTION__))->base.u.bits.unsigned_flag)
)
1831 {
1832 tree c[4];
1833 for (i = 0; i < 4; i++)
1834 {
1835 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc)((contains_struct_check (((tree_check ((innerc), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 1835, __FUNCTION__, (OMP_CLAUSE)))), (TS_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 1835, __FUNCTION__))->common.chain)
,
1836 OMP_CLAUSE__LOOPTEMP_);
1837 gcc_assert (innerc)((void)(!(innerc) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 1837, __FUNCTION__), 0 : 0))
;
1838 c[i] = OMP_CLAUSE_DECL (innerc)(*(omp_clause_elt_check (((omp_clause_range_check (((tree_check
((innerc), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 1838, __FUNCTION__, (OMP_CLAUSE)))), (OMP_CLAUSE_PRIVATE), (
OMP_CLAUSE__SCANTEMP_), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 1838, __FUNCTION__))), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 1838, __FUNCTION__)))
;
1839 }
1840 counts[0] = c[0];
1841 fd->first_inner_iterations = c[1];
1842 fd->factor = c[2];
1843 fd->adjn1 = c[3];
1844 }
1845 return;
1846 }
1847
1848 for (i = fd->collapse; i < fd->ordered; i++)
1849 {
1850 tree itype = TREE_TYPE (fd->loops[i].v)((contains_struct_check ((fd->loops[i].v), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 1850, __FUNCTION__))->typed.type)
;
1851 counts[i] = NULL_TREE(tree) nullptr;
1852 t = fold_binary (fd->loops[i].cond_code, boolean_type_node,fold_binary_loc (((location_t) 0), fd->loops[i].cond_code,
global_trees[TI_BOOLEAN_TYPE], fold_convert_loc (((location_t
) 0), itype, fd->loops[i].n1), fold_convert_loc (((location_t
) 0), itype, fd->loops[i].n2))
1853 fold_convert (itype, fd->loops[i].n1),fold_binary_loc (((location_t) 0), fd->loops[i].cond_code,
global_trees[TI_BOOLEAN_TYPE], fold_convert_loc (((location_t
) 0), itype, fd->loops[i].n1), fold_convert_loc (((location_t
) 0), itype, fd->loops[i].n2))
1854 fold_convert (itype, fd->loops[i].n2))fold_binary_loc (((location_t) 0), fd->loops[i].cond_code,
global_trees[TI_BOOLEAN_TYPE], fold_convert_loc (((location_t
) 0), itype, fd->loops[i].n1), fold_convert_loc (((location_t
) 0), itype, fd->loops[i].n2))
;
1855 if (t && integer_zerop (t))
1856 {
1857 for (i = fd->collapse; i < fd->ordered; i++)
1858 counts[i] = build_int_cst (type, 0);
1859 break;
1860 }
1861 }
1862 bool rect_count_seen = false;
1863 for (i = 0; i < (fd->ordered ? fd->ordered : fd->collapse); i++)
1864 {
1865 tree itype = TREE_TYPE (fd->loops[i].v)((contains_struct_check ((fd->loops[i].v), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 1865, __FUNCTION__))->typed.type)
;
1866
1867 if (i >= fd->collapse && counts[i])
1868 continue;
1869 if (fd->non_rect)
1870 {
1871 /* Skip loops that use outer iterators in their expressions
1872 during this phase. */
1873 if (fd->loops[i].m1 || fd->loops[i].m2)
1874 {
1875 counts[i] = build_zero_cst (type);
1876 continue;
1877 }
1878 }
1879 if ((SSA_VAR_P (fd->loop.n2)(((enum tree_code) (fd->loop.n2)->base.code) == VAR_DECL
|| ((enum tree_code) (fd->loop.n2)->base.code) == PARM_DECL
|| ((enum tree_code) (fd->loop.n2)->base.code) == RESULT_DECL
|| ((enum tree_code) (fd->loop.n2)->base.code) == SSA_NAME
)
|| i >= fd->collapse)
1880 && ((t = fold_binary (fd->loops[i].cond_code, boolean_type_node,fold_binary_loc (((location_t) 0), fd->loops[i].cond_code,
global_trees[TI_BOOLEAN_TYPE], fold_convert_loc (((location_t
) 0), itype, fd->loops[i].n1), fold_convert_loc (((location_t
) 0), itype, fd->loops[i].n2))
1881 fold_convert (itype, fd->loops[i].n1),fold_binary_loc (((location_t) 0), fd->loops[i].cond_code,
global_trees[TI_BOOLEAN_TYPE], fold_convert_loc (((location_t
) 0), itype, fd->loops[i].n1), fold_convert_loc (((location_t
) 0), itype, fd->loops[i].n2))
1882 fold_convert (itype, fd->loops[i].n2))fold_binary_loc (((location_t) 0), fd->loops[i].cond_code,
global_trees[TI_BOOLEAN_TYPE], fold_convert_loc (((location_t
) 0), itype, fd->loops[i].n1), fold_convert_loc (((location_t
) 0), itype, fd->loops[i].n2))
)
1883 == NULL_TREE(tree) nullptr || !integer_onep (t)))
1884 {
1885 gcond *cond_stmt;
1886 tree n1, n2;
1887 n1 = fold_convert (itype, unshare_expr (fd->loops[i].n1))fold_convert_loc (((location_t) 0), itype, unshare_expr (fd->
loops[i].n1))
;
1888 n1 = force_gimple_operand_gsi (gsi, n1, true, NULL_TREE(tree) nullptr,
1889 true, GSI_SAME_STMT);
1890 n2 = fold_convert (itype, unshare_expr (fd->loops[i].n2))fold_convert_loc (((location_t) 0), itype, unshare_expr (fd->
loops[i].n2))
;
1891 n2 = force_gimple_operand_gsi (gsi, n2, true, NULL_TREE(tree) nullptr,
1892 true, GSI_SAME_STMT);
1893 cond_stmt = expand_omp_build_cond (gsi, fd->loops[i].cond_code,
1894 n1, n2);
1895 e = split_block (entry_bb, cond_stmt);
1896 basic_block &zero_iter_bb
1897 = i < fd->collapse ? zero_iter1_bb : zero_iter2_bb;
1898 int &first_zero_iter
1899 = i < fd->collapse ? first_zero_iter1 : first_zero_iter2;
1900 if (zero_iter_bb == NULLnullptr)
1901 {
1902 gassign *assign_stmt;
1903 first_zero_iter = i;
1904 zero_iter_bb = create_empty_bb (entry_bb);
1905 add_bb_to_loop (zero_iter_bb, entry_bb->loop_father);
1906 *gsi = gsi_after_labels (zero_iter_bb);
1907 if (i < fd->collapse)
1908 assign_stmt = gimple_build_assign (fd->loop.n2,
1909 build_zero_cst (type));
1910 else
1911 {
1912 counts[i] = create_tmp_reg (type, ".count");
1913 assign_stmt
1914 = gimple_build_assign (counts[i], build_zero_cst (type));
1915 }
1916 gsi_insert_before (gsi, assign_stmt, GSI_SAME_STMT);
1917 set_immediate_dominator (CDI_DOMINATORS, zero_iter_bb,
1918 entry_bb);
1919 }
1920 ne = make_edge (entry_bb, zero_iter_bb, EDGE_FALSE_VALUE);
1921 ne->probability = profile_probability::very_unlikely ();
1922 e->flags = EDGE_TRUE_VALUE;
1923 e->probability = ne->probability.invert ();
1924 if (l2_dom_bb == NULLnullptr)
1925 l2_dom_bb = entry_bb;
1926 entry_bb = e->dest;
1927 *gsi = gsi_last_nondebug_bb (entry_bb);
1928 }
1929
1930 if (POINTER_TYPE_P (itype)(((enum tree_code) (itype)->base.code) == POINTER_TYPE || (
(enum tree_code) (itype)->base.code) == REFERENCE_TYPE)
)
1931 itype = signed_type_for (itype);
1932 t = build_int_cst (itype, (fd->loops[i].cond_code == LT_EXPR
1933 ? -1 : 1));
1934 t = fold_build2 (PLUS_EXPR, itype,fold_build2_loc (((location_t) 0), PLUS_EXPR, itype, fold_convert_loc
(((location_t) 0), itype, fd->loops[i].step), t )
1935 fold_convert (itype, fd->loops[i].step), t)fold_build2_loc (((location_t) 0), PLUS_EXPR, itype, fold_convert_loc
(((location_t) 0), itype, fd->loops[i].step), t )
;
1936 t = fold_build2 (PLUS_EXPR, itype, t,fold_build2_loc (((location_t) 0), PLUS_EXPR, itype, t, fold_convert_loc
(((location_t) 0), itype, fd->loops[i].n2) )
1937 fold_convert (itype, fd->loops[i].n2))fold_build2_loc (((location_t) 0), PLUS_EXPR, itype, t, fold_convert_loc
(((location_t) 0), itype, fd->loops[i].n2) )
;
1938 t = fold_build2 (MINUS_EXPR, itype, t,fold_build2_loc (((location_t) 0), MINUS_EXPR, itype, t, fold_convert_loc
(((location_t) 0), itype, fd->loops[i].n1) )
1939 fold_convert (itype, fd->loops[i].n1))fold_build2_loc (((location_t) 0), MINUS_EXPR, itype, t, fold_convert_loc
(((location_t) 0), itype, fd->loops[i].n1) )
;
1940 /* ?? We could probably use CEIL_DIV_EXPR instead of
1941 TRUNC_DIV_EXPR and adjusting by hand. Unless we can't
1942 generate the same code in the end because generically we
1943 don't know that the values involved must be negative for
1944 GT?? */
1945 if (TYPE_UNSIGNED (itype)((tree_class_check ((itype), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 1945, __FUNCTION__))->base.u.bits.unsigned_flag)
&& fd->loops[i].cond_code == GT_EXPR)
1946 t = fold_build2 (TRUNC_DIV_EXPR, itype,
1947 fold_build1 (NEGATE_EXPR, itype, t),
1948 fold_build1 (NEGATE_EXPR, itype,
1949 fold_convert (itype,
1950 fd->loops[i].step)));
1951 else
1952 t = fold_build2 (TRUNC_DIV_EXPR, itype, t,
1953 fold_convert (itype, fd->loops[i].step));
1954 t = fold_convert (type, t);
1955 if (TREE_CODE (t) == INTEGER_CST)
1956 counts[i] = t;
1957 else
1958 {
1959 if (i < fd->collapse || i != first_zero_iter2)
1960 counts[i] = create_tmp_reg (type, ".count");
1961 expand_omp_build_assign (gsi, counts[i], t);
1962 }
1963 if (SSA_VAR_P (fd->loop.n2) && i < fd->collapse)
1964 {
1965 if (fd->non_rect && i >= fd->first_nonrect && i <= fd->last_nonrect)
1966 continue;
1967 if (!rect_count_seen)
1968 {
1969 t = counts[i];
1970 rect_count_seen = true;
1971 }
1972 else
1973 t = fold_build2 (MULT_EXPR, type, fd->loop.n2, counts[i]);
1974 expand_omp_build_assign (gsi, fd->loop.n2, t);
1975 }
1976 }
1977 if (fd->non_rect && SSA_VAR_P (fd->loop.n2))
1978 {
1979 gcc_assert (fd->last_nonrect != -1);
1980
1981 counts[fd->last_nonrect] = create_tmp_reg (type, ".count");
1982 expand_omp_build_assign (gsi, counts[fd->last_nonrect],
1983 build_zero_cst (type));
1984 for (i = fd->first_nonrect + 1; i < fd->last_nonrect; i++)
1985 if (fd->loops[i].m1
1986 || fd->loops[i].m2
1987 || fd->loops[i].non_rect_referenced)
1988 break;
1989 if (i == fd->last_nonrect
1990 && fd->loops[i].outer == fd->last_nonrect - fd->first_nonrect
1991 && !POINTER_TYPE_P (TREE_TYPE (fd->loops[i].v))
1992 && !TYPE_UNSIGNED (TREE_TYPE (fd->loops[i].v)))
1993 {
1994 int o = fd->first_nonrect;
1995 tree itype = TREE_TYPE (fd->loops[o].v);
1996 tree n1o = create_tmp_reg (itype, ".n1o");
1997 t = fold_convert (itype, unshare_expr (fd->loops[o].n1));
1998 expand_omp_build_assign (gsi, n1o, t);
1999 tree n2o = create_tmp_reg (itype, ".n2o");
2000 t = fold_convert (itype, unshare_expr (fd->loops[o].n2));
2001 expand_omp_build_assign (gsi, n2o, t);
2002 if (fd->loops[i].m1 && fd->loops[i].m2)
2003 t = fold_build2 (MINUS_EXPR, itype, unshare_expr (fd->loops[i].m2),
2004 unshare_expr (fd->loops[i].m1));
2005 else if (fd->loops[i].m1)
2006 t = fold_unary (NEGATE_EXPR, itype,
2007 unshare_expr (fd->loops[i].m1));
2008 else
2009 t = unshare_expr (fd->loops[i].m2);
2010 tree m2minusm1
2011 = force_gimple_operand_gsi (gsi, t, true, NULL_TREE,
2012 true, GSI_SAME_STMT);
2013
2014 gimple_stmt_iterator gsi2 = *gsi;
2015 gsi_prev (&gsi2);
2016 e = split_block (entry_bb, gsi_stmt (gsi2));
2017 e = split_block (e->dest, (gimple *) NULL);
2018 basic_block bb1 = e->src;
2019 entry_bb = e->dest;
2020 *gsi = gsi_after_labels (entry_bb);
2021
2022 gsi2 = gsi_after_labels (bb1);
2023 tree ostep = fold_convert (itype, fd->loops[o].step);
2024 t = build_int_cst (itype, (fd->loops[o].cond_code
2025 == LT_EXPR ? -1 : 1));
2026 t = fold_build2 (PLUS_EXPR, itype, ostep, t);
2027 t = fold_build2 (PLUS_EXPR, itype, t, n2o);
2028 t = fold_build2 (MINUS_EXPR, itype, t, n1o);
2029 if (TYPE_UNSIGNED (itype)
2030 && fd->loops[o].cond_code == GT_EXPR)
2031 t = fold_build2 (TRUNC_DIV_EXPR, itype,
2032 fold_build1 (NEGATE_EXPR, itype, t),
2033 fold_build1 (NEGATE_EXPR, itype, ostep));
2034 else
2035 t = fold_build2 (TRUNC_DIV_EXPR, itype, t, ostep);
2036 tree outer_niters
2037 = force_gimple_operand_gsi (&gsi2, t, true, NULL_TREE,
2038 true, GSI_SAME_STMT);
2039 t = fold_build2 (MINUS_EXPR, itype, outer_niters,
2040 build_one_cst (itype));
2041 t = fold_build2 (MULT_EXPR, itype, t, ostep);
2042 t = fold_build2 (PLUS_EXPR, itype, n1o, t);
2043 tree last = force_gimple_operand_gsi (&gsi2, t, true, NULL_TREE,
2044 true, GSI_SAME_STMT);
2045 tree n1, n2, n1e, n2e;
2046 t = fold_convert (itype, unshare_expr (fd->loops[i].n1));
2047 if (fd->loops[i].m1)
2048 {
2049 n1 = fold_convert (itype, unshare_expr (fd->loops[i].m1));
2050 n1 = fold_build2 (MULT_EXPR, itype, n1o, n1);
2051 n1 = fold_build2 (PLUS_EXPR, itype, n1, t);
2052 }
2053 else
2054 n1 = t;
2055 n1 = force_gimple_operand_gsi (&gsi2, n1, true, NULL_TREE,
2056 true, GSI_SAME_STMT);
2057 t = fold_convert (itype, unshare_expr (fd->loops[i].n2));
2058 if (fd->loops[i].m2)
2059 {
2060 n2 = fold_convert (itype, unshare_expr (fd->loops[i].m2));
2061 n2 = fold_build2 (MULT_EXPR, itype, n1o, n2);
2062 n2 = fold_build2 (PLUS_EXPR, itype, n2, t);
2063 }
2064 else
2065 n2 = t;
2066 n2 = force_gimple_operand_gsi (&gsi2, n2, true, NULL_TREE,
2067 true, GSI_SAME_STMT);
2068 t = fold_convert (itype, unshare_expr (fd->loops[i].n1));
2069 if (fd->loops[i].m1)
2070 {
2071 n1e = fold_convert (itype, unshare_expr (fd->loops[i].m1));
2072 n1e = fold_build2 (MULT_EXPR, itype, last, n1e);
2073 n1e = fold_build2 (PLUS_EXPR, itype, n1e, t);
2074 }
2075 else
2076 n1e = t;
2077 n1e = force_gimple_operand_gsi (&gsi2, n1e, true, NULL_TREE,
2078 true, GSI_SAME_STMT);
2079 t = fold_convert (itype, unshare_expr (fd->loops[i].n2));
2080 if (fd->loops[i].m2)
2081 {
2082 n2e = fold_convert (itype, unshare_expr (fd->loops[i].m2));
2083 n2e = fold_build2 (MULT_EXPR, itype, last, n2e);
2084 n2e = fold_build2 (PLUS_EXPR, itype, n2e, t);
2085 }
2086 else
2087 n2e = t;
2088 n2e = force_gimple_operand_gsi (&gsi2, n2e, true, NULL_TREE,
2089 true, GSI_SAME_STMT);
2090 gcond *cond_stmt
2091 = expand_omp_build_cond (&gsi2, fd->loops[i].cond_code,
2092 n1, n2);
2093 e = split_block (bb1, cond_stmt);
2094 e->flags = EDGE_TRUE_VALUE;
2095 e->probability = profile_probability::likely ().guessed ();
2096 basic_block bb2 = e->dest;
2097 gsi2 = gsi_after_labels (bb2);
2098
2099 cond_stmt = expand_omp_build_cond (&gsi2, fd->loops[i].cond_code,
2100 n1e, n2e);
2101 e = split_block (bb2, cond_stmt);
2102 e->flags = EDGE_TRUE_VALUE;
2103 e->probability = profile_probability::likely ().guessed ();
2104 gsi2 = gsi_after_labels (e->dest);
2105
2106 tree step = fold_convert (itype, fd->loops[i].step);
2107 t = build_int_cst (itype, (fd->loops[i].cond_code
2108 == LT_EXPR ? -1 : 1));
2109 t = fold_build2 (PLUS_EXPR, itype, step, t);
2110 t = fold_build2 (PLUS_EXPR, itype, t, n2);
2111 t = fold_build2 (MINUS_EXPR, itype, t, n1);
2112 if (TYPE_UNSIGNED (itype)
2113 && fd->loops[i].cond_code == GT_EXPR)
2114 t = fold_build2 (TRUNC_DIV_EXPR, itype,
2115 fold_build1 (NEGATE_EXPR, itype, t),
2116 fold_build1 (NEGATE_EXPR, itype, step));
2117 else
2118 t = fold_build2 (TRUNC_DIV_EXPR, itype, t, step);
2119 tree first_inner_iterations
2120 = force_gimple_operand_gsi (&gsi2, t, true, NULL_TREE,
2121 true, GSI_SAME_STMT);
2122 t = fold_build2 (MULT_EXPR, itype, m2minusm1, ostep);
2123 if (TYPE_UNSIGNED (itype)
2124 && fd->loops[i].cond_code == GT_EXPR)
2125 t = fold_build2 (TRUNC_DIV_EXPR, itype,
2126 fold_build1 (NEGATE_EXPR, itype, t),
2127 fold_build1 (NEGATE_EXPR, itype, step));
2128 else
2129 t = fold_build2 (TRUNC_DIV_EXPR, itype, t, step);
2130 tree factor
2131 = force_gimple_operand_gsi (&gsi2, t, true, NULL_TREE,
2132 true, GSI_SAME_STMT);
2133 t = fold_build2 (MINUS_EXPR, itype, outer_niters,
2134 build_one_cst (itype));
2135 t = fold_build2 (MULT_EXPR, itype, t, outer_niters);
2136 t = fold_build2 (RSHIFT_EXPR, itype, t, integer_one_node);
2137 t = fold_build2 (MULT_EXPR, itype, factor, t);
2138 t = fold_build2 (PLUS_EXPR, itype,
2139 fold_build2 (MULT_EXPR, itype, outer_niters,
2140 first_inner_iterations), t);
2141 expand_omp_build_assign (&gsi2, counts[fd->last_nonrect],
2142 fold_convert (type, t));
2143
2144 basic_block bb3 = create_empty_bb (bb1);
2145 add_bb_to_loop (bb3, bb1->loop_father);
2146
2147 e = make_edge (bb1, bb3, EDGE_FALSE_VALUE);
2148 e->probability = profile_probability::unlikely ().guessed ();
2149
2150 gsi2 = gsi_after_labels (bb3);
2151 cond_stmt = expand_omp_build_cond (&gsi2, fd->loops[i].cond_code,
2152 n1e, n2e);
2153 e = split_block (bb3, cond_stmt);
2154 e->flags = EDGE_TRUE_VALUE;
2155 e->probability = profile_probability::likely ().guessed ();
2156 basic_block bb4 = e->dest;
2157
2158 ne = make_edge (bb3, entry_bb, EDGE_FALSE_VALUE);
2159 ne->probability = e->probability.invert ();
2160
2161 basic_block bb5 = create_empty_bb (bb2);
2162 add_bb_to_loop (bb5, bb2->loop_father);
2163
2164 ne = make_edge (bb2, bb5, EDGE_FALSE_VALUE);
2165 ne->probability = profile_probability::unlikely ().guessed ();
2166
2167 for (int j = 0; j < 2; j++)
2168 {
2169 gsi2 = gsi_after_labels (j ? bb5 : bb4);
2170 t = fold_build2 (MINUS_EXPR, itype,
2171 unshare_expr (fd->loops[i].n1),
2172 unshare_expr (fd->loops[i].n2));
2173 t = fold_build2 (TRUNC_DIV_EXPR, itype, t, m2minusm1);
2174 tree tem
2175 = force_gimple_operand_gsi (&gsi2, t, true, NULL_TREE,
2176 true, GSI_SAME_STMT);
2177 t = fold_build2 (MINUS_EXPR, itype, tem, n1o);
2178 t = fold_build2 (TRUNC_MOD_EXPR, itype, t, ostep);
2179 t = fold_build2 (MINUS_EXPR, itype, tem, t);
2180 tem = force_gimple_operand_gsi (&gsi2, t, true, NULL_TREE,
2181 true, GSI_SAME_STMT);
2182 t = fold_convert (itype, unshare_expr (fd->loops[i].n1));
2183 if (fd->loops[i].m1)
2184 {
2185 n1 = fold_convert (itype, unshare_expr (fd->loops[i].m1));
2186 n1 = fold_build2 (MULT_EXPR, itype, tem, n1);
2187 n1 = fold_build2 (PLUS_EXPR, itype, n1, t);
2188 }
2189 else
2190 n1 = t;
2191 n1 = force_gimple_operand_gsi (&gsi2, n1, true, NULL_TREE,
2192 true, GSI_SAME_STMT);
2193 t = fold_convert (itype, unshare_expr (fd->loops[i].n2));
2194 if (fd->loops[i].m2)
2195 {
2196 n2 = fold_convert (itype, unshare_expr (fd->loops[i].m2));
2197 n2 = fold_build2 (MULT_EXPR, itype, tem, n2);
2198 n2 = fold_build2 (PLUS_EXPR, itype, n2, t);
2199 }
2200 else
2201 n2 = t;
2202 n2 = force_gimple_operand_gsi (&gsi2, n2, true, NULL_TREE,
2203 true, GSI_SAME_STMT);
2204 expand_omp_build_assign (&gsi2, j ? n2o : n1o, tem);
2205
2206 cond_stmt = expand_omp_build_cond (&gsi2, fd->loops[i].cond_code,
2207 n1, n2);
2208 e = split_block (gsi_bb (gsi2), cond_stmt);
2209 e->flags = j ? EDGE_TRUE_VALUE : EDGE_FALSE_VALUE;
2210 e->probability = profile_probability::unlikely ().guessed ();
2211 ne = make_edge (e->src, bb1,
2212 j ? EDGE_FALSE_VALUE : EDGE_TRUE_VALUE);
2213 ne->probability = e->probability.invert ();
2214 gsi2 = gsi_after_labels (e->dest);
2215
2216 t = fold_build2 (PLUS_EXPR, itype, tem, ostep);
2217 expand_omp_build_assign (&gsi2, j ? n2o : n1o, t);
2218
2219 make_edge (e->dest, bb1, EDGE_FALLTHRU);
2220 }
2221
2222 set_immediate_dominator (CDI_DOMINATORS, bb3, bb1);
2223 set_immediate_dominator (CDI_DOMINATORS, bb5, bb2);
2224 set_immediate_dominator (CDI_DOMINATORS, entry_bb, bb1);
2225
2226 if (fd->first_nonrect + 1 == fd->last_nonrect)
2227 {
2228 fd->first_inner_iterations = first_inner_iterations;
2229 fd->factor = factor;
2230 fd->adjn1 = n1o;
2231 }
2232 }
2233 else
2234 {
2235 /* Fallback implementation. Evaluate the loops with m1/m2
2236 non-NULL as well as their outer loops at runtime using temporaries
2237 instead of the original iteration variables, and in the
2238 body just bump the counter. */
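In other words, this fallback walks the non-rectangular part of the nest with fresh temporaries and accumulates the total into counts[fd->last_nonrect], using the closed-form count only for the innermost non-rectangular loop. A hedged C sketch of what the generated code computes for a two-level case (all names here are illustrative; the pass emits equivalent GIMPLE, not this function):

/* Illustration only: total iterations of
     for (i = n1_0; i < n2_0; i += step0)
       for (j = m1 * i + n1_1; j < m2 * i + n2_1; j += step1)  */
static unsigned long long
count_nonrect (long long n1_0, long long n2_0, long long step0,
               long long m1, long long n1_1,
               long long m2, long long n2_1, long long step1)
{
  unsigned long long count = 0;
  for (long long i = n1_0; i < n2_0; i += step0)
    {
      long long lb = m1 * i + n1_1;            /* inner bounds depend on i */
      long long ub = m2 * i + n2_1;
      if (lb < ub)
        count += (step1 - 1 + ub - lb) / step1; /* closed-form inner count */
    }
  return count;
}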
2239 gimple_stmt_iterator gsi2 = *gsi;
2240 gsi_prev (&gsi2);
2241 e = split_block (entry_bb, gsi_stmt (gsi2));
2242 e = split_block (e->dest, (gimple *) NULL);
2243 basic_block cur_bb = e->src;
2244 basic_block next_bb = e->dest;
2245 entry_bb = e->dest;
2246 *gsi = gsi_after_labels (entry_bb);
2247
2248 tree *vs = XALLOCAVEC (tree, fd->last_nonrect);
2249 memset (vs, 0, fd->last_nonrect * sizeof (tree));
2250
2251 for (i = 0; i <= fd->last_nonrect; i++)
2252 {
2253 if (fd->loops[i].m1 == NULL_TREE
2254 && fd->loops[i].m2 == NULL_TREE
2255 && !fd->loops[i].non_rect_referenced)
2256 continue;
2257
2258 tree itype = TREE_TYPE (fd->loops[i].v);
2259
2260 gsi2 = gsi_after_labels (cur_bb);
2261 tree n1, n2;
2262 t = fold_convert (itype, unshare_expr (fd->loops[i].n1));
2263 if (fd->loops[i].m1 == NULL_TREE)
2264 n1 = t;
2265 else if (POINTER_TYPE_P (itype))
2266 {
2267 gcc_assert (integer_onep (fd->loops[i].m1));
2268 t = fold_convert (sizetype,
2269 unshare_expr (fd->loops[i].n1));
2270 n1 = fold_build_pointer_plus (vs[i - fd->loops[i].outer], t);
2271 }
2272 else
2273 {
2274 n1 = fold_convert (itype, unshare_expr (fd->loops[i].m1));
2275 n1 = fold_build2 (MULT_EXPR, itype,
2276 vs[i - fd->loops[i].outer], n1);
2277 n1 = fold_build2 (PLUS_EXPR, itype, n1, t);
2278 }
2279 n1 = force_gimple_operand_gsi (&gsi2, n1, true, NULL_TREE,
2280 true, GSI_SAME_STMT);
2281 if (i < fd->last_nonrect)
2282 {
2283 vs[i] = create_tmp_reg (itype, ".it");
2284 expand_omp_build_assign (&gsi2, vs[i], n1);
2285 }
2286 t = fold_convert (itype, unshare_expr (fd->loops[i].n2));
2287 if (fd->loops[i].m2 == NULL_TREE)
2288 n2 = t;
2289 else if (POINTER_TYPE_P (itype))
2290 {
2291 gcc_assert (integer_onep (fd->loops[i].m2));
2292 t = fold_convert (sizetype,
2293 unshare_expr (fd->loops[i].n2));
2294 n2 = fold_build_pointer_plus (vs[i - fd->loops[i].outer], t);
2295 }
2296 else
2297 {
2298 n2 = fold_convert (itype, unshare_expr (fd->loops[i].m2));
2299 n2 = fold_build2 (MULT_EXPR, itype,
2300 vs[i - fd->loops[i].outer], n2);
2301 n2 = fold_build2 (PLUS_EXPR, itype, n2, t);
2302 }
2303 n2 = force_gimple_operand_gsi (&gsi2, n2, true, NULL_TREE,
2304 true, GSI_SAME_STMT);
2305 if (POINTER_TYPE_P (itype))
2306 itype = signed_type_for (itype);
2307 if (i == fd->last_nonrect)
2308 {
2309 gcond *cond_stmt
2310 = expand_omp_build_cond (&gsi2, fd->loops[i].cond_code,
2311 n1, n2);
2312 e = split_block (cur_bb, cond_stmt);
2313 e->flags = EDGE_TRUE_VALUE;
2314 ne = make_edge (cur_bb, next_bb, EDGE_FALSE_VALUE);
2315 e->probability = profile_probability::likely ().guessed ();
2316 ne->probability = e->probability.invert ();
2317 gsi2 = gsi_after_labels (e->dest);
2318
2319 t = build_int_cst (itype, (fd->loops[i].cond_code == LT_EXPR
2320 ? -1 : 1));
2321 t = fold_build2 (PLUS_EXPR, itype,
2322 fold_convert (itype, fd->loops[i].step), t);
2323 t = fold_build2 (PLUS_EXPR, itype, t,
2324 fold_convert (itype, n2));
2325 t = fold_build2 (MINUS_EXPR, itype, t,
2326 fold_convert (itype, n1));
2327 tree step = fold_convert (itype, fd->loops[i].step);
2328 if (TYPE_UNSIGNED (itype)
2329 && fd->loops[i].cond_code == GT_EXPR)
2330 t = fold_build2 (TRUNC_DIV_EXPR, itype,
2331 fold_build1 (NEGATE_EXPR, itype, t),
2332 fold_build1 (NEGATE_EXPR, itype, step));
2333 else
2334 t = fold_build2 (TRUNC_DIV_EXPR, itype, t, step);
2335 t = fold_convert (type, t);
2336 t = fold_build2 (PLUS_EXPR, type,
2337 counts[fd->last_nonrect], t);
2338 t = force_gimple_operand_gsi (&gsi2, t, true, NULL_TREE,
2339 true, GSI_SAME_STMT);
2340 expand_omp_build_assign (&gsi2, counts[fd->last_nonrect], t);
2341 e = make_edge (e->dest, next_bb, EDGE_FALLTHRU);
2342 set_immediate_dominator (CDI_DOMINATORS, next_bb, cur_bb);
2343 break;
2344 }
2345 e = split_block (cur_bb, last_stmt (cur_bb));
2346
2347 basic_block new_cur_bb = create_empty_bb (cur_bb);
2348 add_bb_to_loop (new_cur_bb, cur_bb->loop_father);
2349
2350 gsi2 = gsi_after_labels (e->dest);
2351 tree step = fold_convert (itype,
2352 unshare_expr (fd->loops[i].step));
2353 if (POINTER_TYPE_P (TREE_TYPE (vs[i])))
2354 t = fold_build_pointer_plus (vs[i],
2355 fold_convert (sizetype, step));
2356 else
2357 t = fold_build2 (PLUS_EXPR, itype, vs[i], step);
2358 t = force_gimple_operand_gsi (&gsi2, t, true, NULL_TREE,
2359 true, GSI_SAME_STMT);
2360 expand_omp_build_assign (&gsi2, vs[i], t);
2361
2362 ne = split_block (e->dest, last_stmt (e->dest));
2363 gsi2 = gsi_after_labels (ne->dest);
2364
2365 expand_omp_build_cond (&gsi2, fd->loops[i].cond_code, vs[i], n2);
2366 edge e3, e4;
2367 if (next_bb == entry_bb)
2368 {
2369 e3 = find_edge (ne->dest, next_bb);
2370 e3->flags = EDGE_FALSE_VALUE;
2371 }
2372 else
2373 e3 = make_edge (ne->dest, next_bb, EDGE_FALSE_VALUE);
2374 e4 = make_edge (ne->dest, new_cur_bb, EDGE_TRUE_VALUE);
2375 e4->probability = profile_probability::likely ().guessed ();
2376 e3->probability = e4->probability.invert ();
2377 basic_block esrc = e->src;
2378 make_edge (e->src, ne->dest, EDGE_FALLTHRU);
2379 cur_bb = new_cur_bb;
2380 basic_block latch_bb = next_bb;
2381 next_bb = e->dest;
2382 remove_edge (e);
2383 set_immediate_dominator (CDI_DOMINATORS, ne->dest, esrc);
2384 set_immediate_dominator (CDI_DOMINATORS, latch_bb, ne->dest);
2385 set_immediate_dominator (CDI_DOMINATORS, cur_bb, ne->dest);
2386 }
2387 }
2388 t = NULL_TREE;
2389 for (i = fd->first_nonrect; i < fd->last_nonrect; i++)
2390 if (!fd->loops[i].non_rect_referenced
2391 && fd->loops[i].m1 == NULL_TREE
2392 && fd->loops[i].m2 == NULL_TREE)
2393 {
2394 if (t == NULL_TREE)
2395 t = counts[i];
2396 else
2397 t = fold_build2 (MULT_EXPR, type, t, counts[i]);
2398 }
2399 if (t)
2400 {
2401 t = fold_build2 (MULT_EXPR, type, counts[fd->last_nonrect], t);
2402 expand_omp_build_assign (gsi, counts[fd->last_nonrect], t);
2403 }
2404 if (!rect_count_seen)
2405 t = counts[fd->last_nonrect];
2406 else
2407 t = fold_build2 (MULT_EXPR, type, fd->loop.n2,
2408 counts[fd->last_nonrect]);
2409 expand_omp_build_assign (gsi, fd->loop.n2, t);
2410 }
2411 else if (fd->non_rect)
2412 {
2413 tree t = fd->loop.n2;
2414 gcc_assert (TREE_CODE (t) == INTEGER_CST);
2415 int non_rect_referenced = 0, non_rect = 0;
2416 for (i = 0; i < fd->collapse; i++)
2417 {
2418 if ((i < fd->first_nonrect || i > fd->last_nonrect)
2419 && !integer_zerop (counts[i]))
2420 t = fold_build2 (TRUNC_DIV_EXPR, type, t, counts[i]);
2421 if (fd->loops[i].non_rect_referenced)
2422 non_rect_referenced++;
2423 if (fd->loops[i].m1 || fd->loops[i].m2)
2424 non_rect++;
2425 }
2426 gcc_assert (non_rect == 1 && non_rect_referenced == 1);
2427 counts[fd->last_nonrect] = t;
2428 }
2429}
2430
2431/* Helper function for expand_omp_{for_*,simd}. Generate code like:
2432 T = V;
2433 V3 = N31 + (T % count3) * STEP3;
2434 T = T / count3;
2435 V2 = N21 + (T % count2) * STEP2;
2436 T = T / count2;
2437 V1 = N11 + T * STEP1;
2438 if this loop doesn't have an inner loop construct combined with it.
2439 If it does have an inner loop construct combined with it and the
2440 iteration count isn't known constant, store values from counts array
2441 into its _looptemp_ temporaries instead.
2442 For non-rectangular loops (between fd->first_nonrect and fd->last_nonrect
2443 inclusive), use the count of all those loops together, and either
2444 find quadratic etc. equation roots, or as a fallback, do:
2445 COUNT = 0;
2446 for (tmpi = N11; tmpi COND1 N12; tmpi += STEP1)
2447 for (tmpj = M21 * tmpi + N21;
2448 tmpj COND2 M22 * tmpi + N22; tmpj += STEP2)
2449 {
2450 int tmpk1 = M31 * tmpj + N31;
2451 int tmpk2 = M32 * tmpj + N32;
2452 if (tmpk1 COND3 tmpk2)
2453 {
2454 if (COND3 is <)
2455 adj = STEP3 - 1;
2456 else
2457 adj = STEP3 + 1;
2458 int temp = (adj + tmpk2 - tmpk1) / STEP3;
2459 if (COUNT + temp > T)
2460 {
2461 V1 = tmpi;
2462 V2 = tmpj;
2463 V3 = tmpk1 + (T - COUNT) * STEP3;
2464 goto done;
2465 }
2466 else
2467 COUNT += temp;
2468 }
2469 }
2470 done:;
2471 but for optional innermost or outermost rectangular loops that aren't
2472 referenced by other loop expressions keep doing the division/modulo. */
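For the purely rectangular case described at the top of the comment above, recovering the individual iteration variables from the collapsed iteration number T is plain division and modulo. A small C illustration for a triple collapse (the names mirror the pseudocode above; this is a sketch, not the GIMPLE the function emits):

/* Sketch of the division/modulo recovery for three collapsed loops.  */
static void
recover_ivs (unsigned long long T,
             long long n11, long long step1,
             long long n21, long long step2, unsigned long long count2,
             long long n31, long long step3, unsigned long long count3,
             long long *v1, long long *v2, long long *v3)
{
  *v3 = n31 + (long long) (T % count3) * step3;
  T /= count3;
  *v2 = n21 + (long long) (T % count2) * step2;
  T /= count2;
  *v1 = n11 + (long long) T * step1;
}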
2473
2474static void
2475expand_omp_for_init_vars (struct omp_for_data *fd, gimple_stmt_iterator *gsi,
2476 tree *counts, tree *nonrect_bounds,
2477 gimple *inner_stmt, tree startvar)
2478{
2479 int i;
2480 if (gimple_omp_for_combined_p (fd->for_stmt))
2481 {
2482 /* If fd->loop.n2 is constant, then no propagation of the counts
2483 is needed, they are constant. */
2484 if (TREE_CODE (fd->loop.n2) == INTEGER_CST)
2485 return;
2486
2487 tree clauses = gimple_code (inner_stmt) != GIMPLE_OMP_FOR
2488 ? gimple_omp_taskreg_clauses (inner_stmt)
2489 : gimple_omp_for_clauses (inner_stmt);
2490 /* First two _looptemp_ clauses are for istart/iend, counts[0]
2491 isn't supposed to be handled, as the inner loop doesn't
2492 use it. */
2493 tree innerc = omp_find_clause (clauses, OMP_CLAUSE__LOOPTEMP_);
2494 gcc_assert (innerc);
2495 int count = 0;
2496 if (fd->non_rect
2497 && fd->last_nonrect == fd->first_nonrect + 1
2498 && !TYPE_UNSIGNED (TREE_TYPE (fd->loops[fd->last_nonrect].v)))
2499 count = 4;
2500 for (i = 0; i < fd->collapse + count; i++)
2501 {
2502 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
2503 OMP_CLAUSE__LOOPTEMP_);
2504 gcc_assert (innerc);
2505 if (i)
2506 {
2507 tree tem = OMP_CLAUSE_DECL (innerc);
2508 tree t;
2509 if (i < fd->collapse)
2510 t = counts[i];
2511 else
2512 switch (i - fd->collapse)
2513 {
2514 case 0: t = counts[0]; break;
2515 case 1: t = fd->first_inner_iterations; break;
2516 case 2: t = fd->factor; break;
2517 case 3: t = fd->adjn1; break;
2518 default: gcc_unreachable ();
2519 }
2520 t = fold_convert (TREE_TYPE (tem), t);
2521 t = force_gimple_operand_gsi (gsi, t, false, NULL_TREE,
2522 false, GSI_CONTINUE_LINKING);
2523 gassign *stmt = gimple_build_assign (tem, t);
2524 gsi_insert_after (gsi, stmt, GSI_CONTINUE_LINKING);
2525 }
2526 }
2527 return;
2528 }
2529
2530 tree type = TREE_TYPE (fd->loop.v);
2531 tree tem = create_tmp_reg (type, ".tem");
2532 gassign *stmt = gimple_build_assign (tem, startvar);
2533 gsi_insert_after (gsi, stmt, GSI_CONTINUE_LINKING);
2534
2535 for (i = fd->collapse - 1; i >= 0; i--)
2536 {
2537 tree vtype = TREE_TYPE (fd->loops[i].v), itype, t;
2538 itype = vtype;
2539 if (POINTER_TYPE_P (vtype))
2540 itype = signed_type_for (vtype);
2541 if (i != 0 && (i != fd->last_nonrect || fd->first_nonrect))
2542 t = fold_build2 (TRUNC_MOD_EXPR, type, tem, counts[i]);
2543 else
2544 t = tem;
2545 if (i == fd->last_nonrect)
2546 {
2547 t = force_gimple_operand_gsi (gsi, t, true, NULL_TREE,
2548 false, GSI_CONTINUE_LINKING);
2549 tree stopval = t;
2550 tree idx = create_tmp_reg (type, ".count");
2551 expand_omp_build_assign (gsi, idx,
2552 build_zero_cst (type), true);
2553 basic_block bb_triang = NULL, bb_triang_dom = NULL;
2554 if (fd->first_nonrect + 1 == fd->last_nonrect
2555 && (TREE_CODE (fd->loop.n2) == INTEGER_CST
2556 || fd->first_inner_iterations)
2557 && (optab_handler (sqrt_optab, TYPE_MODE (double_type_node))
2558 != CODE_FOR_nothing)
2559 && !integer_zerop (fd->loop.n2))
2560 {
2561 tree outer_n1 = fd->adjn1 ? fd->adjn1 : fd->loops[i - 1].n1;
2562 tree itype = TREE_TYPE (fd->loops[i].v);
2563 tree first_inner_iterations = fd->first_inner_iterations;
2564 tree factor = fd->factor;
2565 gcond *cond_stmt
2566 = expand_omp_build_cond (gsi, NE_EXPR, factor,
2567 build_zero_cst (TREE_TYPE (factor)));
2568 edge e = split_block (gsi_bb (*gsi), cond_stmt);
2569 basic_block bb0 = e->src;
2570 e->flags = EDGE_TRUE_VALUE;
2571 e->probability = profile_probability::likely ();
2572 bb_triang_dom = bb0;
2573 *gsi = gsi_after_labels (e->dest);
2574 tree slltype = long_long_integer_type_node;
2575 tree ulltype = long_long_unsigned_type_node;
2576 tree stopvalull = fold_convert (ulltype, stopval);
2577 stopvalull
2578 = force_gimple_operand_gsi (gsi, stopvalull, true, NULL_TREE,
2579 false, GSI_CONTINUE_LINKING);
2580 first_inner_iterations
2581 = fold_convert (slltype, first_inner_iterations);
2582 first_inner_iterations
2583 = force_gimple_operand_gsi (gsi, first_inner_iterations, true,
2584 NULL_TREE, false,
2585 GSI_CONTINUE_LINKING);
2586 factor = fold_convert (slltype, factor);
2587 factor
2588 = force_gimple_operand_gsi (gsi, factor, true, NULL_TREE,
2589 false, GSI_CONTINUE_LINKING);
2590 tree first_inner_iterationsd
2591 = fold_build1 (FLOAT_EXPR, double_type_node,
2592 first_inner_iterations);
2593 first_inner_iterationsd
2594 = force_gimple_operand_gsi (gsi, first_inner_iterationsd, true,
2595 NULL_TREE, false,
2596 GSI_CONTINUE_LINKING);
2597 tree factord = fold_build1 (FLOAT_EXPR, double_type_node,
2598 factor);
2599 factord = force_gimple_operand_gsi (gsi, factord, true,
2600 NULL_TREE, false,
2601 GSI_CONTINUE_LINKING);
2602 tree stopvald = fold_build1 (FLOAT_EXPR, double_type_node,
2603 stopvalull);
2604 stopvald = force_gimple_operand_gsi (gsi, stopvald, true,
2605 NULL_TREE, false,
2606 GSI_CONTINUE_LINKING);
2607 /* Temporarily disable flag_rounding_math, values will be
2608 decimal numbers divided by 2 and worst case imprecisions
2609 due to too large values ought to be caught later by the
2610 checks for fallback. */
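Concretely, for a triangular nest where outer iteration k runs first_inner_iterations + k * factor inner iterations, the cumulative count after c outer iterations is c * first_inner_iterations + factor * c * (c - 1) / 2; the block below inverts that relation with the quadratic formula in double precision, which is why a sqrt is emitted. A hedged C sketch of the same inversion (illustrative only, under the assumption just stated):

#include <math.h>

/* Illustration only: outer index c reached after `stop' logical iterations.  */
static unsigned long long
outer_index_from_flat (unsigned long long stop, double first_inner, double factor)
{
  double t3 = first_inner - factor / 2.0;                /* linear term */
  double disc = 2.0 * factor * (double) stop + t3 * t3;  /* discriminant */
  return (unsigned long long) ((sqrt (disc) - t3) / factor);
}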
2611 int save_flag_rounding_math = flag_rounding_math;
2612 flag_rounding_math = 0;
2613 t = fold_build2 (RDIV_EXPR, double_type_node, factord,
2614 build_real (double_type_node, dconst2));
2615 tree t3 = fold_build2 (MINUS_EXPR, double_type_node,
2616 first_inner_iterationsd, t);
2617 t3 = force_gimple_operand_gsi (gsi, t3, true, NULL_TREE, false,
2618 GSI_CONTINUE_LINKING);
2619 t = fold_build2 (MULT_EXPR, double_type_node, factord,
2620 build_real (double_type_node, dconst2));
2621 t = fold_build2 (MULT_EXPR, double_type_node, t, stopvald);
2622 t = fold_build2 (PLUS_EXPR, double_type_node, t,
2623 fold_build2 (MULT_EXPR, double_type_node,
2624 t3, t3));
2625 flag_rounding_math = save_flag_rounding_math;
2626 t = force_gimple_operand_gsi (gsi, t, true, NULL_TREE, false,
2627 GSI_CONTINUE_LINKING);
2628 if (flag_exceptions
2629 && cfun->can_throw_non_call_exceptions
2630 && operation_could_trap_p (LT_EXPR, true, false, NULL_TREE))
2631 {
2632 tree tem = fold_build2 (LT_EXPR, boolean_type_node, t,
2633 build_zero_cst (double_type_node));
2634 tem = force_gimple_operand_gsi (gsi, tem, true, NULL_TREE,
2635 false, GSI_CONTINUE_LINKING);
2636 cond_stmt = gimple_build_cond (NE_EXPR, tem,
2637 boolean_false_node,
2638 NULL_TREE, NULL_TREE);
2639 }
2640 else
2641 cond_stmt
2642 = gimple_build_cond (LT_EXPR, t,
2643 build_zero_cst (double_type_node),
2644 NULL_TREE, NULL_TREE);
2645 gsi_insert_after (gsi, cond_stmt, GSI_CONTINUE_LINKING);
2646 e = split_block (gsi_bb (*gsi), cond_stmt);
2647 basic_block bb1 = e->src;
2648 e->flags = EDGE_FALSE_VALUE;
2649 e->probability = profile_probability::very_likely ();
2650 *gsi = gsi_after_labels (e->dest);
2651 gcall *call = gimple_build_call_internal (IFN_SQRT, 1, t);
2652 tree sqrtr = create_tmp_var (double_type_node);
2653 gimple_call_set_lhs (call, sqrtr);
2654 gsi_insert_after (gsi, call, GSI_CONTINUE_LINKING);
2655 t = fold_build2 (MINUS_EXPR, double_type_node, sqrtr, t3);
2656 t = fold_build2 (RDIV_EXPR, double_type_node, t, factord);
2657 t = fold_build1 (FIX_TRUNC_EXPR, ulltype, t);
2658 tree c = create_tmp_var (ulltype);
2659 tree d = create_tmp_var (ulltype);
2660 expand_omp_build_assign (gsi, c, t, true);
2661 t = fold_build2 (MINUS_EXPR, ulltype, c,
2662 build_one_cst (ulltype));
2663 t = fold_build2 (MULT_EXPR, ulltype, c, t);
2664 t = fold_build2 (RSHIFT_EXPR, ulltype, t, integer_one_node);
2665 t = fold_build2 (MULT_EXPR, ulltype,
2666 fold_convert (ulltype, fd->factor), t);
2667 tree t2
2668 = fold_build2 (MULT_EXPR, ulltype, c,
2669 fold_convert (ulltype,
2670 fd->first_inner_iterations));
2671 t = fold_build2 (PLUS_EXPR, ulltype, t, t2);
2672 expand_omp_build_assign (gsi, d, t, true);
2673 t = fold_build2 (MULT_EXPR, ulltype,
2674 fold_convert (ulltype, fd->factor), c);
2675 t = fold_build2 (PLUS_EXPR, ulltype,
2676 t, fold_convert (ulltype,
2677 fd->first_inner_iterations));
2678 t2 = force_gimple_operand_gsi (gsi, t, true, NULL_TREE, false,
2679 GSI_CONTINUE_LINKING);
2680 cond_stmt = gimple_build_cond (GE_EXPR, stopvalull, d,
2681 NULL_TREE, NULL_TREE);
2682 gsi_insert_after (gsi, cond_stmt, GSI_CONTINUE_LINKING);
2683 e = split_block (gsi_bb (*gsi), cond_stmt);
2684 basic_block bb2 = e->src;
2685 e->flags = EDGE_TRUE_VALUE;
2686 e->probability = profile_probability::very_likely ();
2687 *gsi = gsi_after_labels (e->dest);
2688 t = fold_build2 (PLUS_EXPR, ulltype, d, t2);
2689 t = force_gimple_operand_gsi (gsi, t, true, NULL_TREE, false,
2690 GSI_CONTINUE_LINKING);
2691 cond_stmt = gimple_build_cond (GE_EXPR, stopvalull, t,
2692 NULL_TREE, NULL_TREE);
2693 gsi_insert_after (gsi, cond_stmt, GSI_CONTINUE_LINKING);
2694 e = split_block (gsi_bb (*gsi), cond_stmt);
2695 basic_block bb3 = e->src;
2696 e->flags = EDGE_FALSE_VALUE;
2697 e->probability = profile_probability::very_likely ();
2698 *gsi = gsi_after_labels (e->dest);
2699 t = fold_convert (itype, c);
2700 t = fold_build2 (MULT_EXPR, itype, t, fd->loops[i - 1].step);
2701 t = fold_build2 (PLUS_EXPR, itype, outer_n1, t);
2702 t = force_gimple_operand_gsi (gsi, t, true, NULL_TREE, false,
2703 GSI_CONTINUE_LINKING);
2704 expand_omp_build_assign (gsi, fd->loops[i - 1].v, t, true);
2705 t2 = fold_build2 (MINUS_EXPR, ulltype, stopvalull, d);
2706 t2 = fold_convert (itype, t2);
2707 t2 = fold_build2 (MULT_EXPR, itype, t2, fd->loops[i].step);
2708 t2 = fold_build2 (PLUS_EXPR, itype, t2, fd->loops[i].n1);
2709 if (fd->loops[i].m1)
2710 {
2711 t = fold_build2 (MULT_EXPR, itype, t, fd->loops[i].m1);
2712 t2 = fold_build2 (PLUS_EXPR, itype, t2, t);
2713 }
2714 expand_omp_build_assign (gsi, fd->loops[i].v, t2, true);
2715 e = split_block (gsi_bb (*gsi), gsi_stmt (*gsi));
2716 bb_triang = e->src;
2717 *gsi = gsi_after_labels (e->dest);
2718 remove_edge (e);
2719 e = make_edge (bb1, gsi_bb (*gsi), EDGE_TRUE_VALUE);
2720 e->probability = profile_probability::very_unlikely ();
2721 e = make_edge (bb2, gsi_bb (*gsi), EDGE_FALSE_VALUE);
2722 e->probability = profile_probability::very_unlikely ();
2723 e = make_edge (bb3, gsi_bb (*gsi), EDGE_TRUE_VALUE);
2724 e->probability = profile_probability::very_unlikely ();
2725
2726 basic_block bb4 = create_empty_bb (bb0);
2727 add_bb_to_loop (bb4, bb0->loop_father);
2728 e = make_edge (bb0, bb4, EDGE_FALSE_VALUE);
2729 e->probability = profile_probability::unlikely ();
2730 make_edge (bb4, gsi_bb (*gsi), EDGE_FALLTHRU);
2731 set_immediate_dominator (CDI_DOMINATORS, bb4, bb0);
2732 set_immediate_dominator (CDI_DOMINATORS, gsi_bb (*gsi), bb0);
2733 gimple_stmt_iterator gsi2 = gsi_after_labels (bb4);
2734 t2 = fold_build2 (TRUNC_DIV_EXPR, type,
2735 counts[i], counts[i - 1]);
2736 t2 = force_gimple_operand_gsi (&gsi2, t2, true, NULL_TREE, false,
2737 GSI_CONTINUE_LINKING);
2738 t = fold_build2 (TRUNC_MOD_EXPR, type, stopval, t2);
2739 t2 = fold_build2 (TRUNC_DIV_EXPR, type, stopval, t2);
2740 t = fold_convert (itype, t);
2741 t2 = fold_convert (itype, t2);
2742 t = fold_build2 (MULT_EXPR, itype, t,
2743 fold_convert (itype, fd->loops[i].step));
2744 t = fold_build2 (PLUS_EXPR, itype, fd->loops[i].n1, t);
2745 t2 = fold_build2 (MULT_EXPR, itype, t2,
2746 fold_convert (itype, fd->loops[i - 1].step));
2747 t2 = fold_build2 (PLUS_EXPR, itype, fd->loops[i - 1].n1, t2);
2748 t2 = force_gimple_operand_gsi (&gsi2, t2, false, NULL_TREE,
2749 false, GSI_CONTINUE_LINKING);
2750 stmt = gimple_build_assign (fd->loops[i - 1].v, t2);
2751 gsi_insert_after (&gsi2, stmt, GSI_CONTINUE_LINKING);
2752 if (fd->loops[i].m1)
2753 {
2754 t2 = fold_build2 (MULT_EXPR, itype, fd->loops[i].m1,
2755 fd->loops[i - 1].v);
2756 t = fold_build2 (PLUS_EXPR, itype, t, t2);
2757 }
2758 t = force_gimple_operand_gsi (&gsi2, t, false, NULL_TREE,
2759 false, GSI_CONTINUE_LINKING);
2760 stmt = gimple_build_assign (fd->loops[i].v, t);
2761 gsi_insert_after (&gsi2, stmt, GSI_CONTINUE_LINKING);
2762 }
2763 /* Fallback implementation. Evaluate the loops in between
2764 (inclusive) fd->first_nonrect and fd->last_nonrect at
2765 runtime using temporaries instead of the original iteration
2766 variables, in the body just bump the counter and compare
2767 with the desired value. */
2768 gimple_stmt_iterator gsi2 = *gsi;
2769 basic_block entry_bb = gsi_bb (gsi2);
2770 edge e = split_block (entry_bb, gsi_stmt (gsi2));
2771 e = split_block (e->dest, (gimple *) NULL);
2772 basic_block dom_bb = NULL;
2773 basic_block cur_bb = e->src;
2774 basic_block next_bb = e->dest;
2775 entry_bb = e->dest;
2776 *gsi = gsi_after_labels (entry_bb);
2777
2778 tree *vs = XALLOCAVEC (tree, fd->last_nonrect);
2779 tree n1 = NULL_TREE, n2 = NULL_TREE;
2780 memset (vs, 0, fd->last_nonrect * sizeof (tree));
2781
2782 for (int j = fd->first_nonrect; j <= fd->last_nonrect; j++)
2783 {
2784 tree itype = TREE_TYPE (fd->loops[j].v);
2785 bool rect_p = (fd->loops[j].m1 == NULL_TREE
2786 && fd->loops[j].m2 == NULL_TREE
2787 && !fd->loops[j].non_rect_referenced);
2788 gsi2 = gsi_after_labels (cur_bb);
2789 t = fold_convert (itype, unshare_expr (fd->loops[j].n1));
2790 if (fd->loops[j].m1 == NULL_TREE)
2791 n1 = rect_p ? build_zero_cst (type) : t;
2792 else if (POINTER_TYPE_P (itype))
2793 {
2794 gcc_assert (integer_onep (fd->loops[j].m1));
2795 t = fold_convert (sizetype,
2796 unshare_expr (fd->loops[j].n1));
2797 n1 = fold_build_pointer_plus (vs[j - fd->loops[j].outer], t);
2798 }
2799 else
2800 {
2801 n1 = fold_convert (itype, unshare_expr (fd->loops[j].m1));
2802 n1 = fold_build2 (MULT_EXPR, itype,
2803 vs[j - fd->loops[j].outer], n1);
2804 n1 = fold_build2 (PLUS_EXPR, itype, n1, t);
2805 }
2806 n1 = force_gimple_operand_gsi (&gsi2, n1, true, NULL_TREE,
2807 true, GSI_SAME_STMT);
2808 if (j < fd->last_nonrect)
2809 {
2810 vs[j] = create_tmp_reg (rect_p ? type : itype, ".it");
2811 expand_omp_build_assign (&gsi2, vs[j], n1);
2812 }
2813 t = fold_convert (itype, unshare_expr (fd->loops[j].n2));
2814 if (fd->loops[j].m2 == NULL_TREE)
2815 n2 = rect_p ? counts[j] : t;
2816 else if (POINTER_TYPE_P (itype))
2817 {
2818 gcc_assert (integer_onep (fd->loops[j].m2));
2819 t = fold_convert (sizetype,
2820 unshare_expr (fd->loops[j].n2));
2821 n2 = fold_build_pointer_plus (vs[j - fd->loops[j].outer], t);
2822 }
2823 else
2824 {
2825 n2 = fold_convert (itype, unshare_expr (fd->loops[j].m2));
2826 n2 = fold_build2 (MULT_EXPR, itype,
2827 vs[j - fd->loops[j].outer], n2);
2828 n2 = fold_build2 (PLUS_EXPR, itype, n2, t);
2829 }
2830 n2 = force_gimple_operand_gsi (&gsi2, n2, true, NULL_TREE,
2831 true, GSI_SAME_STMT);
2832 if (POINTER_TYPE_P (itype))
2833 itype = signed_type_for (itype);
2834 if (j == fd->last_nonrect)
2835 {
2836 gcond *cond_stmt
2837 = expand_omp_build_cond (&gsi2, fd->loops[i].cond_code,
2838 n1, n2);
2839 e = split_block (cur_bb, cond_stmt);
2840 e->flags = EDGE_TRUE_VALUE;
2841 edge ne = make_edge (cur_bb, next_bb, EDGE_FALSE_VALUE);
2842 e->probability = profile_probability::likely ().guessed ();
2843 ne->probability = e->probability.invert ();
2844 gsi2 = gsi_after_labels (e->dest);
2845
2846 t = build_int_cst (itype, (fd->loops[j].cond_code == LT_EXPR
2847 ? -1 : 1));
2848 t = fold_build2 (PLUS_EXPR, itype,
2849 fold_convert (itype, fd->loops[j].step), t);
2850 t = fold_build2 (PLUS_EXPR, itype, t,
2851 fold_convert (itype, n2));
2852 t = fold_build2 (MINUS_EXPR, itype, t,
2853 fold_convert (itype, n1));
2854 tree step = fold_convert (itype, fd->loops[j].step);
2855 if (TYPE_UNSIGNED (itype)
2856 && fd->loops[j].cond_code == GT_EXPR)
2857 t = fold_build2 (TRUNC_DIV_EXPR, itype,
2858 fold_build1 (NEGATE_EXPR, itype, t),
2859 fold_build1 (NEGATE_EXPR, itype, step));
2860 else
2861 t = fold_build2 (TRUNC_DIV_EXPR, itype, t, step);
2862 t = fold_convert (type, t);
2863 t = fold_build2 (PLUS_EXPR, type, idx, t);
2864 t = force_gimple_operand_gsi (&gsi2, t, true, NULL_TREE,
2865 true, GSI_SAME_STMT);
2866 e = make_edge (e->dest, next_bb, EDGE_FALLTHRU);
2867 set_immediate_dominator (CDI_DOMINATORS, next_bb, cur_bb);
2868 cond_stmt
2869 = gimple_build_cond (LE_EXPR, t, stopval, NULL_TREE,
2870 NULL_TREE);
2871 gsi_insert_before (&gsi2, cond_stmt, GSI_SAME_STMT);
2872 e = split_block (gsi_bb (gsi2), cond_stmt);
2873 e->flags = EDGE_TRUE_VALUE;
2874 e->probability = profile_probability::likely ().guessed ();
2875 ne = make_edge (e->src, entry_bb, EDGE_FALSE_VALUE);
2876 ne->probability = e->probability.invert ();
2877 gsi2 = gsi_after_labels (e->dest);
2878 expand_omp_build_assign (&gsi2, idx, t);
2879 set_immediate_dominator (CDI_DOMINATORS, entry_bb, dom_bb);
2880 break;
2881 }
2882 e = split_block (cur_bb, last_stmt (cur_bb));
2883
2884 basic_block new_cur_bb = create_empty_bb (cur_bb);
2885 add_bb_to_loop (new_cur_bb, cur_bb->loop_father);
2886
2887 gsi2 = gsi_after_labels (e->dest);
2888 if (rect_p)
2889 t = fold_build2 (PLUS_EXPR, type, vs[j],
2890 build_one_cst (type));
2891 else
2892 {
2893 tree step
2894 = fold_convert (itype, unshare_expr (fd->loops[j].step));
2895 if (POINTER_TYPE_P (vtype))
2896 t = fold_build_pointer_plus (vs[j], fold_convert (sizetype,
2897 step));
2898 else
2899 t = fold_build2 (PLUS_EXPR, itype, vs[j], step);
2900 }
2901 t = force_gimple_operand_gsi (&gsi2, t, true, NULL_TREE,
2902 true, GSI_SAME_STMT);
2903 expand_omp_build_assign (&gsi2, vs[j], t);
2904
2905 edge ne = split_block (e->dest, last_stmt (e->dest));
2906 gsi2 = gsi_after_labels (ne->dest);
2907
2908 gcond *cond_stmt;
2909 if (next_bb == entry_bb)
2910 /* No need to actually check the outermost condition. */
2911 cond_stmt
2912 = gimple_build_cond (EQ_EXPR, boolean_true_node,
2913 boolean_true_node,
2914 NULL_TREE, NULL_TREE);
2915 else
2916 cond_stmt
2917 = gimple_build_cond (rect_p ? LT_EXPR
2918 : fd->loops[j].cond_code,
2919 vs[j], n2, NULL_TREE, NULL_TREE);
2920 gsi_insert_before (&gsi2, cond_stmt, GSI_SAME_STMT);
2921 edge e3, e4;
2922 if (next_bb == entry_bb)
2923 {
2924 e3 = find_edge (ne->dest, next_bb);
2925 e3->flags = EDGE_FALSE_VALUE;
2926 dom_bb = ne->dest;
2927 }
2928 else
2929 e3 = make_edge (ne->dest, next_bb, EDGE_FALSE_VALUE);
2930 e4 = make_edge (ne->dest, new_cur_bb, EDGE_TRUE_VALUE);
2931 e4->probability = profile_probability::likely ().guessed ();
2932 e3->probability = e4->probability.invert ();
2933 basic_block esrc = e->src;
2934 make_edge (e->src, ne->dest, EDGE_FALLTHRU);
2935 cur_bb = new_cur_bb;
2936 basic_block latch_bb = next_bb;
2937 next_bb = e->dest;
2938 remove_edge (e);
2939 set_immediate_dominator (CDI_DOMINATORS, ne->dest, esrc);
2940 set_immediate_dominator (CDI_DOMINATORS, latch_bb, ne->dest);
2941 set_immediate_dominator (CDI_DOMINATORS, cur_bb, ne->dest);
2942 }
2943 for (int j = fd->last_nonrect; j >= fd->first_nonrect; j--)
2944 {
2945 tree vtype = TREE_TYPE (fd->loops[j].v);
2946 tree itype = vtype;
2947 if (POINTER_TYPE_P (itype))
2948 itype = signed_type_for (itype);
2949 bool rect_p = (fd->loops[j].m1 == NULL_TREE
2950 && fd->loops[j].m2 == NULL_TREE
2951 && !fd->loops[j].non_rect_referenced);
2952 if (j == fd->last_nonrect)
2953 {
2954 t = fold_build2 (MINUS_EXPR, type, stopval, idx);
2955 t = fold_convert (itype, t);
2956 tree t2
2957 = fold_convert (itype, unshare_expr (fd->loops[j].step));
2958 t = fold_build2 (MULT_EXPR, itype, t, t2);
2959 if (POINTER_TYPE_P (vtype))
2960 t = fold_build_pointer_plus (n1,
2961 fold_convert (sizetype, t));
2962 else
2963 t = fold_build2 (PLUS_EXPR, itype, n1, t);
2964 }
2965 else if (rect_p)
2966 {
2967 t = fold_convert (itype, vs[j]);
2968 t = fold_build2 (MULT_EXPR, itype, t,
2969 fold_convert (itype, fd->loops[j].step));
2970 if (POINTER_TYPE_P (vtype))
2971 t = fold_build_pointer_plus (fd->loops[j].n1,
2972 fold_convert (sizetype, t));
2973 else
2974 t = fold_build2 (PLUS_EXPR, itype, fd->loops[j].n1, t);
2975 }
2976 else
2977 t = vs[j];
2978 t = force_gimple_operand_gsi (gsi, t, false,
2979 NULL_TREE, true,
2980 GSI_SAME_STMT);
2981 stmt = gimple_build_assign (fd->loops[j].v, t);
2982 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2983 }
2984 if (gsi_end_p (*gsi))
2985 *gsi = gsi_last_bb (gsi_bb (*gsi));
2986 else
2987 gsi_prev (gsi);
2988 if (bb_triang)
2989 {
2990 e = split_block (gsi_bb (*gsi), gsi_stmt (*gsi));
2991 make_edge (bb_triang, e->dest, EDGE_FALLTHRU);
2992 *gsi = gsi_after_labels (e->dest);
2993 if (!gsi_end_p (*gsi))
2994 gsi_insert_before (gsi, gimple_build_nop (), GSI_NEW_STMT);
2995 set_immediate_dominator (CDI_DOMINATORS, e->dest, bb_triang_dom);
2996 }
2997 }
2998 else
2999 {
3000 t = fold_convert (itype, t);
3001 t = fold_build2 (MULT_EXPR, itype, t,
3002 fold_convert (itype, fd->loops[i].step));
3003 if (POINTER_TYPE_P (vtype))
3004 t = fold_build_pointer_plus (fd->loops[i].n1, t);
3005 else
3006 t = fold_build2 (PLUS_EXPR, itype, fd->loops[i].n1, t);
3007 t = force_gimple_operand_gsi (gsi, t,
3008 DECL_P (fd->loops[i].v)
3009 && TREE_ADDRESSABLE (fd->loops[i].v),
3010 NULL_TREE, false,
3011 GSI_CONTINUE_LINKING);
3012 stmt = gimple_build_assign (fd->loops[i].v, t);
3013 gsi_insert_after (gsi, stmt, GSI_CONTINUE_LINKING);
3014 }
3015 if (i != 0 && (i != fd->last_nonrect || fd->first_nonrect))
3016 {
3017 t = fold_build2 (TRUNC_DIV_EXPR, type, tem, counts[i]);
3018 t = force_gimple_operand_gsi (gsi, t, false, NULL_TREE,
3019 false, GSI_CONTINUE_LINKING);
3020 stmt = gimple_build_assign (tem, t);
3021 gsi_insert_after (gsi, stmt, GSI_CONTINUE_LINKING);
3022 }
3023 if (i == fd->last_nonrect)
3024 i = fd->first_nonrect;
3025 }
3026 if (fd->non_rect)
3027 for (i = 0; i <= fd->last_nonrect; i++)
3028 if (fd->loops[i].m2)
3029 {
3030 tree itype = TREE_TYPE (fd->loops[i].v);
3031
3032 tree t;
3033 if (POINTER_TYPE_P (itype))
3034 {
3035 gcc_assert (integer_onep (fd->loops[i].m2));
3036 t = fold_convert (sizetype, unshare_expr (fd->loops[i].n2));
3037 t = fold_build_pointer_plus (fd->loops[i - fd->loops[i].outer].v,
3038 t);
3039 }
3040 else
3041 {
3042 t = fold_convert (itype, unshare_expr (fd->loops[i].m2));
3043 t = fold_build2 (MULT_EXPR, itype,
3044 fd->loops[i - fd->loops[i].outer].v, t);
3045 t = fold_build2 (PLUS_EXPR, itype, t,
3046 fold_convert (itype,
3047 unshare_expr (fd->loops[i].n2)));
3048 }
3049 nonrect_bounds[i] = create_tmp_reg (itype, ".bound");
3050 t = force_gimple_operand_gsi (gsi, t, false,
3051 NULL_TREE, false,
3052 GSI_CONTINUE_LINKING);
3053 stmt = gimple_build_assign (nonrect_bounds[i], t);
3054 gsi_insert_after (gsi, stmt, GSI_CONTINUE_LINKING);
3055 }
3056}
3057
3058/* Helper function for expand_omp_for_*. Generate code like:
3059 L10:
3060 V3 += STEP3;
3061 if (V3 cond3 N32) goto BODY_BB; else goto L11;
3062 L11:
3063 V3 = N31;
3064 V2 += STEP2;
3065 if (V2 cond2 N22) goto BODY_BB; else goto L12;
3066 L12:
3067 V2 = N21;
3068 V1 += STEP1;
3069 goto BODY_BB;
3070 For non-rectangular loops, use temporaries stored in nonrect_bounds
3071 for the upper bounds if M?2 multiplier is present. Given e.g.
3072 for (V1 = N11; V1 cond1 N12; V1 += STEP1)
3073 for (V2 = N21; V2 cond2 N22; V2 += STEP2)
3074 for (V3 = N31; V3 cond3 N32; V3 += STEP3)
3075 for (V4 = N41 + M41 * V2; V4 cond4 N42 + M42 * V2; V4 += STEP4)
3076 do:
3077 L10:
3078 V4 += STEP4;
3079 if (V4 cond4 NONRECT_BOUND4) goto BODY_BB; else goto L11;
3080 L11:
3081 V4 = N41 + M41 * V2; // This can be left out if the loop
3082 // refers to the immediate parent loop
3083 V3 += STEP3;
3084 if (V3 cond3 N32) goto BODY_BB; else goto L12;
3085 L12:
3086 V3 = N31;
3087 V2 += STEP2;
3088 if (V2 cond2 N22) goto L120; else goto L13;
3089 L120:
3090 V4 = N41 + M41 * V2;
3091 NONRECT_BOUND4 = N42 + M42 * V2;
3092 if (V4 cond4 NONRECT_BOUND4) goto BODY_BB; else goto L12;
3093 L13:
3094 V2 = N21;
3095 V1 += STEP1;
3096 goto L120; */
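/* Illustrative note added for this report (not in the original source):
   a source-level nest of the non-rectangular shape described above
   (OpenMP 5.0), where an inner bound uses an outer iteration variable
   with a multiplier, e.g.

     #pragma omp for collapse(2)
     for (int i = 0; i < 64; i++)
       for (int j = 2 * i; j < 64; j++)   // N21 + M21 * V1 form, M21 == 2
         body (i, j);

   The L10/L11/L120 blocks above are the "bump and wrap" update code
   generated for such a nest; V1..V4, N41, M41 etc. are the placeholders
   from the comment, not identifiers in the IL.  */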
3097
3098static basic_block
3099extract_omp_for_update_vars (struct omp_for_data *fd, tree *nonrect_bounds,
3100 basic_block cont_bb, basic_block body_bb)
3101{
3102 basic_block last_bb, bb, collapse_bb = NULL;
3103 int i;
3104 gimple_stmt_iterator gsi;
3105 edge e;
3106 tree t;
3107 gimple *stmt;
3108
3109 last_bb = cont_bb;
3110 for (i = fd->collapse - 1; i >= 0; i--)
3111 {
3112 tree vtype = TREE_TYPE (fd->loops[i].v);
3113
3114 bb = create_empty_bb (last_bb);
3115 add_bb_to_loop (bb, last_bb->loop_father);
3116 gsi = gsi_start_bb (bb);
3117
3118 if (i < fd->collapse - 1)
3119 {
3120 e = make_edge (last_bb, bb, EDGE_FALSE_VALUE);
3121 e->probability
3122 = profile_probability::guessed_always ().apply_scale (1, 8);
3123
3124 struct omp_for_data_loop *l = &fd->loops[i + 1];
3125 if (l->m1 == NULL_TREE || l->outer != 1)
3126 {
3127 t = l->n1;
3128 if (l->m1)
3129 {
3130 if (POINTER_TYPE_P (TREE_TYPE (l->v)))
3131 t = fold_build_pointer_plus (fd->loops[i + 1 - l->outer].v,
3132 fold_convert (sizetype, t));
3133 else
3134 {
3135 tree t2
3136 = fold_build2 (MULT_EXPR, TREE_TYPE (t),
3137 fd->loops[i + 1 - l->outer].v, l->m1);
3138 t = fold_build2 (PLUS_EXPR, TREE_TYPE (t), t2, t);
3139 }
3140 }
3141 t = force_gimple_operand_gsi (&gsi, t,
3142 DECL_P (l->v)
3143 && TREE_ADDRESSABLE (l->v),
3144 NULL_TREE, false,
3145 GSI_CONTINUE_LINKING);
3146 stmt = gimple_build_assign (l->v, t);
3147 gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
3148 }
3149 }
3150 else
3151 collapse_bb = bb;
3152
3153 set_immediate_dominator (CDI_DOMINATORS, bb, last_bb);
3154
3155 if (POINTER_TYPE_P (vtype))
3156 t = fold_build_pointer_plus (fd->loops[i].v, fd->loops[i].step);
3157 else
3158 t = fold_build2 (PLUS_EXPR, vtype, fd->loops[i].v, fd->loops[i].step);
3159 t = force_gimple_operand_gsi (&gsi, t,
3160 DECL_P (fd->loops[i].v)
3161 && TREE_ADDRESSABLE (fd->loops[i].v),
3162 NULL_TREE, false, GSI_CONTINUE_LINKING);
3163 stmt = gimple_build_assign (fd->loops[i].v, t);
3164 gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
3165
3166 if (fd->loops[i].non_rect_referenced)
3167 {
3168 basic_block update_bb = NULL, prev_bb = NULL;
3169 for (int j = i + 1; j <= fd->last_nonrect; j++)
3170 if (j - fd->loops[j].outer == i)
3171 {
3172 tree n1, n2;
3173 struct omp_for_data_loop *l = &fd->loops[j];
3174 basic_block this_bb = create_empty_bb (last_bb);
3175 add_bb_to_loop (this_bb, last_bb->loop_father);
3176 gimple_stmt_iterator gsi2 = gsi_start_bb (this_bb);
3177 if (prev_bb)
3178 {
3179 e = make_edge (prev_bb, this_bb, EDGE_TRUE_VALUE);
3180 e->probability
3181 = profile_probability::guessed_always ().apply_scale (7,
3182 8);
3183 set_immediate_dominator (CDI_DOMINATORS, this_bb, prev_bb);
3184 }
3185 if (l->m1)
3186 {
3187 if (POINTER_TYPE_P (TREE_TYPE (l->v)))
3188 t = fold_build_pointer_plus (fd->loops[i].v,
3189 fold_convert (sizetype,
3190 l->n1));
3191 else
3192 {
3193 t = fold_build2 (MULT_EXPR, TREE_TYPE (l->m1), l->m1,
3194 fd->loops[i].v);
3195 t = fold_build2 (PLUS_EXPR, TREE_TYPE (l->v),
3196 t, l->n1);
3197 }
3198 n1 = force_gimple_operand_gsi (&gsi2, t, true, NULL_TREE,
3199 false,
3200 GSI_CONTINUE_LINKING);
3201 stmt = gimple_build_assign (l->v, n1);
3202 gsi_insert_after (&gsi2, stmt, GSI_CONTINUE_LINKING);
3203 n1 = l->v;
3204 }
3205 else
3206 n1 = force_gimple_operand_gsi (&gsi2, l->n1, true,
3207 NULL_TREE, false,
3208 GSI_CONTINUE_LINKING);
3209 if (l->m2)
3210 {
3211 if (POINTER_TYPE_P (TREE_TYPE (l->v)))
3212 t = fold_build_pointer_plus (fd->loops[i].v,
3213 fold_convert (sizetype,
3214 l->n2));
3215 else
3216 {
3217 t = fold_build2 (MULT_EXPR, TREE_TYPE (l->m2), l->m2,
3218 fd->loops[i].v);
3219 t = fold_build2 (PLUS_EXPR,
3220 TREE_TYPE (nonrect_bounds[j]),
3221 t, unshare_expr (l->n2));
3222 }
3223 n2 = force_gimple_operand_gsi (&gsi2, t, true, NULL_TREE,
3224 false,
3225 GSI_CONTINUE_LINKING);
3226 stmt = gimple_build_assign (nonrect_bounds[j], n2);
3227 gsi_insert_after (&gsi2, stmt, GSI_CONTINUE_LINKING);
3228 n2 = nonrect_bounds[j];
3229 }
3230 else
3231 n2 = force_gimple_operand_gsi (&gsi2, unshare_expr (l->n2),
3232 true, NULL_TREE, false,
3233 GSI_CONTINUE_LINKING);
3234 gcond *cond_stmt
3235 = gimple_build_cond (l->cond_code, n1, n2,
3236 NULL_TREE, NULL_TREE);
3237 gsi_insert_after (&gsi2, cond_stmt, GSI_CONTINUE_LINKING);
3238 if (update_bb == NULL)
3239 update_bb = this_bb;
3240 e = make_edge (this_bb, bb, EDGE_FALSE_VALUE);
3241 e->probability
3242 = profile_probability::guessed_always ().apply_scale (1, 8);
3243 if (prev_bb == NULL)
3244 set_immediate_dominator (CDI_DOMINATORS, this_bb, bb);
3245 prev_bb = this_bb;
3246 }
3247 e = make_edge (prev_bb, body_bb, EDGE_TRUE_VALUE);
3248 e->probability
3249 = profile_probability::guessed_always ().apply_scale (7, 8);
3250 body_bb = update_bb;
3251 }
3252
3253 if (i > 0)
3254 {
3255 if (fd->loops[i].m2)
3256 t = nonrect_bounds[i];
3257 else
3258 t = unshare_expr (fd->loops[i].n2);
3259 t = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
3260 false, GSI_CONTINUE_LINKING);
3261 tree v = fd->loops[i].v;
3262 if (DECL_P (v) && TREE_ADDRESSABLE (v))
3263 v = force_gimple_operand_gsi (&gsi, v, true, NULL_TREE,
3264 false, GSI_CONTINUE_LINKING);
3265 t = fold_build2 (fd->loops[i].cond_code, boolean_type_node, v, t);
3266 stmt = gimple_build_cond_empty (t);
3267 gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
3268 if (walk_tree (gimple_cond_lhs_ptr (as_a <gcond *> (stmt)),
3269 expand_omp_regimplify_p, NULL, NULL)
3270 || walk_tree (gimple_cond_rhs_ptr (as_a <gcond *> (stmt)),
3271 expand_omp_regimplify_p, NULL, NULL))
3272 gimple_regimplify_operands (stmt, &gsi);
3273 e = make_edge (bb, body_bb, EDGE_TRUE_VALUE);
3274 e->probability = profile_probability::guessed_always ().apply_scale (7, 8);
3275 }
3276 else
3277 make_edge (bb, body_bb, EDGE_FALLTHRU);
3278 set_immediate_dominator (CDI_DOMINATORS, bb, last_bb);
3279 last_bb = bb;
3280 }
3281
3282 return collapse_bb;
3283}
3284
3285/* Expand #pragma omp ordered depend(source). */
3286
3287static void
3288expand_omp_ordered_source (gimple_stmt_iterator *gsi, struct omp_for_data *fd,
3289 tree *counts, location_t loc)
3290{
3291 enum built_in_function source_ix
3292 = fd->iter_type == long_integer_type_node
3293 ? BUILT_IN_GOMP_DOACROSS_POST : BUILT_IN_GOMP_DOACROSS_ULL_POST;
3294 gimple *g
3295 = gimple_build_call (builtin_decl_explicit (source_ix), 1,
3296 build_fold_addr_expr (counts[fd->ordered]));
3297 gimple_set_location (g, loc);
3298 gsi_insert_before (gsi, g, GSI_SAME_STMT);
3299}
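/* Illustrative note added for this report (not in the original source):
   the construct expanded here is the "post" half of an OpenMP doacross
   loop, i.e. a

     #pragma omp ordered depend(source)

   inside a "#pragma omp for ordered(n)" body.  It becomes a single call
   to the builtin selected above (BUILT_IN_GOMP_DOACROSS_POST or the
   unsigned-long-long variant, implemented by libgomp's
   GOMP_doacross_post), with the address of the iteration-counter array
   counts[fd->ordered] as the only argument.  */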
3300
3301/* Expand a single depend from #pragma omp ordered depend(sink:...). */
3302
3303static void
3304expand_omp_ordered_sink (gimple_stmt_iterator *gsi, struct omp_for_data *fd,
3305 tree *counts, tree c, location_t loc)
3306{
3307 auto_vec<tree, 10> args;
3308 enum built_in_function sink_ix
3309 = fd->iter_type == long_integer_type_node
3310 ? BUILT_IN_GOMP_DOACROSS_WAIT : BUILT_IN_GOMP_DOACROSS_ULL_WAIT;
3311 tree t, off, coff = NULL_TREE, deps = OMP_CLAUSE_DECL (c), cond = NULL_TREE;
3312 int i;
3313 gimple_stmt_iterator gsi2 = *gsi;
3314 bool warned_step = false;
3315
3316 for (i = 0; i < fd->ordered; i++)
3317 {
3318 tree step = NULL_TREE;
3319 off = TREE_PURPOSE (deps);
3320 if (TREE_CODE (off) == TRUNC_DIV_EXPR)
3321 {
3322 step = TREE_OPERAND (off, 1);
3323 off = TREE_OPERAND (off, 0);
3324 }
3325 if (!integer_zerop (off))
3326 {
3327 gcc_assert (fd->loops[i].cond_code == LT_EXPR
3328 || fd->loops[i].cond_code == GT_EXPR);
3329 bool forward = fd->loops[i].cond_code == LT_EXPR;
3330 if (step)
3331 {
3332 /* Non-simple Fortran DO loops. If step is variable,
3333 we don't know at compile time even the direction, so can't
3334 warn. */
3335 if (TREE_CODE (step) != INTEGER_CST)
3336 break;
3337 forward = tree_int_cst_sgn (step) != -1;
3338 }
3339 if (forward ^ OMP_CLAUSE_DEPEND_SINK_NEGATIVE (deps))
3340 warning_at (loc, 0, "%<depend%> clause with %<sink%> modifier "
3341 "waiting for lexically later iteration");
3342 break;
3343 }
3344 deps = TREE_CHAIN (deps);
3345 }
3346 /* If all offsets corresponding to the collapsed loops are zero,
3347 this depend clause can be ignored. FIXME: but there is still a
3348 flush needed. We need to emit one __sync_synchronize () for it
3349 though (perhaps conditionally)? Solve this together with the
3350 conservative dependence folding optimization.
3351 if (i >= fd->collapse)
3352 return; */
3353
3354 deps = OMP_CLAUSE_DECL (c);
3355 gsi_prev (&gsi2);
3356 edge e1 = split_block (gsi_bb (gsi2), gsi_stmt (gsi2));
3357 edge e2 = split_block_after_labels (e1->dest);
3358
3359 gsi2 = gsi_after_labels (e1->dest);
3360 *gsi = gsi_last_bb (e1->src);
3361 for (i = 0; i < fd->ordered; i++)
3362 {
3363 tree itype = TREE_TYPE (fd->loops[i].v);
3364 tree step = NULL_TREE;
3365 tree orig_off = NULL_TREE;
3366 if (POINTER_TYPE_P (itype))
3367 itype = sizetype;
3368 if (i)
3369 deps = TREE_CHAIN (deps);
3370 off = TREE_PURPOSE (deps);
3371 if (TREE_CODE (off) == TRUNC_DIV_EXPR)
3372 {
3373 step = TREE_OPERAND (off, 1);
3374 off = TREE_OPERAND (off, 0);
3375 gcc_assert (fd->loops[i].cond_code == LT_EXPR
3376 && integer_onep (fd->loops[i].step)
3377 && !POINTER_TYPE_P (TREE_TYPE (fd->loops[i].v)));
3378 }
3379 tree s = fold_convert_loc (loc, itype, step ? step : fd->loops[i].step);
3380 if (step)
3381 {
3382 off = fold_convert_loc (loc, itype, off);
3383 orig_off = off;
3384 off = fold_build2_loc (loc, TRUNC_DIV_EXPR, itype, off, s);
3385 }
3386
3387 if (integer_zerop (off))
3388 t = boolean_true_node;
3389 else
3390 {
3391 tree a;
3392 tree co = fold_convert_loc (loc, itype, off);
3393 if (POINTER_TYPE_P (TREE_TYPE (fd->loops[i].v)))
3394 {
3395 if (OMP_CLAUSE_DEPEND_SINK_NEGATIVE (deps))
3396 co = fold_build1_loc (loc, NEGATE_EXPR, itype, co);
3397 a = fold_build2_loc (loc, POINTER_PLUS_EXPR,
3398 TREE_TYPE (fd->loops[i].v), fd->loops[i].v,
3399 co);
3400 }
3401 else if (OMP_CLAUSE_DEPEND_SINK_NEGATIVE (deps))
3402 a = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (fd->loops[i].v),
3403 fd->loops[i].v, co);
3404 else
3405 a = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (fd->loops[i].v),
3406 fd->loops[i].v, co);
3407 if (step)
3408 {
3409 tree t1, t2;
3410 if (OMP_CLAUSE_DEPEND_SINK_NEGATIVE (deps))
3411 t1 = fold_build2_loc (loc, GE_EXPR, boolean_type_node, a,
3412 fd->loops[i].n1);
3413 else
3414 t1 = fold_build2_loc (loc, LT_EXPR, boolean_type_node, a,
3415 fd->loops[i].n2);
3416 if (OMP_CLAUSE_DEPEND_SINK_NEGATIVE (deps))
3417 t2 = fold_build2_loc (loc, LT_EXPR, boolean_type_node, a,
3418 fd->loops[i].n2);
3419 else
3420 t2 = fold_build2_loc (loc, GE_EXPR, boolean_type_node, a,
3421 fd->loops[i].n1);
3422 t = fold_build2_loc (loc, LT_EXPR, boolean_type_node,
3423 step, build_int_cst (TREE_TYPE (step), 0));
3424 if (TREE_CODE (step) != INTEGER_CST)
3425 {
3426 t1 = unshare_expr (t1);
3427 t1 = force_gimple_operand_gsi (gsi, t1, true, NULL_TREE,
3428 false, GSI_CONTINUE_LINKING);
3429 t2 = unshare_expr (t2);
3430 t2 = force_gimple_operand_gsi (gsi, t2, true, NULL_TREE,
3431 false, GSI_CONTINUE_LINKING);
3432 }
3433 t = fold_build3_loc (loc, COND_EXPR, boolean_type_node,
3434 t, t2, t1);
3435 }
3436 else if (fd->loops[i].cond_code == LT_EXPR)
3437 {
3438 if (OMP_CLAUSE_DEPEND_SINK_NEGATIVE (deps))
3439 t = fold_build2_loc (loc, GE_EXPR, boolean_type_node, a,
3440 fd->loops[i].n1);
3441 else
3442 t = fold_build2_loc (loc, LT_EXPR, boolean_type_node, a,
3443 fd->loops[i].n2);
3444 }
3445 else if (OMP_CLAUSE_DEPEND_SINK_NEGATIVE (deps))
3446 t = fold_build2_loc (loc, GT_EXPR, boolean_type_node, a,
3447 fd->loops[i].n2);
3448 else
3449 t = fold_build2_loc (loc, LE_EXPR, boolean_type_node, a,
3450 fd->loops[i].n1);
3451 }
3452 if (cond)
3453 cond = fold_build2_loc (loc, BIT_AND_EXPR, boolean_type_node, cond, t);
3454 else
3455 cond = t;
3456
3457 off = fold_convert_loc (loc, itype, off);
3458
3459 if (step
3460 || (fd->loops[i].cond_code == LT_EXPR
3461 ? !integer_onep (fd->loops[i].step)
3462 : !integer_minus_onep (fd->loops[i].step)))
3463 {
3464 if (step == NULL_TREE
3465 && TYPE_UNSIGNED (itype)
3466 && fd->loops[i].cond_code == GT_EXPR)
3467 t = fold_build2_loc (loc, TRUNC_MOD_EXPR, itype, off,
3468 fold_build1_loc (loc, NEGATE_EXPR, itype,
3469 s));
3470 else
3471 t = fold_build2_loc (loc, TRUNC_MOD_EXPR, itype,
3472 orig_off ? orig_off : off, s);
3473 t = fold_build2_loc (loc, EQ_EXPR, boolean_type_node, t,
3474 build_int_cst (itype, 0));
3475 if (integer_zerop (t) && !warned_step)
3476 {
3477 warning_at (loc, 0, "%<depend%> clause with %<sink%> modifier "
3478 "refers to iteration never in the iteration "
3479 "space");
3480 warned_step = true;
3481 }
3482 cond = fold_build2_loc (loc, BIT_AND_EXPR, boolean_type_node,
3483 cond, t);
3484 }
3485
3486 if (i <= fd->collapse - 1 && fd->collapse > 1)
3487 t = fd->loop.v;
3488 else if (counts[i])
3489 t = counts[i];
3490 else
3491 {
3492 t = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (fd->loops[i].v),
3493 fd->loops[i].v, fd->loops[i].n1);
3494 t = fold_convert_loc (loc, fd->iter_type, t);
3495 }
3496 if (step)
3497 /* We have divided off by step already earlier. */;
3498 else if (TYPE_UNSIGNED (itype) && fd->loops[i].cond_code == GT_EXPR)
3499 off = fold_build2_loc (loc, TRUNC_DIV_EXPR, itype, off,
3500 fold_build1_loc (loc, NEGATE_EXPR, itype,
3501 s));
3502 else
3503 off = fold_build2_loc (loc, TRUNC_DIV_EXPR, itype, off, s);
3504 if (OMP_CLAUSE_DEPEND_SINK_NEGATIVE (deps))
3505 off = fold_build1_loc (loc, NEGATE_EXPR, itype, off);
3506 off = fold_convert_loc (loc, fd->iter_type, off);
3507 if (i <= fd->collapse - 1 && fd->collapse > 1)
3508 {
3509 if (i)
3510 off = fold_build2_loc (loc, PLUS_EXPR, fd->iter_type, coff,
3511 off);
3512 if (i < fd->collapse - 1)
3513 {
3514 coff = fold_build2_loc (loc, MULT_EXPR, fd->iter_type, off,
3515 counts[i]);
3516 continue;
3517 }
3518 }
3519 off = unshare_expr (off);
3520 t = fold_build2_loc (loc, PLUS_EXPR, fd->iter_type, t, off);
3521 t = force_gimple_operand_gsi (&gsi2, t, true, NULL_TREE,
3522 true, GSI_SAME_STMT);
3523 args.safe_push (t);
3524 }
3525 gimple *g = gimple_build_call_vec (builtin_decl_explicit (sink_ix), args);
3526 gimple_set_location (g, loc);
3527 gsi_insert_before (&gsi2, g, GSI_SAME_STMT);
3528
3529 cond = unshare_expr (cond);
3530 cond = force_gimple_operand_gsi (gsi, cond, true, NULL_TREE, false,
3531 GSI_CONTINUE_LINKING);
3532 gsi_insert_after (gsi, gimple_build_cond_empty (cond), GSI_NEW_STMT);
3533 edge e3 = make_edge (e1->src, e2->dest, EDGE_FALSE_VALUE);
3534 e3->probability = profile_probability::guessed_always ().apply_scale (1, 8);
3535 e1->probability = e3->probability.invert ();
3536 e1->flags = EDGE_TRUE_VALUE;
3537 set_immediate_dominator (CDI_DOMINATORS, e2->dest, e1->src);
3538
3539 *gsi = gsi_after_labels (e2->dest);
3540}
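/* Illustrative note added for this report (not in the original source):
   a typical doacross "wait" that the function above expands, e.g.

     #pragma omp for ordered(2)
     for (int i = 1; i < n; i++)
       for (int j = 1; j < m; j++)
         {
           #pragma omp ordered depend(sink: i - 1, j) depend(sink: i, j - 1)
           a[i][j] = f (a[i - 1][j], a[i][j - 1]);
           #pragma omp ordered depend(source)
         }

   Each depend(sink: ...) vector is turned into a GOMP_doacross_wait
   (or _ull_wait) call guarded by the COND built above, so the wait is
   skipped for iterations whose sink would fall outside the iteration
   space.  */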
3541
3542/* Expand all #pragma omp ordered depend(source) and
3543 #pragma omp ordered depend(sink:...) constructs in the current
3544 #pragma omp for ordered(n) region. */
3545
3546static void
3547expand_omp_ordered_source_sink (struct omp_region *region,
3548 struct omp_for_data *fd, tree *counts,
3549 basic_block cont_bb)
3550{
3551 struct omp_region *inner;
3552 int i;
3553 for (i = fd->collapse - 1; i < fd->ordered; i++)
3554 if (i == fd->collapse - 1 && fd->collapse > 1)
3555 counts[i] = NULL_TREE;
3556 else if (i >= fd->collapse && !cont_bb)
3557 counts[i] = build_zero_cst (fd->iter_type);
3558 else if (!POINTER_TYPE_P (TREE_TYPE (fd->loops[i].v))
3559 && integer_onep (fd->loops[i].step))
3560 counts[i] = NULL_TREE;
3561 else
3562 counts[i] = create_tmp_var (fd->iter_type, ".orditer");
3563 tree atype
3564 = build_array_type_nelts (fd->iter_type, fd->ordered - fd->collapse + 1);
3565 counts[fd->ordered] = create_tmp_var (atype, ".orditera");
3566 TREE_ADDRESSABLE (counts[fd->ordered]) = 1;
3567
3568 for (inner = region->inner; inner; inner = inner->next)
3569 if (inner->type == GIMPLE_OMP_ORDERED)
3570 {
3571 gomp_ordered *ord_stmt = inner->ord_stmt;
3572 gimple_stmt_iterator gsi = gsi_for_stmt (ord_stmt);
3573 location_t loc = gimple_location (ord_stmt);
3574 tree c;
3575 for (c = gimple_omp_ordered_clauses (ord_stmt);
3576 c; c = OMP_CLAUSE_CHAIN (c))
3577 if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE)
3578 break;
3579 if (c)
3580 expand_omp_ordered_source (&gsi, fd, counts, loc);
3581 for (c = gimple_omp_ordered_clauses (ord_stmt);
3582 c; c = OMP_CLAUSE_CHAIN (c))
3583 if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
3584 expand_omp_ordered_sink (&gsi, fd, counts, c, loc);
3585 gsi_remove (&gsi, true);
3586 }
3587}
3588
3589/* Wrap the body into fd->ordered - fd->collapse loops that aren't
3590 collapsed. */
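/* Illustrative note added for this report (not in the original source):
   with e.g.

     #pragma omp for ordered(2)
     for (int i = 0; i < n; i++)
       for (int j = 0; j < m; j++)
         ...

   only the outermost loop is workshared (fd->collapse == 1 by default,
   fd->ordered == 2), so the inner j loop is one of the loops this
   function re-creates around the body, together with the .orditer /
   .orditera counters that the doacross waits read.  */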
3591
3592static basic_block
3593expand_omp_for_ordered_loops (struct omp_for_data *fd, tree *counts,
3594 basic_block cont_bb, basic_block body_bb,
3595 bool ordered_lastprivate)
3596{
3597 if (fd->ordered == fd->collapse)
3598 return cont_bb;
3599
3600 if (!cont_bb)
3601 {
3602 gimple_stmt_iterator gsi = gsi_after_labels (body_bb);
3603 for (int i = fd->collapse; i < fd->ordered; i++)
3604 {
3605 tree type = TREE_TYPE (fd->loops[i].v);
3606 tree n1 = fold_convert (type, fd->loops[i].n1);
3607 expand_omp_build_assign (&gsi, fd->loops[i].v, n1);
3608 tree aref = build4 (ARRAY_REF, fd->iter_type, counts[fd->ordered],
3609 size_int (i - fd->collapse + 1),
3610 NULL_TREE, NULL_TREE);
3611 expand_omp_build_assign (&gsi, aref, build_zero_cst (fd->iter_type));
3612 }
3613 return NULL;
3614 }
3615
3616 for (int i = fd->ordered - 1; i >= fd->collapse; i--)
3617 {
3618 tree t, type = TREE_TYPE (fd->loops[i].v);
3619 gimple_stmt_iterator gsi = gsi_after_labels (body_bb);
3620 expand_omp_build_assign (&gsi, fd->loops[i].v,
3621 fold_convert (type, fd->loops[i].n1));
3622 if (counts[i])
3623 expand_omp_build_assign (&gsi, counts[i],
3624 build_zero_cst (fd->iter_type));
3625 tree aref = build4 (ARRAY_REF, fd->iter_type, counts[fd->ordered],
3626 size_int (i - fd->collapse + 1),
3627 NULL_TREE, NULL_TREE);
3628 expand_omp_build_assign (&gsi, aref, build_zero_cst (fd->iter_type));
3629 if (!gsi_end_p (gsi))
3630 gsi_prev (&gsi);
3631 else
3632 gsi = gsi_last_bb (body_bb);
3633 edge e1 = split_block (body_bb, gsi_stmt (gsi));
3634 basic_block new_body = e1->dest;
3635 if (body_bb == cont_bb)
3636 cont_bb = new_body;
3637 edge e2 = NULLnullptr;
3638 basic_block new_header;
3639 if (EDGE_COUNT (cont_bb->preds)vec_safe_length (cont_bb->preds) > 0)
3640 {
3641 gsi = gsi_last_bb (cont_bb);
3642 if (POINTER_TYPE_P (type)(((enum tree_code) (type)->base.code) == POINTER_TYPE || (
(enum tree_code) (type)->base.code) == REFERENCE_TYPE)
)
3643 t = fold_build_pointer_plus (fd->loops[i].v,fold_build_pointer_plus_loc (((location_t) 0), fd->loops[i
].v, fold_convert_loc (((location_t) 0), sizetype_tab[(int) stk_sizetype
], fd->loops[i].step))
3644 fold_convert (sizetype,fold_build_pointer_plus_loc (((location_t) 0), fd->loops[i
].v, fold_convert_loc (((location_t) 0), sizetype_tab[(int) stk_sizetype
], fd->loops[i].step))
3645 fd->loops[i].step))fold_build_pointer_plus_loc (((location_t) 0), fd->loops[i
].v, fold_convert_loc (((location_t) 0), sizetype_tab[(int) stk_sizetype
], fd->loops[i].step))
;
3646 else
3647 t = fold_build2 (PLUS_EXPR, type, fd->loops[i].v,fold_build2_loc (((location_t) 0), PLUS_EXPR, type, fd->loops
[i].v, fold_convert_loc (((location_t) 0), type, fd->loops
[i].step) )
3648 fold_convert (type, fd->loops[i].step))fold_build2_loc (((location_t) 0), PLUS_EXPR, type, fd->loops
[i].v, fold_convert_loc (((location_t) 0), type, fd->loops
[i].step) )
;
3649 expand_omp_build_assign (&gsi, fd->loops[i].v, t);
3650 if (counts[i])
3651 {
3652 t = fold_build2 (PLUS_EXPR, fd->iter_type, counts[i],fold_build2_loc (((location_t) 0), PLUS_EXPR, fd->iter_type
, counts[i], build_int_cst (fd->iter_type, 1) )
3653 build_int_cst (fd->iter_type, 1))fold_build2_loc (((location_t) 0), PLUS_EXPR, fd->iter_type
, counts[i], build_int_cst (fd->iter_type, 1) )
;
3654 expand_omp_build_assign (&gsi, counts[i], t);
3655 t = counts[i];
3656 }
3657 else
3658 {
3659 t = fold_build2 (MINUS_EXPR, TREE_TYPE (fd->loops[i].v),fold_build2_loc (((location_t) 0), MINUS_EXPR, ((contains_struct_check
((fd->loops[i].v), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 3659, __FUNCTION__))->typed.type), fd->loops[i].v, fd
->loops[i].n1 )
3660 fd->loops[i].v, fd->loops[i].n1)fold_build2_loc (((location_t) 0), MINUS_EXPR, ((contains_struct_check
((fd->loops[i].v), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 3659, __FUNCTION__))->typed.type), fd->loops[i].v, fd
->loops[i].n1 )
;
3661 t = fold_convert (fd->iter_type, t)fold_convert_loc (((location_t) 0), fd->iter_type, t);
3662 t = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE(tree) nullptr,
3663 true, GSI_SAME_STMT);
3664 }
3665 aref = build4 (ARRAY_REF, fd->iter_type, counts[fd->ordered],
3666 size_int (i - fd->collapse + 1)size_int_kind (i - fd->collapse + 1, stk_sizetype),
3667 NULL_TREE(tree) nullptr, NULL_TREE(tree) nullptr);
3668 expand_omp_build_assign (&gsi, aref, t);
3669 gsi_prev (&gsi);
3670 e2 = split_block (cont_bb, gsi_stmt (gsi));
3671 new_header = e2->dest;
3672 }
3673 else
3674 new_header = cont_bb;
3675 gsi = gsi_after_labels (new_header);
3676 tree v = force_gimple_operand_gsi (&gsi, fd->loops[i].v, true, NULL_TREE(tree) nullptr,
3677 true, GSI_SAME_STMT);
3678 tree n2
3679 = force_gimple_operand_gsi (&gsi, fold_convert (type, fd->loops[i].n2)fold_convert_loc (((location_t) 0), type, fd->loops[i].n2),
3680 true, NULL_TREE(tree) nullptr, true, GSI_SAME_STMT);
3681 t = build2 (fd->loops[i].cond_code, boolean_type_nodeglobal_trees[TI_BOOLEAN_TYPE], v, n2);
3682 gsi_insert_before (&gsi, gimple_build_cond_empty (t), GSI_NEW_STMT);
3683 edge e3 = split_block (new_header, gsi_stmt (gsi));
3684 cont_bb = e3->dest;
3685 remove_edge (e1);
3686 make_edge (body_bb, new_header, EDGE_FALLTHRU);
3687 e3->flags = EDGE_FALSE_VALUE;
3688 e3->probability = profile_probability::guessed_always ().apply_scale (1, 8);
3689 e1 = make_edge (new_header, new_body, EDGE_TRUE_VALUE);
3690 e1->probability = e3->probability.invert ();
3691
3692 set_immediate_dominator (CDI_DOMINATORS, new_header, body_bb);
3693 set_immediate_dominator (CDI_DOMINATORS, new_body, new_header);
3694
3695 if (e2)
3696 {
3697 class loop *loop = alloc_loop ();
3698 loop->header = new_header;
3699 loop->latch = e2->src;
3700 add_loop (loop, body_bb->loop_father);
3701 }
3702 }
3703
3704 /* If there are any lastprivate clauses and it is possible some loops
3705 might have zero iterations, ensure all the decls are initialized,
3706 otherwise we could crash evaluating C++ class iterators with lastprivate
3707 clauses. */
3708 bool need_inits = false;
3709 for (int i = fd->collapse; ordered_lastprivate && i < fd->ordered; i++)
3710 if (need_inits)
3711 {
3712 tree type = TREE_TYPE (fd->loops[i].v)((contains_struct_check ((fd->loops[i].v), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 3712, __FUNCTION__))->typed.type)
;
3713 gimple_stmt_iterator gsi = gsi_after_labels (body_bb);
3714 expand_omp_build_assign (&gsi, fd->loops[i].v,
3715 fold_convert (type, fd->loops[i].n1)fold_convert_loc (((location_t) 0), type, fd->loops[i].n1));
3716 }
3717 else
3718 {
3719 tree type = TREE_TYPE (fd->loops[i].v)((contains_struct_check ((fd->loops[i].v), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 3719, __FUNCTION__))->typed.type)
;
3720 tree this_cond = fold_build2 (fd->loops[i].cond_code,fold_build2_loc (((location_t) 0), fd->loops[i].cond_code,
global_trees[TI_BOOLEAN_TYPE], fold_convert_loc (((location_t
) 0), type, fd->loops[i].n1), fold_convert_loc (((location_t
) 0), type, fd->loops[i].n2) )
3721 boolean_type_node,fold_build2_loc (((location_t) 0), fd->loops[i].cond_code,
global_trees[TI_BOOLEAN_TYPE], fold_convert_loc (((location_t
) 0), type, fd->loops[i].n1), fold_convert_loc (((location_t
) 0), type, fd->loops[i].n2) )
3722 fold_convert (type, fd->loops[i].n1),fold_build2_loc (((location_t) 0), fd->loops[i].cond_code,
global_trees[TI_BOOLEAN_TYPE], fold_convert_loc (((location_t
) 0), type, fd->loops[i].n1), fold_convert_loc (((location_t
) 0), type, fd->loops[i].n2) )
3723 fold_convert (type, fd->loops[i].n2))fold_build2_loc (((location_t) 0), fd->loops[i].cond_code,
global_trees[TI_BOOLEAN_TYPE], fold_convert_loc (((location_t
) 0), type, fd->loops[i].n1), fold_convert_loc (((location_t
) 0), type, fd->loops[i].n2) )
;
3724 if (!integer_onep (this_cond))
3725 need_inits = true;
3726 }
3727
3728 return cont_bb;
3729}
3730
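To picture what expand_omp_for_ordered_loops produces, consider collapse(1) ordered(2): the single collapsed dimension is driven by the runtime, and the remaining ordered dimension is re-created around the body, with its induction variable reset to its lower bound and a per-dimension counter kept in step for later depend(sink) waits. The following standalone sketch only illustrates that generated shape with made-up bounds; it is not GCC code:

#include <stdio.h>

/* Illustrative bounds and steps; in the real expansion they come from
   fd->loops[] and the counters from the .omp_counts-style bookkeeping.  */
#define N21 0L
#define N22 4L
#define STEP2 1L

int
main (void)
{
  long counts[2] = { 0, 0 };
  for (long V1 = 0; V1 < 3; V1++)       /* collapsed dimension, driven by the runtime  */
    {
      long V2 = N21;                    /* inner variable reset to its lower bound  */
      counts[1] = 0;                    /* per-dimension counter reset  */
      while (V2 < N22)
        {
          printf ("body (%ld, %ld), counts[1] = %ld\n", V1, V2, counts[1]);
          V2 += STEP2;
          counts[1] += 1;               /* kept in sync for depend(sink) waits  */
        }
    }
  return 0;
}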
3731/* A subroutine of expand_omp_for. Generate code for a parallel
3732 loop with any schedule. Given parameters:
3733
3734 for (V = N1; V cond N2; V += STEP) BODY;
3735
3736 where COND is "<" or ">", we generate pseudocode
3737
3738 more = GOMP_loop_foo_start (N1, N2, STEP, CHUNK, &istart0, &iend0);
3739 if (more) goto L0; else goto L3;
3740 L0:
3741 V = istart0;
3742 iend = iend0;
3743 L1:
3744 BODY;
3745 V += STEP;
3746 if (V cond iend) goto L1; else goto L2;
3747 L2:
3748 if (GOMP_loop_foo_next (&istart0, &iend0)) goto L0; else goto L3;
3749 L3:
3750
3751 If this is a combined omp parallel loop, instead of the call to
3752 GOMP_loop_foo_start, we call GOMP_loop_foo_next.
3753 If this is gimple_omp_for_combined_p loop, then instead of assigning
3754 V and iend in L0 we assign the first two _looptemp_ clause decls of the
3755 inner GIMPLE_OMP_FOR and V += STEP; and
3756 if (V cond iend) goto L1; else goto L2; are removed.
3757
3758 For collapsed loops, given parameters:
3759 collapse(3)
3760 for (V1 = N11; V1 cond1 N12; V1 += STEP1)
3761 for (V2 = N21; V2 cond2 N22; V2 += STEP2)
3762 for (V3 = N31; V3 cond3 N32; V3 += STEP3)
3763 BODY;
3764
3765 we generate pseudocode
3766
3767 if (__builtin_expect (N32 cond3 N31, 0)) goto Z0;
3768 if (cond3 is <)
3769 adj = STEP3 - 1;
3770 else
3771 adj = STEP3 + 1;
3772 count3 = (adj + N32 - N31) / STEP3;
3773 if (__builtin_expect (N22 cond2 N21, 0)) goto Z0;
3774 if (cond2 is <)
3775 adj = STEP2 - 1;
3776 else
3777 adj = STEP2 + 1;
3778 count2 = (adj + N22 - N21) / STEP2;
3779 if (__builtin_expect (N12 cond1 N11, 0)) goto Z0;
3780 if (cond1 is <)
3781 adj = STEP1 - 1;
3782 else
3783 adj = STEP1 + 1;
3784 count1 = (adj + N12 - N11) / STEP1;
3785 count = count1 * count2 * count3;
3786 goto Z1;
3787 Z0:
3788 count = 0;
3789 Z1:
3790 more = GOMP_loop_foo_start (0, count, 1, CHUNK, &istart0, &iend0);
3791 if (more) goto L0; else goto L3;
3792 L0:
3793 V = istart0;
3794 T = V;
3795 V3 = N31 + (T % count3) * STEP3;
3796 T = T / count3;
3797 V2 = N21 + (T % count2) * STEP2;
3798 T = T / count2;
3799 V1 = N11 + T * STEP1;
3800 iend = iend0;
3801 L1:
3802 BODY;
3803 V += 1;
3804 if (V < iend) goto L10; else goto L2;
3805 L10:
3806 V3 += STEP3;
3807 if (V3 cond3 N32) goto L1; else goto L11;
3808 L11:
3809 V3 = N31;
3810 V2 += STEP2;
3811 if (V2 cond2 N22) goto L1; else goto L12;
3812 L12:
3813 V2 = N21;
3814 V1 += STEP1;
3815 goto L1;
3816 L2:
3817 if (GOMP_loop_foo_next (&istart0, &iend0)) goto L0; else goto L3;
3818 L3:
3819
3820 */
3821
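The count computation and the index recovery described in the comment above can be checked with a small standalone program; the bounds and steps are arbitrary illustrative values, while the adj/count and V1/V2/V3 formulas are taken directly from the pseudocode:

#include <stdio.h>

int
main (void)
{
  /* collapse(3) bounds and steps, all with '<' conditions.  */
  long N11 = 0, N12 = 3, STEP1 = 1;
  long N21 = 0, N22 = 4, STEP2 = 2;
  long N31 = 5, N32 = 11, STEP3 = 3;

  /* adj = STEP - 1 for '<', so count = ceil((N2 - N1) / STEP).  */
  long count1 = (STEP1 - 1 + N12 - N11) / STEP1;        /* 3  */
  long count2 = (STEP2 - 1 + N22 - N21) / STEP2;        /* 2  */
  long count3 = (STEP3 - 1 + N32 - N31) / STEP3;        /* 2  */
  long count = count1 * count2 * count3;                /* 12  */

  for (long V = 0; V < count; V++)
    {
      /* Recover the original induction variables from the linear index.  */
      long T = V;
      long V3 = N31 + (T % count3) * STEP3;
      T = T / count3;
      long V2 = N21 + (T % count2) * STEP2;
      T = T / count2;
      long V1 = N11 + T * STEP1;
      printf ("V = %2ld -> (V1, V2, V3) = (%ld, %ld, %ld)\n", V, V1, V2, V3);
    }
  return 0;
}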
3822static void
3823expand_omp_for_generic (struct omp_region *region,
3824 struct omp_for_data *fd,
3825 enum built_in_function start_fn,
3826 enum built_in_function next_fn,
3827 tree sched_arg,
3828 gimple *inner_stmt)
3829{
3830 tree type, istart0, iend0, iend;
3831 tree t, vmain, vback, bias = NULL_TREE(tree) nullptr;
3832 basic_block entry_bb, cont_bb, exit_bb, l0_bb, l1_bb, collapse_bb;
3833 basic_block l2_bb = NULLnullptr, l3_bb = NULLnullptr;
3834 gimple_stmt_iterator gsi;
3835 gassign *assign_stmt;
3836 bool in_combined_parallel = is_combined_parallel (region);
3837 bool broken_loop = region->cont == NULLnullptr;
3838 edge e, ne;
3839 tree *counts = NULLnullptr;
3840 int i;
3841 bool ordered_lastprivate = false;
3842
3843 gcc_assert (!broken_loop || !in_combined_parallel)((void)(!(!broken_loop || !in_combined_parallel) ? fancy_abort
("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 3843, __FUNCTION__), 0 : 0))
;
3844 gcc_assert (fd->iter_type == long_integer_type_node((void)(!(fd->iter_type == integer_types[itk_long] || !in_combined_parallel
) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 3845, __FUNCTION__), 0 : 0))
3845 || !in_combined_parallel)((void)(!(fd->iter_type == integer_types[itk_long] || !in_combined_parallel
) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 3845, __FUNCTION__), 0 : 0))
;
3846
3847 entry_bb = region->entry;
3848 cont_bb = region->cont;
3849 collapse_bb = NULLnullptr;
3850 gcc_assert (EDGE_COUNT (entry_bb->succs) == 2)((void)(!(vec_safe_length (entry_bb->succs) == 2) ? fancy_abort
("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 3850, __FUNCTION__), 0 : 0))
;
3851 gcc_assert (broken_loop((void)(!(broken_loop || ((*((entry_bb))->succs)[(0)]->
flags & EDGE_FALLTHRU ? (*((entry_bb))->succs)[(1)] : (
*((entry_bb))->succs)[(0)])->dest == ((*((cont_bb))->
succs)[(0)]->flags & EDGE_FALLTHRU ? (*((cont_bb))->
succs)[(0)] : (*((cont_bb))->succs)[(1)])->dest) ? fancy_abort
("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 3852, __FUNCTION__), 0 : 0))
3852 || BRANCH_EDGE (entry_bb)->dest == FALLTHRU_EDGE (cont_bb)->dest)((void)(!(broken_loop || ((*((entry_bb))->succs)[(0)]->
flags & EDGE_FALLTHRU ? (*((entry_bb))->succs)[(1)] : (
*((entry_bb))->succs)[(0)])->dest == ((*((cont_bb))->
succs)[(0)]->flags & EDGE_FALLTHRU ? (*((cont_bb))->
succs)[(0)] : (*((cont_bb))->succs)[(1)])->dest) ? fancy_abort
("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 3852, __FUNCTION__), 0 : 0))
;
3853 l0_bb = split_edge (FALLTHRU_EDGE (entry_bb)((*((entry_bb))->succs)[(0)]->flags & EDGE_FALLTHRU
? (*((entry_bb))->succs)[(0)] : (*((entry_bb))->succs)
[(1)])
);
3854 l1_bb = single_succ (l0_bb);
3855 if (!broken_loop)
3856 {
3857 l2_bb = create_empty_bb (cont_bb);
3858 gcc_assert (BRANCH_EDGE (cont_bb)->dest == l1_bb((void)(!(((*((cont_bb))->succs)[(0)]->flags & EDGE_FALLTHRU
? (*((cont_bb))->succs)[(1)] : (*((cont_bb))->succs)[(
0)])->dest == l1_bb || (single_succ_edge (((*((cont_bb))->
succs)[(0)]->flags & EDGE_FALLTHRU ? (*((cont_bb))->
succs)[(1)] : (*((cont_bb))->succs)[(0)])->dest)->dest
== l1_bb)) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 3860, __FUNCTION__), 0 : 0))
3859 || (single_succ_edge (BRANCH_EDGE (cont_bb)->dest)->dest((void)(!(((*((cont_bb))->succs)[(0)]->flags & EDGE_FALLTHRU
? (*((cont_bb))->succs)[(1)] : (*((cont_bb))->succs)[(
0)])->dest == l1_bb || (single_succ_edge (((*((cont_bb))->
succs)[(0)]->flags & EDGE_FALLTHRU ? (*((cont_bb))->
succs)[(1)] : (*((cont_bb))->succs)[(0)])->dest)->dest
== l1_bb)) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 3860, __FUNCTION__), 0 : 0))
3860 == l1_bb))((void)(!(((*((cont_bb))->succs)[(0)]->flags & EDGE_FALLTHRU
? (*((cont_bb))->succs)[(1)] : (*((cont_bb))->succs)[(
0)])->dest == l1_bb || (single_succ_edge (((*((cont_bb))->
succs)[(0)]->flags & EDGE_FALLTHRU ? (*((cont_bb))->
succs)[(1)] : (*((cont_bb))->succs)[(0)])->dest)->dest
== l1_bb)) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 3860, __FUNCTION__), 0 : 0))
;
3861 gcc_assert (EDGE_COUNT (cont_bb->succs) == 2)((void)(!(vec_safe_length (cont_bb->succs) == 2) ? fancy_abort
("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 3861, __FUNCTION__), 0 : 0))
;
3862 }
3863 else
3864 l2_bb = NULLnullptr;
3865 l3_bb = BRANCH_EDGE (entry_bb)((*((entry_bb))->succs)[(0)]->flags & EDGE_FALLTHRU
? (*((entry_bb))->succs)[(1)] : (*((entry_bb))->succs)
[(0)])
->dest;
3866 exit_bb = region->exit;
3867
3868 gsi = gsi_last_nondebug_bb (entry_bb);
3869
3870 gcc_assert (gimple_code (gsi_stmt (gsi)) == GIMPLE_OMP_FOR)((void)(!(gimple_code (gsi_stmt (gsi)) == GIMPLE_OMP_FOR) ? fancy_abort
("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 3870, __FUNCTION__), 0 : 0))
;
3871 if (fd->ordered
3872 && omp_find_clause (gimple_omp_for_clauses (fd->for_stmt),
3873 OMP_CLAUSE_LASTPRIVATE))
3874 ordered_lastprivate = false;
3875 tree reductions = NULL_TREE(tree) nullptr;
3876 tree mem = NULL_TREE(tree) nullptr, cond_var = NULL_TREE(tree) nullptr, condtemp = NULL_TREE(tree) nullptr;
3877 tree memv = NULL_TREE(tree) nullptr;
3878 if (fd->lastprivate_conditional)
3879 {
3880 tree c = omp_find_clause (gimple_omp_for_clauses (fd->for_stmt),
3881 OMP_CLAUSE__CONDTEMP_);
3882 if (fd->have_pointer_condtemp)
3883 condtemp = OMP_CLAUSE_DECL (c)(*(omp_clause_elt_check (((omp_clause_range_check (((tree_check
((c), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 3883, __FUNCTION__, (OMP_CLAUSE)))), (OMP_CLAUSE_PRIVATE), (
OMP_CLAUSE__SCANTEMP_), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 3883, __FUNCTION__))), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 3883, __FUNCTION__)))
;
3884 c = omp_find_clause (OMP_CLAUSE_CHAIN (c)((contains_struct_check (((tree_check ((c), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 3884, __FUNCTION__, (OMP_CLAUSE)))), (TS_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 3884, __FUNCTION__))->common.chain)
, OMP_CLAUSE__CONDTEMP_);
3885 cond_var = OMP_CLAUSE_DECL (c)(*(omp_clause_elt_check (((omp_clause_range_check (((tree_check
((c), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 3885, __FUNCTION__, (OMP_CLAUSE)))), (OMP_CLAUSE_PRIVATE), (
OMP_CLAUSE__SCANTEMP_), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 3885, __FUNCTION__))), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 3885, __FUNCTION__)))
;
3886 }
3887 if (sched_arg)
3888 {
3889 if (fd->have_reductemp)
3890 {
3891 tree c = omp_find_clause (gimple_omp_for_clauses (fd->for_stmt),
3892 OMP_CLAUSE__REDUCTEMP_);
3893 reductions = OMP_CLAUSE_DECL (c)(*(omp_clause_elt_check (((omp_clause_range_check (((tree_check
((c), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 3893, __FUNCTION__, (OMP_CLAUSE)))), (OMP_CLAUSE_PRIVATE), (
OMP_CLAUSE__SCANTEMP_), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 3893, __FUNCTION__))), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 3893, __FUNCTION__)))
;
3894 gcc_assert (TREE_CODE (reductions) == SSA_NAME)((void)(!(((enum tree_code) (reductions)->base.code) == SSA_NAME
) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 3894, __FUNCTION__), 0 : 0))
;
3895 gimple *g = SSA_NAME_DEF_STMT (reductions)(tree_check ((reductions), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 3895, __FUNCTION__, (SSA_NAME)))->ssa_name.def_stmt
;
3896 reductions = gimple_assign_rhs1 (g);
3897 OMP_CLAUSE_DECL (c)(*(omp_clause_elt_check (((omp_clause_range_check (((tree_check
((c), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 3897, __FUNCTION__, (OMP_CLAUSE)))), (OMP_CLAUSE_PRIVATE), (
OMP_CLAUSE__SCANTEMP_), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 3897, __FUNCTION__))), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 3897, __FUNCTION__)))
= reductions;
3898 entry_bb = gimple_bb (g);
3899 edge e = split_block (entry_bb, g);
3900 if (region->entry == entry_bb)
3901 region->entry = e->dest;
3902 gsi = gsi_last_bb (entry_bb);
3903 }
3904 else
3905 reductions = null_pointer_nodeglobal_trees[TI_NULL_POINTER];
3906 if (fd->have_pointer_condtemp)
3907 {
3908 tree type = TREE_TYPE (condtemp)((contains_struct_check ((condtemp), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 3908, __FUNCTION__))->typed.type)
;
3909 memv = create_tmp_var (type);
3910 TREE_ADDRESSABLE (memv)((memv)->base.addressable_flag) = 1;
3911 unsigned HOST_WIDE_INTlong sz
3912 = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (type))((tree_class_check ((((contains_struct_check ((type), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 3912, __FUNCTION__))->typed.type)), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 3912, __FUNCTION__))->type_common.size_unit)
);
3913 sz *= fd->lastprivate_conditional;
3914 expand_omp_build_assign (&gsi, memv, build_int_cst (type, sz),
3915 false);
3916 mem = build_fold_addr_expr (memv)build_fold_addr_expr_loc (((location_t) 0), (memv));
3917 }
3918 else
3919 mem = null_pointer_nodeglobal_trees[TI_NULL_POINTER];
3920 }
3921 if (fd->collapse > 1 || fd->ordered)
3922 {
3923 int first_zero_iter1 = -1, first_zero_iter2 = -1;
3924 basic_block zero_iter1_bb = NULLnullptr, zero_iter2_bb = NULLnullptr, l2_dom_bb = NULLnullptr;
3925
3926 counts = XALLOCAVEC (tree, fd->ordered ? fd->ordered + 1 : fd->collapse)((tree *) __builtin_alloca(sizeof (tree) * (fd->ordered ? fd
->ordered + 1 : fd->collapse)))
;
3927 expand_omp_for_init_counts (fd, &gsi, entry_bb, counts,
3928 zero_iter1_bb, first_zero_iter1,
3929 zero_iter2_bb, first_zero_iter2, l2_dom_bb);
3930
3931 if (zero_iter1_bb)
3932 {
3933 /* Some counts[i] vars might be uninitialized if
3934 some loop has zero iterations. But the body shouldn't
3935 be executed in that case, so just avoid uninit warnings. */
3936 for (i = first_zero_iter1;
3937 i < (fd->ordered ? fd->ordered : fd->collapse); i++)
3938 if (SSA_VAR_P (counts[i])(((enum tree_code) (counts[i])->base.code) == VAR_DECL || (
(enum tree_code) (counts[i])->base.code) == PARM_DECL || (
(enum tree_code) (counts[i])->base.code) == RESULT_DECL ||
((enum tree_code) (counts[i])->base.code) == SSA_NAME)
)
3939 suppress_warning (counts[i], OPT_Wuninitialized);
3940 gsi_prev (&gsi);
3941 e = split_block (entry_bb, gsi_stmt (gsi));
3942 entry_bb = e->dest;
3943 make_edge (zero_iter1_bb, entry_bb, EDGE_FALLTHRU);
3944 gsi = gsi_last_nondebug_bb (entry_bb);
3945 set_immediate_dominator (CDI_DOMINATORS, entry_bb,
3946 get_immediate_dominator (CDI_DOMINATORS,
3947 zero_iter1_bb));
3948 }
3949 if (zero_iter2_bb)
3950 {
3951 /* Some counts[i] vars might be uninitialized if
3952 some loop has zero iterations. But the body shouldn't
3953 be executed in that case, so just avoid uninit warnings. */
3954 for (i = first_zero_iter2; i < fd->ordered; i++)
3955 if (SSA_VAR_P (counts[i])(((enum tree_code) (counts[i])->base.code) == VAR_DECL || (
(enum tree_code) (counts[i])->base.code) == PARM_DECL || (
(enum tree_code) (counts[i])->base.code) == RESULT_DECL ||
((enum tree_code) (counts[i])->base.code) == SSA_NAME)
)
3956 suppress_warning (counts[i], OPT_Wuninitialized);
3957 if (zero_iter1_bb)
3958 make_edge (zero_iter2_bb, entry_bb, EDGE_FALLTHRU);
3959 else
3960 {
3961 gsi_prev (&gsi);
3962 e = split_block (entry_bb, gsi_stmt (gsi));
3963 entry_bb = e->dest;
3964 make_edge (zero_iter2_bb, entry_bb, EDGE_FALLTHRU);
3965 gsi = gsi_last_nondebug_bb (entry_bb);
3966 set_immediate_dominator (CDI_DOMINATORS, entry_bb,
3967 get_immediate_dominator
3968 (CDI_DOMINATORS, zero_iter2_bb));
3969 }
3970 }
3971 if (fd->collapse == 1)
3972 {
3973 counts[0] = fd->loop.n2;
3974 fd->loop = fd->loops[0];
3975 }
3976 }
3977
3978 type = TREE_TYPE (fd->loop.v)((contains_struct_check ((fd->loop.v), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 3978, __FUNCTION__))->typed.type)
;
3979 istart0 = create_tmp_var (fd->iter_type, ".istart0");
3980 iend0 = create_tmp_var (fd->iter_type, ".iend0");
3981 TREE_ADDRESSABLE (istart0)((istart0)->base.addressable_flag) = 1;
3982 TREE_ADDRESSABLE (iend0)((iend0)->base.addressable_flag) = 1;
3983
3984 /* See if we need to bias by LLONG_MIN. */
3985 if (fd->iter_type == long_long_unsigned_type_nodeinteger_types[itk_unsigned_long_long]
3986 && TREE_CODE (type)((enum tree_code) (type)->base.code) == INTEGER_TYPE
3987 && !TYPE_UNSIGNED (type)((tree_class_check ((type), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 3987, __FUNCTION__))->base.u.bits.unsigned_flag)
3988 && fd->ordered == 0)
3989 {
3990 tree n1, n2;
3991
3992 if (fd->loop.cond_code == LT_EXPR)
3993 {
3994 n1 = fd->loop.n1;
3995 n2 = fold_build2 (PLUS_EXPR, type, fd->loop.n2, fd->loop.step)fold_build2_loc (((location_t) 0), PLUS_EXPR, type, fd->loop
.n2, fd->loop.step )
;
3996 }
3997 else
3998 {
3999 n1 = fold_build2 (MINUS_EXPR, type, fd->loop.n2, fd->loop.step)fold_build2_loc (((location_t) 0), MINUS_EXPR, type, fd->loop
.n2, fd->loop.step )
;
4000 n2 = fd->loop.n1;
4001 }
4002 if (TREE_CODE (n1)((enum tree_code) (n1)->base.code) != INTEGER_CST
4003 || TREE_CODE (n2)((enum tree_code) (n2)->base.code) != INTEGER_CST
4004 || ((tree_int_cst_sgn (n1) < 0) ^ (tree_int_cst_sgn (n2) < 0)))
4005 bias = fold_convert (fd->iter_type, TYPE_MIN_VALUE (type))fold_convert_loc (((location_t) 0), fd->iter_type, ((tree_check5
((type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 4005, __FUNCTION__, (INTEGER_TYPE), (ENUMERAL_TYPE), (BOOLEAN_TYPE
), (REAL_TYPE), (FIXED_POINT_TYPE)))->type_non_common.minval
))
;
4006 }
4007
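The bias computed just above exists because a signed iteration space sometimes has to travel through the unsigned long long GOMP_loop_ull_* interface: adding TYPE_MIN_VALUE of the signed type (LLONG_MIN reinterpreted as unsigned) maps the signed range onto an order-preserving unsigned range, and subtracting it again recovers the original values. A minimal check of that property, assuming the two's-complement wraparound GCC targets provide (standalone example, not GCC code):

#include <limits.h>
#include <stdio.h>

int
main (void)
{
  /* The bias used for a signed 64-bit induction variable.  */
  unsigned long long bias = (unsigned long long) LLONG_MIN;

  long long a = -5, b = 3;                      /* a < b as signed values  */
  unsigned long long ua = (unsigned long long) a + bias;
  unsigned long long ub = (unsigned long long) b + bias;

  /* After biasing, the unsigned comparison agrees with the signed one, so
     the runtime can hand out [ua, ub) without knowing about signedness.  */
  printf ("ua < ub: %d\n", ua < ub);            /* prints 1  */

  /* Subtracting the bias (relying on two's-complement wraparound, as the
     expansion does) recovers the original signed values.  */
  printf ("unbiased: %lld %lld\n",
          (long long) (ua - bias), (long long) (ub - bias));
  return 0;
}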
4008 gimple_stmt_iterator gsif = gsi;
4009 gsi_prev (&gsif);
4010
4011 tree arr = NULL_TREE(tree) nullptr;
4012 if (in_combined_parallel)
4013 {
4014 gcc_assert (fd->ordered == 0)((void)(!(fd->ordered == 0) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 4014, __FUNCTION__), 0 : 0))
;
4015 /* In a combined parallel loop, emit a call to
4016 GOMP_loop_foo_next. */
4017 t = build_call_expr (builtin_decl_explicit (next_fn), 2,
4018 build_fold_addr_expr (istart0)build_fold_addr_expr_loc (((location_t) 0), (istart0)),
4019 build_fold_addr_expr (iend0)build_fold_addr_expr_loc (((location_t) 0), (iend0)));
4020 }
4021 else
4022 {
4023 tree t0, t1, t2, t3, t4;
4024 /* If this is not a combined parallel loop, emit a call to
4025 GOMP_loop_foo_start in ENTRY_BB. */
4026 t4 = build_fold_addr_expr (iend0)build_fold_addr_expr_loc (((location_t) 0), (iend0));
4027 t3 = build_fold_addr_expr (istart0)build_fold_addr_expr_loc (((location_t) 0), (istart0));
4028 if (fd->ordered)
4029 {
4030 t0 = build_int_cst (unsigned_type_nodeinteger_types[itk_unsigned_int],
4031 fd->ordered - fd->collapse + 1);
4032 arr = create_tmp_var (build_array_type_nelts (fd->iter_type,
4033 fd->ordered
4034 - fd->collapse + 1),
4035 ".omp_counts");
4036 DECL_NAMELESS (arr)((contains_struct_check ((arr), (TS_DECL_MINIMAL), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 4036, __FUNCTION__))->base.u.bits.nameless_flag)
= 1;
4037 TREE_ADDRESSABLE (arr)((arr)->base.addressable_flag) = 1;
4038 TREE_STATIC (arr)((arr)->base.static_flag) = 1;
4039 vec<constructor_elt, va_gc> *v;
4040 vec_alloc (v, fd->ordered - fd->collapse + 1);
4041 int idx;
4042
4043 for (idx = 0; idx < fd->ordered - fd->collapse + 1; idx++)
4044 {
4045 tree c;
4046 if (idx == 0 && fd->collapse > 1)
4047 c = fd->loop.n2;
4048 else
4049 c = counts[idx + fd->collapse - 1];
4050 tree purpose = size_int (idx)size_int_kind (idx, stk_sizetype);
4051 CONSTRUCTOR_APPEND_ELT (v, purpose, c)do { constructor_elt _ce___ = {purpose, c}; vec_safe_push ((v
), _ce___); } while (0)
;
4052 if (TREE_CODE (c)((enum tree_code) (c)->base.code) != INTEGER_CST)
4053 TREE_STATIC (arr)((arr)->base.static_flag) = 0;
4054 }
4055
4056 DECL_INITIAL (arr)((contains_struct_check ((arr), (TS_DECL_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 4056, __FUNCTION__))->decl_common.initial)
= build_constructor (TREE_TYPE (arr)((contains_struct_check ((arr), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 4056, __FUNCTION__))->typed.type)
, v);
4057 if (!TREE_STATIC (arr)((arr)->base.static_flag))
4058 force_gimple_operand_gsi (&gsi, build1 (DECL_EXPR,
4059 void_type_nodeglobal_trees[TI_VOID_TYPE], arr),
4060 true, NULL_TREE(tree) nullptr, true, GSI_SAME_STMT);
4061 t1 = build_fold_addr_expr (arr)build_fold_addr_expr_loc (((location_t) 0), (arr));
4062 t2 = NULL_TREE(tree) nullptr;
4063 }
4064 else
4065 {
4066 t2 = fold_convert (fd->iter_type, fd->loop.step)fold_convert_loc (((location_t) 0), fd->iter_type, fd->
loop.step)
;
4067 t1 = fd->loop.n2;
4068 t0 = fd->loop.n1;
4069 if (gimple_omp_for_combined_into_p (fd->for_stmt))
4070 {
4071 tree innerc
4072 = omp_find_clause (gimple_omp_for_clauses (fd->for_stmt),
4073 OMP_CLAUSE__LOOPTEMP_);
4074 gcc_assert (innerc)((void)(!(innerc) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 4074, __FUNCTION__), 0 : 0))
;
4075 t0 = OMP_CLAUSE_DECL (innerc)(*(omp_clause_elt_check (((omp_clause_range_check (((tree_check
((innerc), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 4075, __FUNCTION__, (OMP_CLAUSE)))), (OMP_CLAUSE_PRIVATE), (
OMP_CLAUSE__SCANTEMP_), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 4075, __FUNCTION__))), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 4075, __FUNCTION__)))
;
4076 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc)((contains_struct_check (((tree_check ((innerc), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 4076, __FUNCTION__, (OMP_CLAUSE)))), (TS_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 4076, __FUNCTION__))->common.chain)
,
4077 OMP_CLAUSE__LOOPTEMP_);
4078 gcc_assert (innerc)((void)(!(innerc) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 4078, __FUNCTION__), 0 : 0))
;
4079 t1 = OMP_CLAUSE_DECL (innerc)(*(omp_clause_elt_check (((omp_clause_range_check (((tree_check
((innerc), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 4079, __FUNCTION__, (OMP_CLAUSE)))), (OMP_CLAUSE_PRIVATE), (
OMP_CLAUSE__SCANTEMP_), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 4079, __FUNCTION__))), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 4079, __FUNCTION__)))
;
4080 }
4081 if (POINTER_TYPE_P (TREE_TYPE (t0))(((enum tree_code) (((contains_struct_check ((t0), (TS_TYPED)
, "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 4081, __FUNCTION__))->typed.type))->base.code) == POINTER_TYPE
|| ((enum tree_code) (((contains_struct_check ((t0), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 4081, __FUNCTION__))->typed.type))->base.code) == REFERENCE_TYPE
)
4082 && TYPE_PRECISION (TREE_TYPE (t0))((tree_class_check ((((contains_struct_check ((t0), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 4082, __FUNCTION__))->typed.type)), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 4082, __FUNCTION__))->type_common.precision)
4083 != TYPE_PRECISION (fd->iter_type)((tree_class_check ((fd->iter_type), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 4083, __FUNCTION__))->type_common.precision)
)
4084 {
4085 /* Avoid casting pointers to integer of a different size. */
4086 tree itype = signed_type_for (type);
4087 t1 = fold_convert (fd->iter_type, fold_convert (itype, t1))fold_convert_loc (((location_t) 0), fd->iter_type, fold_convert_loc
(((location_t) 0), itype, t1))
;
4088 t0 = fold_convert (fd->iter_type, fold_convert (itype, t0))fold_convert_loc (((location_t) 0), fd->iter_type, fold_convert_loc
(((location_t) 0), itype, t0))
;
4089 }
4090 else
4091 {
4092 t1 = fold_convert (fd->iter_type, t1)fold_convert_loc (((location_t) 0), fd->iter_type, t1);
4093 t0 = fold_convert (fd->iter_type, t0)fold_convert_loc (((location_t) 0), fd->iter_type, t0);
4094 }
4095 if (bias)
4096 {
4097 t1 = fold_build2 (PLUS_EXPR, fd->iter_type, t1, bias)fold_build2_loc (((location_t) 0), PLUS_EXPR, fd->iter_type
, t1, bias )
;
4098 t0 = fold_build2 (PLUS_EXPR, fd->iter_type, t0, bias)fold_build2_loc (((location_t) 0), PLUS_EXPR, fd->iter_type
, t0, bias )
;
4099 }
4100 }
4101 if (fd->iter_type == long_integer_type_nodeinteger_types[itk_long] || fd->ordered)
4102 {
4103 if (fd->chunk_size)
4104 {
4105 t = fold_convert (fd->iter_type, fd->chunk_size)fold_convert_loc (((location_t) 0), fd->iter_type, fd->
chunk_size)
;
4106 t = omp_adjust_chunk_size (t, fd->simd_schedule);
4107 if (sched_arg)
4108 {
4109 if (fd->ordered)
4110 t = build_call_expr (builtin_decl_explicit (start_fn),
4111 8, t0, t1, sched_arg, t, t3, t4,
4112 reductions, mem);
4113 else
4114 t = build_call_expr (builtin_decl_explicit (start_fn),
4115 9, t0, t1, t2, sched_arg, t, t3, t4,
4116 reductions, mem);
4117 }
4118 else if (fd->ordered)
4119 t = build_call_expr (builtin_decl_explicit (start_fn),
4120 5, t0, t1, t, t3, t4);
4121 else
4122 t = build_call_expr (builtin_decl_explicit (start_fn),
4123 6, t0, t1, t2, t, t3, t4);
4124 }
4125 else if (fd->ordered)
4126 t = build_call_expr (builtin_decl_explicit (start_fn),
4127 4, t0, t1, t3, t4);
4128 else
4129 t = build_call_expr (builtin_decl_explicit (start_fn),
4130 5, t0, t1, t2, t3, t4);
4131 }
4132 else
4133 {
4134 tree t5;
4135 tree c_bool_type;
4136 tree bfn_decl;
4137
4138 /* The GOMP_loop_ull_*start functions have additional boolean
4139 argument, true for < loops and false for > loops.
4140 In Fortran, the C bool type can be different from
4141 boolean_type_node. */
4142 bfn_decl = builtin_decl_explicit (start_fn);
4143 c_bool_type = TREE_TYPE (TREE_TYPE (bfn_decl))((contains_struct_check ((((contains_struct_check ((bfn_decl)
, (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 4143, __FUNCTION__))->typed.type)), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 4143, __FUNCTION__))->typed.type)
;
4144 t5 = build_int_cst (c_bool_type,
4145 fd->loop.cond_code == LT_EXPR ? 1 : 0);
4146 if (fd->chunk_size)
4147 {
4148 tree bfn_decl = builtin_decl_explicit (start_fn);
4149 t = fold_convert (fd->iter_type, fd->chunk_size)fold_convert_loc (((location_t) 0), fd->iter_type, fd->
chunk_size)
;
4150 t = omp_adjust_chunk_size (t, fd->simd_schedule);
4151 if (sched_arg)
4152 t = build_call_expr (bfn_decl, 10, t5, t0, t1, t2, sched_arg,
4153 t, t3, t4, reductions, mem);
4154 else
4155 t = build_call_expr (bfn_decl, 7, t5, t0, t1, t2, t, t3, t4);
4156 }
4157 else
4158 t = build_call_expr (builtin_decl_explicit (start_fn),
4159 6, t5, t0, t1, t2, t3, t4);
4160 }
4161 }
4162 if (TREE_TYPE (t)((contains_struct_check ((t), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 4162, __FUNCTION__))->typed.type)
!= boolean_type_nodeglobal_trees[TI_BOOLEAN_TYPE])
4163 t = fold_build2 (NE_EXPR, boolean_type_node,fold_build2_loc (((location_t) 0), NE_EXPR, global_trees[TI_BOOLEAN_TYPE
], t, build_int_cst (((contains_struct_check ((t), (TS_TYPED)
, "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 4164, __FUNCTION__))->typed.type), 0) )
4164 t, build_int_cst (TREE_TYPE (t), 0))fold_build2_loc (((location_t) 0), NE_EXPR, global_trees[TI_BOOLEAN_TYPE
], t, build_int_cst (((contains_struct_check ((t), (TS_TYPED)
, "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 4164, __FUNCTION__))->typed.type), 0) )
;
4165 t = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE(tree) nullptr,
4166 true, GSI_SAME_STMT);
4167 if (arr && !TREE_STATIC (arr)((arr)->base.static_flag))
4168 {
4169 tree clobber = build_clobber (TREE_TYPE (arr)((contains_struct_check ((arr), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 4169, __FUNCTION__))->typed.type)
);
4170 gsi_insert_before (&gsi, gimple_build_assign (arr, clobber),
4171 GSI_SAME_STMT);
4172 }
4173 if (fd->have_pointer_condtemp)
4174 expand_omp_build_assign (&gsi, condtemp, memv, false);
4175 if (fd->have_reductemp)
4176 {
4177 gimple *g = gsi_stmt (gsi);
4178 gsi_remove (&gsi, true);
4179 release_ssa_name (gimple_assign_lhs (g));
4180
4181 entry_bb = region->entry;
4182 gsi = gsi_last_nondebug_bb (entry_bb);
4183
4184 gcc_assert (gimple_code (gsi_stmt (gsi)) == GIMPLE_OMP_FOR)((void)(!(gimple_code (gsi_stmt (gsi)) == GIMPLE_OMP_FOR) ? fancy_abort
("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 4184, __FUNCTION__), 0 : 0))
;
4185 }
4186 gsi_insert_after (&gsi, gimple_build_cond_empty (t), GSI_SAME_STMT);
4187
4188 /* Remove the GIMPLE_OMP_FOR statement. */
4189 gsi_remove (&gsi, true);
4190
4191 if (gsi_end_p (gsif))
4192 gsif = gsi_after_labels (gsi_bb (gsif));
4193 gsi_next (&gsif);
4194
4195 /* Iteration setup for sequential loop goes in L0_BB. */
4196 tree startvar = fd->loop.v;
4197 tree endvar = NULL_TREE(tree) nullptr;
4198
4199 if (gimple_omp_for_combined_p (fd->for_stmt))
4200 {
4201 gcc_assert (gimple_code (inner_stmt) == GIMPLE_OMP_FOR((void)(!(gimple_code (inner_stmt) == GIMPLE_OMP_FOR &&
gimple_omp_for_kind (inner_stmt) == GF_OMP_FOR_KIND_SIMD) ? fancy_abort
("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 4203, __FUNCTION__), 0 : 0))
4202 && gimple_omp_for_kind (inner_stmt)((void)(!(gimple_code (inner_stmt) == GIMPLE_OMP_FOR &&
gimple_omp_for_kind (inner_stmt) == GF_OMP_FOR_KIND_SIMD) ? fancy_abort
("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 4203, __FUNCTION__), 0 : 0))
4203 == GF_OMP_FOR_KIND_SIMD)((void)(!(gimple_code (inner_stmt) == GIMPLE_OMP_FOR &&
gimple_omp_for_kind (inner_stmt) == GF_OMP_FOR_KIND_SIMD) ? fancy_abort
("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 4203, __FUNCTION__), 0 : 0))
;
4204 tree innerc = omp_find_clause (gimple_omp_for_clauses (inner_stmt),
4205 OMP_CLAUSE__LOOPTEMP_);
4206 gcc_assert (innerc)((void)(!(innerc) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 4206, __FUNCTION__), 0 : 0))
;
4207 startvar = OMP_CLAUSE_DECL (innerc)(*(omp_clause_elt_check (((omp_clause_range_check (((tree_check
((innerc), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 4207, __FUNCTION__, (OMP_CLAUSE)))), (OMP_CLAUSE_PRIVATE), (
OMP_CLAUSE__SCANTEMP_), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 4207, __FUNCTION__))), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 4207, __FUNCTION__)))
;
4208 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc)((contains_struct_check (((tree_check ((innerc), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 4208, __FUNCTION__, (OMP_CLAUSE)))), (TS_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 4208, __FUNCTION__))->common.chain)
,
4209 OMP_CLAUSE__LOOPTEMP_);
4210 gcc_assert (innerc)((void)(!(innerc) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 4210, __FUNCTION__), 0 : 0))
;
4211 endvar = OMP_CLAUSE_DECL (innerc)(*(omp_clause_elt_check (((omp_clause_range_check (((tree_check
((innerc), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 4211, __FUNCTION__, (OMP_CLAUSE)))), (OMP_CLAUSE_PRIVATE), (
OMP_CLAUSE__SCANTEMP_), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 4211, __FUNCTION__))), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 4211, __FUNCTION__)))
;
4212 }
4213
4214 gsi = gsi_start_bb (l0_bb);
4215 t = istart0;
4216 if (fd->ordered && fd->collapse == 1)
4217 t = fold_build2 (MULT_EXPR, fd->iter_type, t,fold_build2_loc (((location_t) 0), MULT_EXPR, fd->iter_type
, t, fold_convert_loc (((location_t) 0), fd->iter_type, fd
->loop.step) )
4218 fold_convert (fd->iter_type, fd->loop.step))fold_build2_loc (((location_t) 0), MULT_EXPR, fd->iter_type
, t, fold_convert_loc (((location_t) 0), fd->iter_type, fd
->loop.step) )
;
4219 else if (bias)
4220 t = fold_build2 (MINUS_EXPR, fd->iter_type, t, bias)fold_build2_loc (((location_t) 0), MINUS_EXPR, fd->iter_type
, t, bias )
;
4221 if (fd->ordered && fd->collapse == 1)
4222 {
4223 if (POINTER_TYPE_P (TREE_TYPE (startvar))(((enum tree_code) (((contains_struct_check ((startvar), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 4223, __FUNCTION__))->typed.type))->base.code) == POINTER_TYPE
|| ((enum tree_code) (((contains_struct_check ((startvar), (
TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 4223, __FUNCTION__))->typed.type))->base.code) == REFERENCE_TYPE
)
)
4224 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (startvar),fold_build2_loc (((location_t) 0), POINTER_PLUS_EXPR, ((contains_struct_check
((startvar), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 4224, __FUNCTION__))->typed.type), fd->loop.n1, fold_convert_loc
(((location_t) 0), sizetype_tab[(int) stk_sizetype], t) )
4225 fd->loop.n1, fold_convert (sizetype, t))fold_build2_loc (((location_t) 0), POINTER_PLUS_EXPR, ((contains_struct_check
((startvar), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 4224, __FUNCTION__))->typed.type), fd->loop.n1, fold_convert_loc
(((location_t) 0), sizetype_tab[(int) stk_sizetype], t) )
;
4226 else
4227 {
4228 t = fold_convert (TREE_TYPE (startvar), t)fold_convert_loc (((location_t) 0), ((contains_struct_check (
(startvar), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 4228, __FUNCTION__))->typed.type), t)
;
4229 t = fold_build2 (PLUS_EXPR, TREE_TYPE (startvar),fold_build2_loc (((location_t) 0), PLUS_EXPR, ((contains_struct_check
((startvar), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 4229, __FUNCTION__))->typed.type), fd->loop.n1, t )
4230 fd->loop.n1, t)fold_build2_loc (((location_t) 0), PLUS_EXPR, ((contains_struct_check
((startvar), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 4229, __FUNCTION__))->typed.type), fd->loop.n1, t )
;
4231 }
4232 }
4233 else
4234 {
4235 if (POINTER_TYPE_P (TREE_TYPE (startvar))(((enum tree_code) (((contains_struct_check ((startvar), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 4235, __FUNCTION__))->typed.type))->base.code) == POINTER_TYPE
|| ((enum tree_code) (((contains_struct_check ((startvar), (
TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 4235, __FUNCTION__))->typed.type))->base.code) == REFERENCE_TYPE
)
)
4236 t = fold_convert (signed_type_for (TREE_TYPE (startvar)), t)fold_convert_loc (((location_t) 0), signed_type_for (((contains_struct_check
((startvar), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 4236, __FUNCTION__))->typed.type)), t)
;
4237 t = fold_convert (TREE_TYPE (startvar), t)fold_convert_loc (((location_t) 0), ((contains_struct_check (
(startvar), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 4237, __FUNCTION__))->typed.type), t)
;
4238 }
4239 t = force_gimple_operand_gsi (&gsi, t,
4240 DECL_P (startvar)(tree_code_type[(int) (((enum tree_code) (startvar)->base.
code))] == tcc_declaration)
4241 && TREE_ADDRESSABLE (startvar)((startvar)->base.addressable_flag),
4242 NULL_TREE(tree) nullptr, false, GSI_CONTINUE_LINKING);
4243 assign_stmt = gimple_build_assign (startvar, t);
4244 gsi_insert_after (&gsi, assign_stmt, GSI_CONTINUE_LINKING);
4245 if (cond_var)
4246 {
4247 tree itype = TREE_TYPE (cond_var)((contains_struct_check ((cond_var), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 4247, __FUNCTION__))->typed.type)
;
4248 /* For lastprivate(conditional:) itervar, we need some iteration
4249 counter that starts at unsigned non-zero and increases.
4250 Prefer as few IVs as possible, so if we can use startvar
4251 itself, use that, or startvar + constant (those would be
4252 incremented with step), and as last resort use the s0 + 1
4253 incremented by 1. */
4254 if ((fd->ordered && fd->collapse == 1)
4255 || bias
4256 || POINTER_TYPE_P (type)(((enum tree_code) (type)->base.code) == POINTER_TYPE || (
(enum tree_code) (type)->base.code) == REFERENCE_TYPE)
4257 || TREE_CODE (fd->loop.n1)((enum tree_code) (fd->loop.n1)->base.code) != INTEGER_CST
4258 || fd->loop.cond_code != LT_EXPR)
4259 t = fold_build2 (PLUS_EXPR, itype, fold_convert (itype, istart0),fold_build2_loc (((location_t) 0), PLUS_EXPR, itype, fold_convert_loc
(((location_t) 0), itype, istart0), build_int_cst (itype, 1)
)
4260 build_int_cst (itype, 1))fold_build2_loc (((location_t) 0), PLUS_EXPR, itype, fold_convert_loc
(((location_t) 0), itype, istart0), build_int_cst (itype, 1)
)
;
4261 else if (tree_int_cst_sgn (fd->loop.n1) == 1)
4262 t = fold_convert (itype, t)fold_convert_loc (((location_t) 0), itype, t);
4263 else
4264 {
4265 tree c = fold_convert (itype, fd->loop.n1)fold_convert_loc (((location_t) 0), itype, fd->loop.n1);
4266 c = fold_build2 (MINUS_EXPR, itype, build_int_cst (itype, 1), c)fold_build2_loc (((location_t) 0), MINUS_EXPR, itype, build_int_cst
(itype, 1), c )
;
4267 t = fold_build2 (PLUS_EXPR, itype, fold_convert (itype, t), c)fold_build2_loc (((location_t) 0), PLUS_EXPR, itype, fold_convert_loc
(((location_t) 0), itype, t), c )
;
4268 }
4269 t = force_gimple_operand_gsi (&gsi, t, false,
4270 NULL_TREE(tree) nullptr, false, GSI_CONTINUE_LINKING);
4271 assign_stmt = gimple_build_assign (cond_var, t);
4272 gsi_insert_after (&gsi, assign_stmt, GSI_CONTINUE_LINKING);
4273 }
4274
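The lastprivate(conditional:) counter set up above only has to be non-zero at the first iteration and strictly increasing afterwards. When the lower bound is a positive constant and the condition is <, the iteration variable itself already qualifies; otherwise the code shifts it by 1 - n1 (or, as a last resort, uses istart0 + 1). A small illustration of the shifted form, with made-up bounds (not GCC code):

#include <stdio.h>

int
main (void)
{
  long n1 = -3, n2 = 5, step = 2;       /* loop: for (i = n1; i < n2; i += step)  */
  unsigned long shift = 1 - n1;         /* makes the counter 1 at i == n1  */

  for (long i = n1; i < n2; i += step)
    {
      unsigned long cond_counter = (unsigned long) i + shift;
      printf ("i = %ld  counter = %lu\n", i, cond_counter);     /* 1, 3, 5, 7  */
    }
  return 0;
}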
4275 t = iend0;
4276 if (fd->ordered && fd->collapse == 1)
4277 t = fold_build2 (MULT_EXPR, fd->iter_type, t,fold_build2_loc (((location_t) 0), MULT_EXPR, fd->iter_type
, t, fold_convert_loc (((location_t) 0), fd->iter_type, fd
->loop.step) )
4278 fold_convert (fd->iter_type, fd->loop.step))fold_build2_loc (((location_t) 0), MULT_EXPR, fd->iter_type
, t, fold_convert_loc (((location_t) 0), fd->iter_type, fd
->loop.step) )
;
4279 else if (bias)
4280 t = fold_build2 (MINUS_EXPR, fd->iter_type, t, bias)fold_build2_loc (((location_t) 0), MINUS_EXPR, fd->iter_type
, t, bias )
;
4281 if (fd->ordered && fd->collapse == 1)
4282 {
4283 if (POINTER_TYPE_P (TREE_TYPE (startvar))(((enum tree_code) (((contains_struct_check ((startvar), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 4283, __FUNCTION__))->typed.type))->base.code) == POINTER_TYPE
|| ((enum tree_code) (((contains_struct_check ((startvar), (
TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 4283, __FUNCTION__))->typed.type))->base.code) == REFERENCE_TYPE
)
)
4284 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (startvar),fold_build2_loc (((location_t) 0), POINTER_PLUS_EXPR, ((contains_struct_check
((startvar), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 4284, __FUNCTION__))->typed.type), fd->loop.n1, fold_convert_loc
(((location_t) 0), sizetype_tab[(int) stk_sizetype], t) )
4285 fd->loop.n1, fold_convert (sizetype, t))fold_build2_loc (((location_t) 0), POINTER_PLUS_EXPR, ((contains_struct_check
((startvar), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 4284, __FUNCTION__))->typed.type), fd->loop.n1, fold_convert_loc
(((location_t) 0), sizetype_tab[(int) stk_sizetype], t) )
;
4286 else
4287 {
4288 t = fold_convert (TREE_TYPE (startvar), t)fold_convert_loc (((location_t) 0), ((contains_struct_check (
(startvar), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 4288, __FUNCTION__))->typed.type), t)
;
4289 t = fold_build2 (PLUS_EXPR, TREE_TYPE (startvar),fold_build2_loc (((location_t) 0), PLUS_EXPR, ((contains_struct_check
((startvar), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 4289, __FUNCTION__))->typed.type), fd->loop.n1, t )
4290 fd->loop.n1, t)fold_build2_loc (((location_t) 0), PLUS_EXPR, ((contains_struct_check
((startvar), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 4289, __FUNCTION__))->typed.type), fd->loop.n1, t )
;
4291 }
4292 }
4293 else
4294 {
4295 if (POINTER_TYPE_P (TREE_TYPE (startvar))(((enum tree_code) (((contains_struct_check ((startvar), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 4295, __FUNCTION__))->typed.type))->base.code) == POINTER_TYPE
|| ((enum tree_code) (((contains_struct_check ((startvar), (
TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 4295, __FUNCTION__))->typed.type))->base.code) == REFERENCE_TYPE
)
)
4296 t = fold_convert (signed_type_for (TREE_TYPE (startvar)), t)fold_convert_loc (((location_t) 0), signed_type_for (((contains_struct_check
((startvar), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 4296, __FUNCTION__))->typed.type)), t)
;
4297 t = fold_convert (TREE_TYPE (startvar), t)fold_convert_loc (((location_t) 0), ((contains_struct_check (
(startvar), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 4297, __FUNCTION__))->typed.type), t)
;
4298 }
4299 iend = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE(tree) nullptr,
4300 false, GSI_CONTINUE_LINKING);
4301 if (endvar)
4302 {
4303 assign_stmt = gimple_build_assign (endvar, iend);
4304 gsi_insert_after (&gsi, assign_stmt, GSI_CONTINUE_LINKING);
4305 if (useless_type_conversion_p (TREE_TYPE (fd->loop.v)((contains_struct_check ((fd->loop.v), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 4305, __FUNCTION__))->typed.type)
, TREE_TYPE (iend)((contains_struct_check ((iend), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 4305, __FUNCTION__))->typed.type)
))
4306 assign_stmt = gimple_build_assign (fd->loop.v, iend);
4307 else
4308 assign_stmt = gimple_build_assign (fd->loop.v, NOP_EXPR, iend);
4309 gsi_insert_after (&gsi, assign_stmt, GSI_CONTINUE_LINKING);
4310 }
4311 /* Handle linear clause adjustments. */
4312 tree itercnt = NULL_TREE(tree) nullptr;
4313 if (gimple_omp_for_kind (fd->for_stmt) == GF_OMP_FOR_KIND_FOR)
4314 for (tree c = gimple_omp_for_clauses (fd->for_stmt);
4315 c; c = OMP_CLAUSE_CHAIN (c)((contains_struct_check (((tree_check ((c), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 4315, __FUNCTION__, (OMP_CLAUSE)))), (TS_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 4315, __FUNCTION__))->common.chain)
)
4316 if (OMP_CLAUSE_CODE (c)((tree_check ((c), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 4316, __FUNCTION__, (OMP_CLAUSE))))->omp_clause.code
== OMP_CLAUSE_LINEAR
4317 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c)((omp_clause_subcode_check ((c), (OMP_CLAUSE_LINEAR), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 4317, __FUNCTION__))->base.public_flag)
)
4318 {
4319 tree d = OMP_CLAUSE_DECL (c)(*(omp_clause_elt_check (((omp_clause_range_check (((tree_check
((c), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 4319, __FUNCTION__, (OMP_CLAUSE)))), (OMP_CLAUSE_PRIVATE), (
OMP_CLAUSE__SCANTEMP_), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 4319, __FUNCTION__))), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 4319, __FUNCTION__)))
;
4320 tree t = d, a, dest;
4321 if (omp_privatize_by_reference (t))
4322 t = build_simple_mem_ref_loc (OMP_CLAUSE_LOCATION (c)((tree_check ((c), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 4322, __FUNCTION__, (OMP_CLAUSE))))->omp_clause.locus
, t);
4323 tree type = TREE_TYPE (t)((contains_struct_check ((t), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 4323, __FUNCTION__))->typed.type)
;
4324 if (POINTER_TYPE_P (type)(((enum tree_code) (type)->base.code) == POINTER_TYPE || (
(enum tree_code) (type)->base.code) == REFERENCE_TYPE)
)
4325 type = sizetypesizetype_tab[(int) stk_sizetype];
4326 dest = unshare_expr (t);
4327 tree v = create_tmp_var (TREE_TYPE (t)((contains_struct_check ((t), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 4327, __FUNCTION__))->typed.type)
, NULLnullptr);
4328 expand_omp_build_assign (&gsif, v, t);
4329 if (itercnt == NULL_TREE(tree) nullptr)
4330 {
4331 itercnt = startvar;
4332 tree n1 = fd->loop.n1;
4333 if (POINTER_TYPE_P (TREE_TYPE (itercnt))(((enum tree_code) (((contains_struct_check ((itercnt), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 4333, __FUNCTION__))->typed.type))->base.code) == POINTER_TYPE
|| ((enum tree_code) (((contains_struct_check ((itercnt), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 4333, __FUNCTION__))->typed.type))->base.code) == REFERENCE_TYPE
)
)
4334 {
4335 itercnt
4336 = fold_convert (signed_type_for (TREE_TYPE (itercnt)),fold_convert_loc (((location_t) 0), signed_type_for (((contains_struct_check
((itercnt), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 4336, __FUNCTION__))->typed.type)), itercnt)
4337 itercnt)fold_convert_loc (((location_t) 0), signed_type_for (((contains_struct_check
((itercnt), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 4336, __FUNCTION__))->typed.type)), itercnt)
;
4338 n1 = fold_convert (TREE_TYPE (itercnt), n1)fold_convert_loc (((location_t) 0), ((contains_struct_check (
(itercnt), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 4338, __FUNCTION__))->typed.type), n1)
;
4339 }
4340 itercnt = fold_build2 (MINUS_EXPR, TREE_TYPE (itercnt),fold_build2_loc (((location_t) 0), MINUS_EXPR, ((contains_struct_check
((itercnt), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 4340, __FUNCTION__))->typed.type), itercnt, n1 )
4341 itercnt, n1)fold_build2_loc (((location_t) 0), MINUS_EXPR, ((contains_struct_check
((itercnt), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 4340, __FUNCTION__))->typed.type), itercnt, n1 )
;
4342 itercnt = fold_build2 (EXACT_DIV_EXPR, TREE_TYPE (itercnt),fold_build2_loc (((location_t) 0), EXACT_DIV_EXPR, ((contains_struct_check
((itercnt), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 4342, __FUNCTION__))->typed.type), itercnt, fd->loop.
step )
4343 itercnt, fd->loop.step)fold_build2_loc (((location_t) 0), EXACT_DIV_EXPR, ((contains_struct_check
((itercnt), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/omp-expand.c"
, 4342, __FUNCTION__))->typed.type), itercnt, fd->loop.
step )
;
4344 itercnt = force_gimple_operand_gsi (&gsi, itercnt, true,
4345 NULL_TREE(tree) nullptr, false,
4346 GSI_CONTINUE_LINKING);
4347 }
4348 a = fold_build2 (MULT_EXPR, type,
4349 fold_convert (type, itercnt),
4350 fold_convert (type, OMP_CLAUSE_LINEAR_STEP (c)));
4351 t = fold_build2 (type == TREE_TYPE (t) ? PLUS_EXPR
4352 : POINTER_PLUS_EXPR, TREE_TYPE (t), v, a);
4353 t = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
4354 false, GSI_CONTINUE_LINKING);
4355 expand_omp_build_assign (&gsi, dest, t, true);
4356 }
4357 if (fd->collapse > 1)
4358 expand_omp_for_init_vars (fd, &gsi, counts, NULL, inner_stmt, startvar);
4359
4360 if (fd->ordered)
4361 {
4362 /* Until now, counts array contained number of iterations or
4363 variable containing it for ith loop. From now on, we need
4364 those counts only for collapsed loops, and only for the 2nd
4365 till the last collapsed one. Move those one element earlier,
4366 we'll use counts[fd->collapse - 1] for the first source/sink
4367 iteration counter and so on and counts[fd->ordered]
4368 as the array holding the current counter values for
4369 depend(source). */
4370 if (fd->collapse > 1)
4371 memmove (counts, counts + 1, (fd->collapse - 1) * sizeof (counts[0]));
4372 if (broken_loop)
4373 {
4374 int i;
4375 for (i = fd->collapse; i < fd->ordered; i++)
4376 {
4377 tree type = TREE_TYPE (fd->loops[i].v);
4378 tree this_cond
4379 = fold_build2 (fd->loops[i].cond_code, boolean_type_node,
4380 fold_convert (type, fd->loops[i].n1),
4381 fold_convert (type, fd->loops[i].n2));
4382 if (!integer_onep (this_cond))
4383 break;
4384 }
4385 if (i < fd->ordered)
4386 {
4387 cont_bb
4388 = create_empty_bb (EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);
4389 add_bb_to_loop (cont_bb, l1_bb->loop_father);
4390 gimple_stmt_iterator gsi = gsi_after_labels (cont_bb);
4391 gimple *g = gimple_build_omp_continue (fd->loop.v, fd->loop.v);
4392 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
4393 make_edge (cont_bb, l3_bb, EDGE_FALLTHRU);
4394 make_edge (cont_bb, l1_bb, 0);
4395 l2_bb = create_empty_bb (cont_bb);
4396 broken_loop = false;
4397 }
4398 }
4399 expand_omp_ordered_source_sink (region, fd, counts, cont_bb);
4400 cont_bb = expand_omp_for_ordered_loops (fd, counts, cont_bb, l1_bb,
4401 ordered_lastprivate);
4402 if (counts[fd->collapse - 1])
4403 {
4404 gcc_assert (fd->collapse == 1);
4405 gsi = gsi_last_bb (l0_bb);
4406 expand_omp_build_assign (&gsi, counts[fd->collapse - 1],
4407 istart0, true);
4408 if (cont_bb)
4409 {
4410 gsi = gsi_last_bb (cont_bb);
4411 t = fold_build2 (PLUS_EXPR, fd->iter_type,
4412 counts[fd->collapse - 1],
4413 build_int_cst (fd->iter_type, 1));
4414 expand_omp_build_assign (&gsi, counts[fd->collapse - 1], t);
4415 tree aref = build4 (ARRAY_REF, fd->iter_type,
4416 counts[fd->ordered], size_zero_node,
4417 NULL_TREE, NULL_TREE);
4418 expand_omp_build_assign (&gsi, aref, counts[fd->collapse - 1]);
4419 }
4420 t = counts[fd->collapse - 1];
4421 }
4422 else if (fd->collapse > 1)
4423 t = fd->loop.v;
4424 else
4425 {
4426 t = fold_build2 (MINUS_EXPR, TREE_TYPE (fd->loops[0].v),
4427 fd->loops[0].v, fd->loops[0].n1);
4428 t = fold_convert (fd->iter_type, t);
4429 }
4430 gsi = gsi_last_bb (l0_bb);
4431 tree aref = build4 (ARRAY_REF, fd->iter_type, counts[fd->ordered],
4432 size_zero_node, NULL_TREE, NULL_TREE);
4433 t = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
4434 false, GSI_CONTINUE_LINKING);
4435 expand_omp_build_assign (&gsi, aref, t, true);
4436 }
4437
4438 if (!broken_loop)
4439 {
4440 /* Code to control the increment and predicate for the sequential
4441 loop goes in the CONT_BB. */
4442 gsi = gsi_last_nondebug_bb (cont_bb);
4443 gomp_continue *cont_stmt = as_a <gomp_continue *> (gsi_stmt (gsi));
4444 gcc_assert (gimple_code (cont_stmt) == GIMPLE_OMP_CONTINUE);
4445 vmain = gimple_omp_continue_control_use (cont_stmt);
4446 vback = gimple_omp_continue_control_def (cont_stmt);
4447
4448 if (cond_var)
4449 {
4450 tree itype = TREE_TYPE (cond_var);
4451 tree t2;
4452 if ((fd->ordered && fd->collapse == 1)
4453 || bias
4454 || POINTER_TYPE_P (type)
4455 || TREE_CODE (fd->loop.n1) != INTEGER_CST
4456 || fd->loop.cond_code != LT_EXPR)
4457 t2 = build_int_cst (itype, 1);
4458 else
4459 t2 = fold_convert (itype, fd->loop.step);
4460 t2 = fold_build2 (PLUS_EXPR, itype, cond_var, t2);
4461 t2 = force_gimple_operand_gsi (&gsi, t2, false,
4462 NULL_TREE, true, GSI_SAME_STMT);
4463 assign_stmt = gimple_build_assign (cond_var, t2);
4464 gsi_insert_before (&gsi, assign_stmt, GSI_SAME_STMT);
4465 }
4466
4467 if (!gimple_omp_for_combined_p (fd->for_stmt))
4468 {
4469 if (POINTER_TYPE_P (type))
4470 t = fold_build_pointer_plus (vmain, fd->loop.step);
4471 else
4472 t = fold_build2 (PLUS_EXPR, type, vmain, fd->loop.step);
4473 t = force_gimple_operand_gsi (&gsi, t,
4474 DECL_P (vback)
4475 && TREE_ADDRESSABLE (vback),
4476 NULL_TREE, true, GSI_SAME_STMT);
4477 assign_stmt = gimple_build_assign (vback, t);
4478 gsi_insert_before (&gsi, assign_stmt, GSI_SAME_STMT);
4479
4480 if (fd->ordered && counts[fd->collapse - 1] == NULL_TREE)
4481 {
4482 tree tem;
4483 if (fd->collapse > 1)
4484 tem = fd->loop.v;
4485 else
4486 {
4487 tem = fold_build2 (MINUS_EXPR, TREE_TYPE (fd->loops[0].v),
4488 fd->loops[0].v, fd->loops[0].n1);
4489 tem = fold_convert (fd->iter_type, tem);
4490 }
4491 tree aref = build4 (ARRAY_REF, fd->iter_type,
4492 counts[fd->ordered], size_zero_node,
4493 NULL_TREE, NULL_TREE);
4494 tem = force_gimple_operand_gsi (&gsi, tem, true, NULL_TREE,
4495 true, GSI_SAME_STMT);
4496 expand_omp_build_assign (&gsi, aref, tem);
4497 }
4498
4499 t = build2 (fd->loop.cond_code, boolean_type_node,
4500 DECL_P (vback) && TREE_ADDRESSABLE (vback) ? t : vback,
4501 iend);
4502 gcond *cond_stmt = gimple_build_cond_empty (t);
4503 gsi_insert_before (&gsi, cond_stmt, GSI_SAME_STMT);
4504 }
4505
4506 /* Remove GIMPLE_OMP_CONTINUE. */
4507 gsi_remove (&gsi, true);
4508
4509 if (fd->collapse > 1 && !gimple_omp_for_combined_p (fd->for_stmt))
4510 collapse_bb = extract_omp_for_update_vars (fd, NULL, cont_bb, l1_bb);
4511
4512 /* Emit code to get the next parallel iteration in L2_BB. */
4513 gsi = gsi_start_bb (l2_bb);
4514
4515 t = build_call_expr (builtin_decl_explicit (next_fn), 2,
4516 build_fold_addr_expr (istart0),
4517 build_fold_addr_expr (iend0));
4518 t = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
4519 false, GSI_CONTINUE_LINKING);
4520 if (TREE_TYPE (t) != boolean_type_node)
4521 t = fold_build2 (NE_EXPR, boolean_type_node,
4522 t, build_int_cst (TREE_TYPE (t), 0));
4523 gcond *cond_stmt = gimple_build_cond_empty (t);
4524 gsi_insert_after (&gsi, cond_stmt, GSI_CONTINUE_LINKING);
4525 }
4526
4527 /* Add the loop cleanup function. */
4528 gsi = gsi_last_nondebug_bb (exit_bb);
4529 if (gimple_omp_return_nowait_p (gsi_stmt (gsi)))
4530 t = builtin_decl_explicit (BUILT_IN_GOMP_LOOP_END_NOWAIT);
4531 else if (gimple_omp_return_lhs (gsi_stmt (gsi)))
4532 t = builtin_decl_explicit (BUILT_IN_GOMP_LOOP_END_CANCEL);
4533 else
4534 t = builtin_decl_explicit (BUILT_IN_GOMP_LOOP_END);
4535 gcall *call_stmt = gimple_build_call (t, 0);
4536 if (fd->ordered)
4537 {
4538 tree arr = counts[fd->ordered];
4539 tree clobber = build_clobber (TREE_TYPE (arr));
4540 gsi_insert_after (&gsi, gimple_build_assign (arr, clobber),
4541 GSI_SAME_STMT);
4542 }
4543 if (gimple_omp_return_lhs (gsi_stmt (gsi)))
4544 {
4545 gimple_call_set_lhs (call_stmt, gimple_omp_return_lhs (gsi_stmt (gsi)));
4546 if (fd->have_reductemp)
4547 {
4548 gimple *g = gimple_build_assign (reductions, NOP_EXPR,
4549 gimple_call_lhs (call_stmt));
4550 gsi_insert_after (&gsi, g, GSI_SAME_STMT);
4551 }
4552 }
4553 gsi_insert_after (&gsi, call_stmt, GSI_SAME_STMT);
4554 gsi_remove (&gsi, true);
4555
4556 /* Connect the new blocks. */
4557 find_edge (entry_bb, l0_bb)->flags = EDGE_TRUE_VALUE;
4558 find_edge (entry_bb, l3_bb)->flags = EDGE_FALSE_VALUE;
4559
4560 if (!broken_loop)
4561 {
4562 gimple_seq phis;
4563
4564 e = find_edge (cont_bb, l3_bb);
4565 ne = make_edge (l2_bb, l3_bb, EDGE_FALSE_VALUE);
4566
4567 phis = phi_nodes (l3_bb);
4568 for (gsi = gsi_start (phis); !gsi_end_p (gsi); gsi_next (&gsi))
4569 {
4570 gimple *phi = gsi_stmt (gsi);
4571 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, ne),
4572 PHI_ARG_DEF_FROM_EDGE (phi, e));
4573 }
4574 remove_edge (e);
4575
4576 make_edge (cont_bb, l2_bb, EDGE_FALSE_VALUE);
4577 e = find_edge (cont_bb, l1_bb);
4578 if (e == NULL)
4579 {
4580 e = BRANCH_EDGE (cont_bb);
4581 gcc_assert (single_succ (e->dest) == l1_bb);
4582 }
4583 if (gimple_omp_for_combined_p (fd->for_stmt))
4584 {
4585 remove_edge (e);
4586 e = NULL;
4587 }
4588 else if (fd->collapse > 1)
4589 {
4590 remove_edge (e);
4591 e = make_edge (cont_bb, collapse_bb, EDGE_TRUE_VALUE);
4592 }
4593 else
4594 e->flags = EDGE_TRUE_VALUE;
4595 if (e)
4596 {
4597 e->probability = profile_probability::guessed_always ().apply_scale (7, 8);
4598 find_edge (cont_bb, l2_bb)->probability = e->probability.invert ();
4599 }
4600 else
4601 {
4602 e = find_edge (cont_bb, l2_bb);
4603 e->flags = EDGE_FALLTHRU;
4604 }
4605 make_edge (l2_bb, l0_bb, EDGE_TRUE_VALUE);
4606
4607 if (gimple_in_ssa_p (cfun))
4608 {
4609 /* Add phis to the outer loop that connect to the phis in the inner,
4610 original loop, and move the loop entry value of the inner phi to
4611 the loop entry value of the outer phi. */
4612 gphi_iterator psi;
4613 for (psi = gsi_start_phis (l3_bb); !gsi_end_p (psi); gsi_next (&psi))
4614 {
4615 location_t locus;
4616 gphi *nphi;
4617 gphi *exit_phi = psi.phi ();
4618
4619 if (virtual_operand_p (gimple_phi_result (exit_phi)))
4620 continue;
4621
4622 edge l2_to_l3 = find_edge (l2_bb, l3_bb);
4623 tree exit_res = PHI_ARG_DEF_FROM_EDGE (exit_phi, l2_to_l3);
4624
4625 basic_block latch = BRANCH_EDGE (cont_bb)->dest;
4626 edge latch_to_l1 = find_edge (latch, l1_bb);
4627 gphi *inner_phi
4628 = find_phi_with_arg_on_edge (exit_res, latch_to_l1);
4629
4630 tree t = gimple_phi_result (exit_phi);
4631 tree new_res = copy_ssa_name (t, NULL);
4632 nphi = create_phi_node (new_res, l0_bb);
4633
4634 edge l0_to_l1 = find_edge (l0_bb, l1_bb);
4635 t = PHI_ARG_DEF_FROM_EDGE (inner_phi, l0_to_l1);
4636 locus = gimple_phi_arg_location_from_edge (inner_phi, l0_to_l1);
4637 edge entry_to_l0 = find_edge (entry_bb, l0_bb);
4638 add_phi_arg (nphi, t, entry_to_l0, locus);
4639
4640 edge l2_to_l0 = find_edge (l2_bb, l0_bb);
4641 add_phi_arg (nphi, exit_res, l2_to_l0, UNKNOWN_LOCATION);
4642
4643 add_phi_arg (inner_phi, new_res, l0_to_l1, UNKNOWN_LOCATION);
4644 }
4645 }
4646
4647 set_immediate_dominator (CDI_DOMINATORS, l2_bb,
4648 recompute_dominator (CDI_DOMINATORS, l2_bb));
4649 set_immediate_dominator (CDI_DOMINATORS, l3_bb,
4650 recompute_dominator (CDI_DOMINATORS, l3_bb));
4651 set_immediate_dominator (CDI_DOMINATORS, l0_bb,
4652 recompute_dominator (CDI_DOMINATORS, l0_bb));
4653 set_immediate_dominator (CDI_DOMINATORS, l1_bb,
4654 recompute_dominator (CDI_DOMINATORS, l1_bb));
4655
4656 /* We enter expand_omp_for_generic with a loop. This original loop may
4657 have its own loop struct, or it may be part of an outer loop struct
4658 (which may be the fake loop). */
4659 class loop *outer_loop = entry_bb->loop_father;
4660 bool orig_loop_has_loop_struct = l1_bb->loop_father != outer_loop;
4661
4662 add_bb_to_loop (l2_bb, outer_loop);
4663
4664 /* We've added a new loop around the original loop. Allocate the
4665 corresponding loop struct. */
4666 class loop *new_loop = alloc_loop ();
4667 new_loop->header = l0_bb;
4668 new_loop->latch = l2_bb;
4669 add_loop (new_loop, outer_loop);
4670
4671 /* Allocate a loop structure for the original loop unless we already
4672 had one. */
4673 if (!orig_loop_has_loop_struct
4674 && !gimple_omp_for_combined_p (fd->for_stmt))
4675 {
4676 class loop *orig_loop = alloc_loop ();
4677 orig_loop->header = l1_bb;
4678 /* The loop may have multiple latches. */
4679 add_loop (orig_loop, new_loop);
4680 }
4681 }
4682}
4683
4684/* Helper function for expand_omp_for_static_nochunk. If PTR is NULL,
4685 compute needed allocation size. If !ALLOC, this handles the team
4686 allocations; if ALLOC, the per-thread allocation. SZ is the initial needed size for
4687 other purposes, ALLOC_ALIGN guaranteed alignment of allocation in bytes,
4688 CNT number of elements of each array, for !ALLOC this is
4689 omp_get_num_threads (), for ALLOC number of iterations handled by the
4690 current thread. If PTR is non-NULL, it is the start of the allocation
4691 and this routine shall assign to OMP_CLAUSE_DECL (c) of those _scantemp_
4692 clauses pointers to the corresponding arrays. */
4693
4694static tree
4695expand_omp_scantemp_alloc (tree clauses, tree ptr, unsigned HOST_WIDE_INT sz,
4696 unsigned HOST_WIDE_INT alloc_align, tree cnt,
4697 gimple_stmt_iterator *gsi, bool alloc)
4698{
4699 tree eltsz = NULL_TREE;
4700 unsigned HOST_WIDE_INT preval = 0;
4701 if (ptr && sz)
4702 ptr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (ptr),
4703 ptr, size_int (sz));
4704 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
4705 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE__SCANTEMP_
4706 && !OMP_CLAUSE__SCANTEMP__CONTROL (c)
4707 && (!OMP_CLAUSE__SCANTEMP__ALLOC (c)) != alloc)
4708 {
4709 tree pointee_type = TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c)));
4710 unsigned HOST_WIDE_INT al = TYPE_ALIGN_UNIT (pointee_type);
4711 if (tree_fits_uhwi_p (TYPE_SIZE_UNIT (pointee_type)))
4712 {
4713 unsigned HOST_WIDE_INT szl
4714 = tree_to_uhwi (TYPE_SIZE_UNIT (pointee_type));
4715 szl = least_bit_hwi (szl);
4716 if (szl)
4717 al = MIN (al, szl);
4718 }
4719 if (ptr == NULL_TREE)
4720 {
4721 if (eltsz == NULL_TREE)
4722 eltsz = TYPE_SIZE_UNIT (pointee_type);
4723 else
4724 eltsz = size_binop (PLUS_EXPR, eltsz,
4725 TYPE_SIZE_UNIT (pointee_type));
4726 }
4727 if (preval == 0 && al <= alloc_align)
4728 {
4729 unsigned HOST_WIDE_INT diff = ROUND_UP (sz, al) - sz;
4730 sz += diff;
4731 if (diff && ptr)
4732 ptr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (ptr),
4733 ptr, size_int (diff));
4734 }
4735 else if (al > preval)
4736 {
4737 if (ptr)
4738 {
4739 ptr = fold_convert (pointer_sized_int_node, ptr);
4740 ptr = fold_build2 (PLUS_EXPR, pointer_sized_int_node, ptr,
4741 build_int_cst (pointer_sized_int_node,
4742 al - 1));
4743 ptr = fold_build2 (BIT_AND_EXPR, pointer_sized_int_node, ptr,
4744 build_int_cst (pointer_sized_int_node,
4745 -(HOST_WIDE_INT) al));
4746 ptr = fold_convert (ptr_type_node, ptr);
4747 }
4748 else
4749 sz += al - 1;
4750 }
4751 if (tree_fits_uhwi_p (TYPE_SIZE_UNIT (pointee_type)))
4752 preval = al;
4753 else
4754 preval = 1;
4755 if (ptr)
4756 {
4757 expand_omp_build_assign (gsi, OMP_CLAUSE_DECL (c), ptr, false);
4758 ptr = OMP_CLAUSE_DECL (c);
4759 ptr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (ptr), ptr,
4760 size_binop (MULT_EXPR, cnt,
4761 TYPE_SIZE_UNIT (pointee_type)));
4762 }
4763 }
4764
4765 if (ptr == NULL_TREE)
4766 {
4767 eltsz = size_binop (MULT_EXPR, eltsz, cnt);
4768 if (sz)
4769 eltsz = size_binop (PLUS_EXPR, eltsz, size_int (sz));
4770 return eltsz;
4771 }
4772 else
4773 return ptr;
4774}
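[Editor's note] The helper above packs the _scantemp_ arrays back to back, rounding the running offset up to each array's element alignment (ROUND_UP (sz, al) on the size side, and the (ptr + al - 1) & -al masking on the pointer side). As a minimal, self-contained sketch of that rounding arithmetic only (the names round_up_offset and main are hypothetical and not part of omp-expand.c):

#include <stdint.h>
#include <stdio.h>

/* Round the running offset SZ up to the next multiple of AL,
   where AL is a power of two -- the same arithmetic as ROUND_UP above.  */
static uintmax_t
round_up_offset (uintmax_t sz, uintmax_t al)
{
  return (sz + al - 1) & ~(al - 1);
}

int
main (void)
{
  /* 13 bytes already reserved; the next array needs 8-byte alignment.  */
  printf ("next array starts at offset %ju\n", round_up_offset (13, 8));
  return 0;
}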
4775
4776/* Return the last _looptemp_ clause if one has been created for
4777 lastprivate on distribute parallel for{, simd} or taskloop.
4778 FD is the loop data and INNERC should be the second _looptemp_
4779 clause (the one holding the end of the range).
4780 This is followed by collapse - 1 _looptemp_ clauses for the
4781 counts[1] and up, and for triangular loops followed by 4
4782 further _looptemp_ clauses (one for counts[0], one first_inner_iterations,
4783 one factor and one adjn1). After this there is optionally one
4784 _looptemp_ clause that this function returns. */
4785
4786static tree
4787find_lastprivate_looptemp (struct omp_for_data *fd, tree innerc)
4788{
4789 gcc_assert (innerc);
4790 int count = fd->collapse - 1;
4791 if (fd->non_rect
4792 && fd->last_nonrect == fd->first_nonrect + 1
4793 && !TYPE_UNSIGNED (TREE_TYPE (fd->loops[fd->last_nonrect].v)))
4794 count += 4;
4795 for (int i = 0; i < count; i++)
4796 {
4797 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
4798 OMP_CLAUSE__LOOPTEMP_);
4799 gcc_assert (innerc);
4800 }
4801 return omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
4802 OMP_CLAUSE__LOOPTEMP_);
4803}
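[Editor's note] As a worked example of the clause counting above: with fd->collapse == 3, the loop skips collapse - 1 = 2 _looptemp_ clauses after INNERC; if in addition the nest is non-rectangular with last_nonrect == first_nonrect + 1 and a signed iterator, 4 more are skipped (count = 6), and the _looptemp_ clause following those, if any, is the one returned.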
4804
4805/* A subroutine of expand_omp_for. Generate code for a parallel
4806 loop with static schedule and no specified chunk size. Given
4807 parameters:
4808
4809 for (V = N1; V cond N2; V += STEP) BODY;
4810
4811 where COND is "<" or ">", we generate pseudocode
4812
4813 if ((__typeof (V)) -1 > 0 && N2 cond N1) goto L2;
4814 if (cond is <)
4815 adj = STEP - 1;
4816 else
4817 adj = STEP + 1;
4818 if ((__typeof (V)) -1 > 0 && cond is >)
4819 n = -(adj + N2 - N1) / -STEP;
4820 else
4821 n = (adj + N2 - N1) / STEP;
4822 q = n / nthreads;
4823 tt = n % nthreads;
4824 if (threadid < tt) goto L3; else goto L4;
4825 L3:
4826 tt = 0;
4827 q = q + 1;
4828 L4:
4829 s0 = q * threadid + tt;
4830 e0 = s0 + q;
4831 V = s0 * STEP + N1;
4832 if (s0 >= e0) goto L2; else goto L0;
4833 L0:
4834 e = e0 * STEP + N1;
4835 L1:
4836 BODY;
4837 V += STEP;
4838 if (V cond e) goto L1;
4839 L2:
4840*/
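[Editor's note] A minimal, self-contained sketch of the partitioning in the pseudocode above, in which the first n % nthreads threads receive one extra iteration (the names static_nochunk_range and main are hypothetical and not part of GCC):

#include <stdio.h>

/* Compute the half-open iteration range [*s0, *e0) assigned to THREADID
   out of NTHREADS for N total iterations, following the q/tt computation
   in the pseudocode above.  */
static void
static_nochunk_range (unsigned long n, unsigned nthreads, unsigned threadid,
                      unsigned long *s0, unsigned long *e0)
{
  unsigned long q = n / nthreads;
  unsigned long tt = n % nthreads;
  if (threadid < tt)
    {
      tt = 0;
      q = q + 1;
    }
  *s0 = q * threadid + tt;
  *e0 = *s0 + q;
}

int
main (void)
{
  /* For n = 10 and nthreads = 4 this prints [0,3) [3,6) [6,8) [8,10).  */
  for (unsigned tid = 0; tid < 4; tid++)
    {
      unsigned long s0, e0;
      static_nochunk_range (10, 4, tid, &s0, &e0);
      printf ("thread %u: [%lu, %lu)\n", tid, s0, e0);
    }
  return 0;
}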
4841
4842static void
4843expand_omp_for_static_nochunk (struct omp_region *region,
4844 struct omp_for_data *fd,
4845 gimple *inner_stmt)
4846{
4847 tree n, q, s0, e0, e, t, tt, nthreads = NULL_TREE, threadid;
4848 tree type, itype, vmain, vback;
4849 basic_block entry_bb, second_bb, third_bb, exit_bb, seq_start_bb;
4850 basic_block body_bb, cont_bb, collapse_bb = NULL;
4851 basic_block fin_bb, fourth_bb = NULL, fifth_bb = NULL, sixth_bb = NULL;
4852 basic_block exit1_bb = NULL, exit2_bb = NULL, exit3_bb = NULL;
4853 gimple_stmt_iterator gsi, gsip;
4854 edge ep;
4855 bool broken_loop = region->cont == NULL;
4856 tree *counts = NULL;
4857 tree n1, n2, step;
4858 tree reductions = NULL_TREE;
4859 tree cond_var = NULL_TREE, condtemp = NULL_TREE;
4860
4861 itype = type = TREE_TYPE (fd->loop.v);
4862 if (POINTER_TYPE_P (type))
4863 itype = signed_type_for (type);
4864
4865 entry_bb = region->entry;
4866 cont_bb = region->cont;
4867 gcc_assert (EDGE_COUNT (entry_bb->succs) == 2);
4868 fin_bb = BRANCH_EDGE (entry_bb)->dest;
4869 gcc_assert (broken_loop
4870 || (fin_bb == FALLTHRU_EDGE (cont_bb)->dest));
4871 seq_start_bb = split_edge (FALLTHRU_EDGE (entry_bb));
4872 body_bb = single_succ (seq_start_bb);
4873 if (!broken_loop)
4874 {
4875 gcc_assert (BRANCH_EDGE (cont_bb)->dest == body_bb
4876 || single_succ (BRANCH_EDGE (cont_bb)->dest) == body_bb);
4877 gcc_assert (EDGE_COUNT (cont_bb->succs) == 2);
4878 }
4879 exit_bb = region->exit;
4880
4881 /* Iteration space partitioning goes in ENTRY_BB. */
4882 gsi = gsi_last_nondebug_bb (entry_bb);
4883 gcc_assert (gimple_code (gsi_stmt (gsi)) == GIMPLE_OMP_FOR);
4884 gsip = gsi;
4885 gsi_prev (&gsip);
4886
4887 if (fd->collapse > 1)
4888 {
4889 int first_zero_iter = -1, dummy = -1;
4890 basic_block l2_dom_bb = NULL, dummy_bb = NULL;
4891
4892 counts = XALLOCAVEC (tree, fd->collapse);
4893 expand_omp_for_init_counts (fd, &gsi, entry_bb, counts,
4894 fin_bb, first_zero_iter,
4895 dummy_bb, dummy, l2_dom_bb);
4896 t = NULL_TREE;
4897 }
4898 else if (gimple_omp_for_combined_into_p (fd->for_stmt))
4899 t = integer_one_node;
4900 else
4901 t = fold_binary (fd->loop.cond_code, boolean_type_node,
4902 fold_convert (type, fd->loop.n1),
4903 fold_convert (type, fd->loop.n2));
4904 if (fd->collapse == 1
4905 && TYPE_UNSIGNED (type)
4906 && (t == NULL_TREE || !integer_onep (t)))
4907 {
4908 n1 = fold_convert (type, unshare_expr (fd->loop.n1));
4909 n1 = force_gimple_operand_gsi (&gsi, n1, true, NULL_TREE,
4910 true, GSI_SAME_STMT);
4911 n2 = fold_convert (type, unshare_expr (fd->loop.n2));
4912 n2 = force_gimple_operand_gsi (&gsi, n2, true, NULL_TREE,
4913 true, GSI_SAME_STMT);
4914 gcond *cond_stmt = expand_omp_build_cond (&gsi, fd->loop.cond_code,
4915 n1, n2);
4916 ep = split_block (entry_bb, cond_stmt);
4917 ep->flags = EDGE_TRUE_VALUE;
4918 entry_bb = ep->dest;
4919 ep->probability = profile_probability::very_likely ();
4920 ep = make_edge (ep->src, fin_bb, EDGE_FALSE_VALUE);
4921 ep->probability = profile_probability::very_unlikely ();
4922 if (gimple_in_ssa_p (cfun))
4923 {
4924 int dest_idx = find_edge (entry_bb, fin_bb)->dest_idx;
4925 for (gphi_iterator gpi = gsi_start_phis (fin_bb);
4926 !gsi_end_p (gpi); gsi_next (&gpi))
4927 {
4928 gphi *phi = gpi.phi ();
4929 add_phi_arg (phi, gimple_phi_arg_def (phi, dest_idx),
4930 ep, UNKNOWN_LOCATION);
4931 }
4932 }
4933 gsi = gsi_last_bb (entry_bb);
4934 }
4935
4936 if (fd->lastprivate_conditional)
4937 {
4938 tree clauses = gimple_omp_for_clauses (fd->for_stmt);
4939 tree c = omp_find_clause (clauses, OMP_CLAUSE__CONDTEMP_);
4940 if (fd->have_pointer_condtemp)
4941 condtemp = OMP_CLAUSE_DECL (c);
4942 c = omp_find_clause (OMP_CLAUSE_CHAIN (c), OMP_CLAUSE__CONDTEMP_);
4943 cond_var = OMP_CLAUSE_DECL (c);
4944 }
4945 if (fd->have_reductemp
4946 /* For scan, we don't want to reinitialize condtemp before the
4947 second loop. */
4948 || (fd->have_pointer_condtemp && !fd->have_scantemp)
4949 || fd->have_nonctrl_scantemp)
4950 {
4951 tree t1 = build_int_cst (long_integer_type_node, 0);
4952 tree t2 = build_int_cst (long_integer_type_node, 1);
4953 tree t3 = build_int_cstu (long_integer_type_node,
4954 (HOST_WIDE_INT_1U << 31) + 1);
4955 tree clauses = gimple_omp_for_clauses (fd->for_stmt);
4956 gimple_stmt_iterator gsi2 = gsi_none ();
4957 gimple *g = NULL;
4958 tree mem = null_pointer_node, memv = NULL_TREE;
4959 unsigned HOST_WIDE_INT condtemp_sz = 0;
4960 unsigned HOST_WIDE_INT alloc_align = 0;
4961 if (fd->have_reductemp)
4962 {
4963 gcc_assert (!fd->have_nonctrl_scantemp);
4964 tree c = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
4965 reductions = OMP_CLAUSE_DECL (c);
;
4966 gcc_assert (TREE_CODE (reductions) == SSA_NAME);
4967 g = SSA_NAME_DEF_STMT (reductions);
4968 reductions = gimple_assign_rhs1 (g);
4969 OMP_CLAUSE_DECL (c) = reductions;
4970 gsi2 = gsi_for_stmt (g);
4971 }
4972 else
4973 {
4974 if (gsi_end_p (gsip))
4975 gsi2 = gsi_after_labels (region->entry);
4976 else
4977 gsi2 = gsip;
4978 reductions = null_pointer_node;
4979 }
4980 if (fd->have_pointer_condtemp || fd->have_nonctrl_scantemp)
4981 {
4982 tree type;
4983 if (fd->have_pointer_condtemp)
4984 type = TREE_TYPE (condtemp);
4985 else
4986 type = ptr_type_node;
4987 memv = create_tmp_var (type);
4988 TREE_ADDRESSABLE (memv) = 1;
4989 unsigned HOST_WIDE_INT sz = 0;
4990 tree size = NULL_TREE;
4991 if (fd->have_pointer_condtemp)
4992 {
4993 sz = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (type)));
4994 sz *= fd->lastprivate_conditional;
4995 condtemp_sz = sz;
4996 }
4997 if (fd->have_nonctrl_scantemp)
4998 {
4999 nthreads = builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS);
5000 gimple *g = gimple_build_call (nthreads, 0);
5001 nthreads = create_tmp_var (integer_type_node);
5002 gimple_call_set_lhs (g, nthreads);
5003 gsi_insert_before (&gsi2, g, GSI_SAME_STMT);
5004 nthreads = fold_convert (sizetype, nthreads);
5005 alloc_align = TYPE_ALIGN_UNIT (long_long_integer_type_node);
5006 size = expand_omp_scantemp_alloc (clauses, NULL_TREE, sz,
5007 alloc_align, nthreads, NULL,
5008 false);
5009 size = fold_convert (type, size);
5010 }
5011 else
5012 size = build_int_cst (type, sz);
5013 expand_omp_build_assign (&gsi2, memv, size, false);
5014 mem = build_fold_addr_expr (memv);
5015 }
5016 tree t
5017 = build_call_expr (builtin_decl_explicit (BUILT_IN_GOMP_LOOP_START),
5018 9, t1, t2, t2, t3, t1, null_pointer_node,
5019 null_pointer_node, reductions, mem);
5020 force_gimple_operand_gsi (&gsi2, t, true, NULL_TREE,
5021 true, GSI_SAME_STMT);
5022 if (fd->have_pointer_condtemp)
5023 expand_omp_build_assign (&gsi2, condtemp, memv, false);
5024 if (fd->have_nonctrl_scantemp)
5025 {
5026 tree ptr = fd->have_pointer_condtemp ? condtemp : memv;
5027 expand_omp_scantemp_alloc (clauses, ptr, condtemp_sz,
5028 alloc_align, nthreads, &gsi2, false);
5029 }
5030 if (fd->have_reductemp)
5031 {
5032 gsi_remove (&gsi2, true);
5033 release_ssa_name (gimple_assign_lhs (g));
5034 }
5035 }
5036 switch (gimple_omp_for_kind (fd->for_stmt))
5037 {
5038 case GF_OMP_FOR_KIND_FOR:
5039 nthreads = builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS);
5040 threadid = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
5041 break;
5042 case GF_OMP_FOR_KIND_DISTRIBUTE:
5043 nthreads = builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_TEAMS);
5044 threadid = builtin_decl_explicit (BUILT_IN_OMP_GET_TEAM_NUM);
5045 break;
5046 default:
5047 gcc_unreachable ();
5048 }
5049 nthreads = build_call_expr