Bug Summary

File: build/gcc/pointer-query.cc
Warning: line 2105, column 4
Called C++ object pointer is null

Annotated Source Code


clang -cc1 -cc1 -triple x86_64-unknown-linux-gnu -analyze -disable-free -disable-llvm-verifier -discard-value-names -main-file-name pointer-query.cc -analyzer-store=region -analyzer-opt-analyze-nested-blocks -analyzer-checker=core -analyzer-checker=apiModeling -analyzer-checker=unix -analyzer-checker=deadcode -analyzer-checker=cplusplus -analyzer-checker=security.insecureAPI.UncheckedReturn -analyzer-checker=security.insecureAPI.getpw -analyzer-checker=security.insecureAPI.gets -analyzer-checker=security.insecureAPI.mktemp -analyzer-checker=security.insecureAPI.mkstemp -analyzer-checker=security.insecureAPI.vfork -analyzer-checker=nullability.NullPassedToNonnull -analyzer-checker=nullability.NullReturnedFromNonnull -analyzer-output plist -w -setup-static-analyzer -analyzer-config-compatibility-mode=true -mrelocation-model static -mframe-pointer=none -fmath-errno -fno-rounding-math -mconstructor-aliases -munwind-tables -target-cpu x86-64 -tune-cpu generic -debugger-tuning=gdb -fcoverage-compilation-dir=/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/objdir/gcc -resource-dir /usr/lib64/clang/13.0.0 -D IN_GCC -D HAVE_CONFIG_H -I . -I . -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/. -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../include -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libcpp/include -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libcody -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libdecnumber -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libdecnumber/bid -I ../libdecnumber -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libbacktrace -internal-isystem /usr/bin/../lib64/gcc/x86_64-suse-linux/11/../../../../include/c++/11 -internal-isystem /usr/bin/../lib64/gcc/x86_64-suse-linux/11/../../../../include/c++/11/x86_64-suse-linux -internal-isystem /usr/bin/../lib64/gcc/x86_64-suse-linux/11/../../../../include/c++/11/backward -internal-isystem /usr/lib64/clang/13.0.0/include -internal-isystem /usr/local/include -internal-isystem /usr/bin/../lib64/gcc/x86_64-suse-linux/11/../../../../x86_64-suse-linux/include -internal-externc-isystem /include -internal-externc-isystem /usr/include -O2 -Wno-narrowing -Wwrite-strings -Wno-error=format-diag -Wno-long-long -Wno-variadic-macros -Wno-overlength-strings -fdeprecated-macro -fdebug-compilation-dir=/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/objdir/gcc -ferror-limit 19 -fno-rtti -fgnuc-version=4.2.1 -vectorize-loops -vectorize-slp -analyzer-output=plist-html -analyzer-config silence-checkers=core.NullDereference -faddrsig -D__GCC_HAVE_DWARF2_CFI_ASM=1 -o /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/objdir/clang-static-analyzer/2021-11-20-133755-20252-1/report-aCIxlO.plist -x c++ /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/pointer-query.cc

/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/pointer-query.cc

1 /* Definitions of the pointer_query and related classes.
2 
3    Copyright (C) 2020-2021 Free Software Foundation, Inc.
4 
5    This file is part of GCC.
6 
7    GCC is free software; you can redistribute it and/or modify it under
8    the terms of the GNU General Public License as published by the Free
9    Software Foundation; either version 3, or (at your option) any later
10    version.
11 
12    GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13    WARRANTY; without even the implied warranty of MERCHANTABILITY or
14    FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
15    for more details.
16 
17    You should have received a copy of the GNU General Public License
18    along with GCC; see the file COPYING3.  If not see
19    <http://www.gnu.org/licenses/>.  */
20 
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "tree.h"
26 #include "gimple.h"
27 #include "stringpool.h"
28 #include "tree-vrp.h"
29 #include "diagnostic-core.h"
30 #include "fold-const.h"
31 #include "tree-object-size.h"
32 #include "tree-ssa-strlen.h"
33 #include "langhooks.h"
34 #include "stringpool.h"
35 #include "attribs.h"
36 #include "gimple-fold.h"
37 #include "gimple-ssa.h"
38 #include "intl.h"
39 #include "attr-fnspec.h"
40 #include "gimple-range.h"
41 #include "pointer-query.h"
42 #include "tree-pretty-print.h"
43 #include "tree-ssanames.h"
44 #include "target.h"
45 
46 static bool compute_objsize_r (tree, gimple *, int, access_ref *,
47                                ssa_name_limit_t &, pointer_query *);
48 
49 /* Wrapper around the wide_int overload of get_range that accepts
50    offset_int instead.  For middle end expressions returns the same
51    result.  For a subset of nonconstant expressions emitted by the front
52    end determines a more precise range than would be possible otherwise.  */
53
54 static bool
55 get_offset_range (tree x, gimple *stmt, offset_int r[2], range_query *rvals)
56 {
57   offset_int add = 0;
58   if (TREE_CODE (x) == PLUS_EXPR)
59     {
60       /* Handle constant offsets in pointer addition expressions seen
61          in the front end IL.  */
62       tree op = TREE_OPERAND (x, 1);
63       if (TREE_CODE (op) == INTEGER_CST)
64         {
65           op = fold_convert (signed_type_for (TREE_TYPE (op)), op);
66           add = wi::to_offset (op);
67           x = TREE_OPERAND (x, 0);
68         }
69     }
70 
71   if (TREE_CODE (x) == NOP_EXPR)
72     /* Also handle conversions to sizetype seen in the front end IL.  */
73     x = TREE_OPERAND (x, 0);
74 
75   tree type = TREE_TYPE (x);
76   if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
77     return false;
78 
79   if (TREE_CODE (x) != INTEGER_CST
80       && TREE_CODE (x) != SSA_NAME)
81     {
82       if (TYPE_UNSIGNED (type)
83           && TYPE_PRECISION (type) == TYPE_PRECISION (sizetype))
84         type = signed_type_for (type);
85 
86       r[0] = wi::to_offset (TYPE_MIN_VALUE (type)) + add;
87       r[1] = wi::to_offset (TYPE_MAX_VALUE (type)) + add;
88       return x;
89     }
90 
91   wide_int wr[2];
92   if (!get_range (x, stmt, wr, rvals))
93     return false;
94 
95   signop sgn = SIGNED;
96   /* Only convert signed integers or unsigned sizetype to a signed
97      offset and avoid converting large positive values in narrower
98      types to negative offsets.  */
99   if (TYPE_UNSIGNED (type)
100       && wr[0].get_precision () < TYPE_PRECISION (sizetype))
101     sgn = UNSIGNED;
102 
103   r[0] = offset_int::from (wr[0], sgn);
104   r[1] = offset_int::from (wr[1], sgn);
105   return true;
106 }
107
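A minimal usage sketch of the contract above (editorial; the names STMT
and QRY are assumed from a caller's context, not taken from this file):

    tree sizarg = gimple_call_arg (stmt, 2);
    offset_int rng[2];
    if (get_offset_range (sizarg, stmt, rng, qry->rvals))
      {
        /* rng[0] and rng[1] bound the value as signed offsets;
           e.g., an unsigned 32-bit value in [1, 8] yields [1, 8].  */
      }
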
108 /* Return the argument that the call STMT to a built-in function returns
109    or null if it doesn't.  On success, set OFFRNG[] to the range of offsets
110    from the argument reflected in the value returned by the built-in if it
111    can be determined, otherwise to 0 and HWI_M1U respectively.  Set
112    *PAST_END for functions like mempcpy that might return a past-the-end
113    pointer (most functions return a dereferenceable pointer to an existing
114    element of an array).  */
115 
116 static tree
117 gimple_call_return_array (gimple *stmt, offset_int offrng[2], bool *past_end,
118                           ssa_name_limit_t &snlim, pointer_query *qry)
119 {
120   /* Clear and set below for the rare function(s) that might return
121      a past-the-end pointer.  */
122   *past_end = false;
123 
124   {
125     /* Check for attribute fnspec to see if the function returns one
126        of its arguments.  */
127     attr_fnspec fnspec = gimple_call_fnspec (as_a <gcall *>(stmt));
128     unsigned int argno;
129     if (fnspec.returns_arg (&argno))
130       {
131         /* Functions return the first argument (not a range).  */
132         offrng[0] = offrng[1] = 0;
133         return gimple_call_arg (stmt, argno);
134       }
135   }
136 
137   if (gimple_call_num_args (stmt) < 1)
138     return NULL_TREE;
139 
140   tree fn = gimple_call_fndecl (stmt);
141   if (!gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
142     {
143       /* See if this is a call to placement new.  */
144       if (!fn
145           || !DECL_IS_OPERATOR_NEW_P (fn)
146           || DECL_IS_REPLACEABLE_OPERATOR_NEW_P (fn))
147         return NULL_TREE;
148 
149       /* Check the mangling, keeping in mind that operator new takes
150          a size_t which could be unsigned int or unsigned long.  */
151       tree fname = DECL_ASSEMBLER_NAME (fn);
152       if (!id_equal (fname, "_ZnwjPv")      // ordinary form
153           && !id_equal (fname, "_ZnwmPv")   // ordinary form
154           && !id_equal (fname, "_ZnajPv")   // array form
155           && !id_equal (fname, "_ZnamPv"))  // array form
156         return NULL_TREE;
157 
158       if (gimple_call_num_args (stmt) != 2)
159         return NULL_TREE;
160 
161       /* Allocation functions return a pointer to the beginning.  */
162       offrng[0] = offrng[1] = 0;
163       return gimple_call_arg (stmt, 1);
164     }
165 
166   switch (DECL_FUNCTION_CODE (fn))
167     {
168     case BUILT_IN_MEMCPY:
169     case BUILT_IN_MEMCPY_CHK:
170     case BUILT_IN_MEMMOVE:
171     case BUILT_IN_MEMMOVE_CHK:
172     case BUILT_IN_MEMSET:
173     case BUILT_IN_STRCAT:
174     case BUILT_IN_STRCAT_CHK:
175     case BUILT_IN_STRCPY:
176     case BUILT_IN_STRCPY_CHK:
177     case BUILT_IN_STRNCAT:
178     case BUILT_IN_STRNCAT_CHK:
179     case BUILT_IN_STRNCPY:
180     case BUILT_IN_STRNCPY_CHK:
181       /* Functions return the first argument (not a range).  */
182       offrng[0] = offrng[1] = 0;
183       return gimple_call_arg (stmt, 0);
184 
185     case BUILT_IN_MEMPCPY:
186     case BUILT_IN_MEMPCPY_CHK:
187       {
188         /* The returned pointer is in a range constrained by the smaller
189            of the upper bound of the size argument and the source object
190            size.  */
191         offrng[0] = 0;
192         offrng[1] = HOST_WIDE_INT_M1U;
193         tree off = gimple_call_arg (stmt, 2);
194         bool off_valid = get_offset_range (off, stmt, offrng, qry->rvals);
195         if (!off_valid || offrng[0] != offrng[1])
196           {
197             /* If the offset is either indeterminate or in some range,
198                try to constrain its upper bound to at most the size
199                of the source object.  */
200             access_ref aref;
201             tree src = gimple_call_arg (stmt, 1);
202             if (compute_objsize (src, stmt, 1, &aref, qry)
203                 && aref.sizrng[1] < offrng[1])
204               offrng[1] = aref.sizrng[1];
205           }
206 
207         /* Mempcpy may return a past-the-end pointer.  */
208         *past_end = true;
209         return gimple_call_arg (stmt, 0);
210       }
211 
212     case BUILT_IN_MEMCHR:
213       {
214         tree off = gimple_call_arg (stmt, 2);
215         if (get_offset_range (off, stmt, offrng, qry->rvals))
216           offrng[1] -= 1;
217         else
218           offrng[1] = HOST_WIDE_INT_M1U;
219 
220         offrng[0] = 0;
221         return gimple_call_arg (stmt, 0);
222       }
223 
224     case BUILT_IN_STRCHR:
225     case BUILT_IN_STRRCHR:
226     case BUILT_IN_STRSTR:
227       offrng[0] = 0;
228       offrng[1] = HOST_WIDE_INT_M1U;
229       return gimple_call_arg (stmt, 0);
230 
231     case BUILT_IN_STPCPY:
232     case BUILT_IN_STPCPY_CHK:
233       {
234         access_ref aref;
235         tree src = gimple_call_arg (stmt, 1);
236         if (compute_objsize_r (src, stmt, 1, &aref, snlim, qry))
237           offrng[1] = aref.sizrng[1] - 1;
238         else
239           offrng[1] = HOST_WIDE_INT_M1U;
240 
241         offrng[0] = 0;
242         return gimple_call_arg (stmt, 0);
243       }
244 
245     case BUILT_IN_STPNCPY:
246     case BUILT_IN_STPNCPY_CHK:
247       {
248         /* The returned pointer is in a range between the first argument
249            and it plus the smaller of the upper bound of the size argument
250            and the source object size.  */
251         offrng[1] = HOST_WIDE_INT_M1U;
252         tree off = gimple_call_arg (stmt, 2);
253         if (!get_offset_range (off, stmt, offrng, qry->rvals)
254             || offrng[0] != offrng[1])
255           {
256             /* If the offset is either indeterminate or in some range,
257                try to constrain its upper bound to at most the size
258                of the source object.  */
259             access_ref aref;
260             tree src = gimple_call_arg (stmt, 1);
261             if (compute_objsize_r (src, stmt, 1, &aref, snlim, qry)
262                 && aref.sizrng[1] < offrng[1])
263               offrng[1] = aref.sizrng[1];
264           }
265 
266         /* When the source is the empty string the returned pointer is
267            a copy of the argument.  Otherwise stpncpy can also return
268            a past-the-end pointer.  */
269         offrng[0] = 0;
270         *past_end = true;
271         return gimple_call_arg (stmt, 0);
272       }
273 
274     default:
275       break;
276     }
277 
278   return NULL_TREE;
279 }
280
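To make the mempcpy case concrete, a sketch of what a caller gets back
for a call in analyzed source (DST, SRC and N are hypothetical):

    char src[8];
    void *p = __builtin_mempcpy (dst, src, n);
    /* gimple_call_return_array returns the tree for DST, sets OFFRNG
       to N's range (clamped to at most 8, the size of SRC, when N's
       upper bound is unknown or larger), and sets *PAST_END since P
       may point just past the last byte written.  */
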
281 /* Return true when EXP's range can be determined and set RANGE[] to it
282    after adjusting it if necessary to make EXP represent a valid size
283    of an object, or a valid size argument to an allocation function declared
284    with attribute alloc_size (whose argument may be signed), or to a string
285    manipulation function like memset.
286    When ALLOW_ZERO is set in FLAGS, allow returning a range of [0, 0] for
287    a size in an anti-range [1, N] where N > PTRDIFF_MAX.  A zero range is
288    a (nearly) invalid argument to allocation functions like malloc but it
289    is a valid argument to functions like memset.
290    When USE_LARGEST is set in FLAGS set RANGE to the largest valid subrange
291    in a multi-range, otherwise to the smallest valid subrange.  */
292
293 bool
294 get_size_range (range_query *query, tree exp, gimple *stmt, tree range[2],
295                 int flags /* = 0 */)
296 {
297   if (!exp)
298     return false;
299 
300   if (tree_fits_uhwi_p (exp))
301     {
302       /* EXP is a constant.  */
303       range[0] = range[1] = exp;
304       return true;
305     }
306 
307   tree exptype = TREE_TYPE (exp);
308   bool integral = INTEGRAL_TYPE_P (exptype);
309 
310   wide_int min, max;
311   enum value_range_kind range_type;
312 
313   if (!query)
314     query = get_range_query (cfun);
315 
316   if (integral)
317     {
318       value_range vr;
319 
320       query->range_of_expr (vr, exp, stmt);
321 
322       if (vr.undefined_p ())
323         vr.set_varying (TREE_TYPE (exp));
324       range_type = vr.kind ();
325       min = wi::to_wide (vr.min ());
326       max = wi::to_wide (vr.max ());
327     }
328   else
329     range_type = VR_VARYING;
330 
331   if (range_type == VR_VARYING)
332     {
333       if (integral)
334         {
335           /* Use the full range of the type of the expression when
336              no value range information is available.  */
337           range[0] = TYPE_MIN_VALUE (exptype);
338           range[1] = TYPE_MAX_VALUE (exptype);
339           return true;
340         }
341 
342       range[0] = NULL_TREE;
343       range[1] = NULL_TREE;
344       return false;
345     }
346 
347   unsigned expprec = TYPE_PRECISION (exptype);
348 
349   bool signed_p = !TYPE_UNSIGNED (exptype);
350 
351   if (range_type == VR_ANTI_RANGE)
352     {
353       if (signed_p)
354         {
355           if (wi::les_p (max, 0))
356             {
357               /* EXP is not in a strictly negative range.  That means
358                  it must be in some (not necessarily strictly) positive
359                  range which includes zero.  Since in signed to unsigned
360                  conversions negative values end up converted to large
361                  positive values, and otherwise they are not valid sizes,
362                  the resulting range is in both cases [0, TYPE_MAX].  */
363               min = wi::zero (expprec);
364               max = wi::to_wide (TYPE_MAX_VALUE (exptype));
365             }
366           else if (wi::les_p (min - 1, 0))
367             {
368               /* EXP is not in a negative-positive range.  That means EXP
369                  is either negative, or greater than max.  Since negative
370                  sizes are invalid make the range [MAX + 1, TYPE_MAX].  */
371               min = max + 1;
372               max = wi::to_wide (TYPE_MAX_VALUE (exptype));
373             }
374           else
375             {
376               max = min - 1;
377               min = wi::zero (expprec);
378             }
379         }
380       else
381         {
382           wide_int maxsize = wi::to_wide (max_object_size ());
383           min = wide_int::from (min, maxsize.get_precision (), UNSIGNED);
384           max = wide_int::from (max, maxsize.get_precision (), UNSIGNED);
385           if (wi::eq_p (0, min - 1))
386             {
387               /* EXP is unsigned and not in the range [1, MAX].  That means
388                  it's either zero or greater than MAX.  Even though 0 would
389                  normally be detected by -Walloc-zero, unless ALLOW_ZERO
390                  is set, set the range to [MAX, TYPE_MAX] so that when MAX
391                  is greater than the limit the whole range is diagnosed.  */
392               wide_int maxsize = wi::to_wide (max_object_size ());
393               if (flags & SR_ALLOW_ZERO)
394                 {
395                   if (wi::leu_p (maxsize, max + 1)
396                       || !(flags & SR_USE_LARGEST))
397                     min = max = wi::zero (expprec);
398                   else
399                     {
400                       min = max + 1;
401                       max = wi::to_wide (TYPE_MAX_VALUE (exptype));
402                     }
403                 }
404               else
405                 {
406                   min = max + 1;
407                   max = wi::to_wide (TYPE_MAX_VALUE (exptype));
408                 }
409             }
410           else if ((flags & SR_USE_LARGEST)
411                    && wi::ltu_p (max + 1, maxsize))
412             {
413               /* When USE_LARGEST is set and the larger of the two subranges
414                  is a valid size, use it...  */
415               min = max + 1;
416               max = maxsize;
417             }
418           else
419             {
420               /* ...otherwise use the smaller subrange.  */
421               max = min - 1;
422               min = wi::zero (expprec);
423             }
424         }
425     }
426 
427   range[0] = wide_int_to_tree (exptype, min);
428   range[1] = wide_int_to_tree (exptype, max);
429 
430   return true;
431 }
432
433 bool
434 get_size_range (tree exp, tree range[2], int flags /* = 0 */)
435 {
436   return get_size_range (/*query=*/NULL, exp, /*stmt=*/NULL, range, flags);
437 }
438
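A short sketch of the adjustment described above, assuming a signed
variable N whose computed anti-range is ~[0, 7] (i.e., N < 0 or N > 7):

    tree rng[2];
    if (get_size_range (n, rng))
      {
        /* Negative values are invalid sizes, so the anti-range is
           resolved to the valid subrange [8, INT_MAX]; rng[0] and
           rng[1] hold the corresponding INTEGER_CSTs.  */
      }
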
439 /* If STMT is a call to an allocation function, returns the constant
440    maximum size of the object allocated by the call represented as
441    sizetype.  If nonnull, sets RNG1[] to the range of the size.
442    When nonnull, uses RVALS for range information, otherwise gets global
443    range info.
444    Returns null when STMT is not a call to a valid allocation function.  */
445 
446 tree
447 gimple_call_alloc_size (gimple *stmt, wide_int rng1[2] /* = NULL */,
448                         range_query *qry /* = NULL */)
449 {
450   if (!stmt || !is_gimple_call (stmt))
451     return NULL_TREE;
452 
453   tree allocfntype;
454   if (tree fndecl = gimple_call_fndecl (stmt))
455     allocfntype = TREE_TYPE (fndecl);
456   else
457     allocfntype = gimple_call_fntype (stmt);
458 
459   if (!allocfntype)
460     return NULL_TREE;
461 
462   unsigned argidx1 = UINT_MAX, argidx2 = UINT_MAX;
463   tree at = lookup_attribute ("alloc_size", TYPE_ATTRIBUTES (allocfntype));
464   if (!at)
465     {
466       if (!gimple_call_builtin_p (stmt, BUILT_IN_ALLOCA_WITH_ALIGN))
467         return NULL_TREE;
468 
469       argidx1 = 0;
470     }
471 
472   unsigned nargs = gimple_call_num_args (stmt);
473 
474   if (argidx1 == UINT_MAX)
475     {
476       tree atval = TREE_VALUE (at);
477       if (!atval)
478         return NULL_TREE;
479 
480       argidx1 = TREE_INT_CST_LOW (TREE_VALUE (atval)) - 1;
481       if (nargs <= argidx1)
482         return NULL_TREE;
483 
484       atval = TREE_CHAIN (atval);
485       if (atval)
486         {
487           argidx2 = TREE_INT_CST_LOW (TREE_VALUE (atval)) - 1;
488           if (nargs <= argidx2)
489             return NULL_TREE;
490         }
491     }
492 
493   tree size = gimple_call_arg (stmt, argidx1);
494 
495   wide_int rng1_buf[2];
496   /* If RNG1 is not set, use the buffer.  */
497   if (!rng1)
498     rng1 = rng1_buf;
499 
500   /* Use maximum precision to avoid overflow below.  */
501   const int prec = ADDR_MAX_PRECISION;
502 
503   {
504     tree r[2];
505     /* Determine the largest valid range size, including zero.  */
506     if (!get_size_range (qry, size, stmt, r, SR_ALLOW_ZERO | SR_USE_LARGEST))
507       return NULL_TREE;
508     rng1[0] = wi::to_wide (r[0], prec);
509     rng1[1] = wi::to_wide (r[1], prec);
510   }
511 
512   if (argidx2 > nargs && TREE_CODE (size) == INTEGER_CST)
513     return fold_convert (sizetype, size);
514 
515   /* To handle ranges do the math in wide_int and return the product
516      of the upper bounds as a constant.  Ignore anti-ranges.  */
517   tree n = argidx2 < nargs ? gimple_call_arg (stmt, argidx2) : integer_one_node;
518   wide_int rng2[2];
519   {
520     tree r[2];
521     /* As above, use the full non-negative range on failure.  */
522     if (!get_size_range (qry, n, stmt, r, SR_ALLOW_ZERO | SR_USE_LARGEST))
523       return NULL_TREE;
524     rng2[0] = wi::to_wide (r[0], prec);
525     rng2[1] = wi::to_wide (r[1], prec);
526   }
527 
528   /* Compute products of both bounds for the caller but return the lesser
529      of SIZE_MAX and the product of the upper bounds as a constant.  */
530   rng1[0] = rng1[0] * rng2[0];
531   rng1[1] = rng1[1] * rng2[1];
532 
533   const tree size_max = TYPE_MAX_VALUE (sizetype);
534   if (wi::gtu_p (rng1[1], wi::to_wide (size_max, prec)))
535     {
536       rng1[1] = wi::to_wide (size_max, prec);
537       return size_max;
538     }
539 
540   return wide_int_to_tree (sizetype, rng1[1]);
541 }
542
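A sketch of the attribute this function keys on; MY_CALLOC is a
hypothetical declaration, not part of GCC:

    void *my_calloc (size_t n, size_t sz)
      __attribute__ ((alloc_size (1, 2)));

    /* For a call my_calloc (n, 4) with n known to be in [0, 10],
       both bounds are multiplied: RNG1 becomes [0, 40] and the
       constant 40 is returned as sizetype.  */
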
543 /* For an access to an object referred to by the function parameter PTR
544    of pointer type, set RNG[] to the range of sizes of the object
545    obtained from the attribute access specification for the current function.
546    Set STATIC_ARRAY if the array parameter has been declared [static].
547    Return the function parameter on success and null otherwise.  */
548
549 static tree
550 gimple_parm_array_size (tree ptr, wide_int rng[2],
551                         bool *static_array /* = NULL */)
552 {
553   /* For a function argument try to determine the byte size of the array
554      from the current function declaration (e.g., attribute access or
555      related).  */
556   tree var = SSA_NAME_VAR (ptr);
557   if (TREE_CODE (var) != PARM_DECL)
558     return NULL_TREE;
559 
560   const unsigned prec = TYPE_PRECISION (sizetype);
561 
562   rdwr_map rdwr_idx;
563   attr_access *access = get_parm_access (rdwr_idx, var);
564   if (!access)
565     return NULL_TREE;
566 
567   if (access->sizarg != UINT_MAX)
568     {
569       /* TODO: Try to extract the range from the argument based on
570          those of subsequent assertions or based on known calls to
571          the current function.  */
572       return NULL_TREE;
573     }
574 
575   if (!access->minsize)
576     return NULL_TREE;
577 
578   /* Only consider ordinary array bound at level 2 (or above if it's
579      ever added).  */
580   if (warn_array_parameter < 2 && !access->static_p)
581     return NULL_TREE;
582 
583   if (static_array)
584     *static_array = access->static_p;
585 
586   rng[0] = wi::zero (prec);
587   rng[1] = wi::uhwi (access->minsize, prec);
588   /* Multiply the array bound encoded in the attribute by the size
589      of what the pointer argument to which it decays points to.  */
590   tree eltype = TREE_TYPE (TREE_TYPE (ptr));
591   tree size = TYPE_SIZE_UNIT (eltype);
592   if (!size || TREE_CODE (size) != INTEGER_CST)
593     return NULL_TREE;
594 
595   rng[1] *= wi::to_wide (size, prec);
596   return var;
597 }
598
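A sketch of the kind of parameter this recovers a size for; F is a
hypothetical declaration:

    void f (int a[static 8]);

    /* The access attribute records the minimum bound of 8 elements,
       so RNG is set to [0, 8 * sizeof (int)] = [0, 32], *STATIC_ARRAY
       is set, and the PARM_DECL for A is returned.  */
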
599 /* Given a statement STMT, set the bounds of the reference to at most
600    as many bytes as BOUND (or to unknown when BOUND is null), and to at
601    least one byte when MINACCESS is true, unless BOUND is a constant zero.
602    STMT is used for context to get accurate range info.  */
603 
604 access_ref::access_ref (range_query *qry /* = nullptr */,
605                         tree bound /* = NULL_TREE */,
606                         gimple *stmt /* = nullptr */,
607                         bool minaccess /* = false */)
608 : ref (), eval ([](tree x){ return x; }), deref (), trail1special (true),
609   base0 (true), parmarray ()
610 {
611   /* Set to valid.  */
612   offrng[0] = offrng[1] = 0;
613   offmax[0] = offmax[1] = 0;
614   /* Invalidate.  */
615   sizrng[0] = sizrng[1] = -1;
616 
617   /* Set the default bounds of the access and adjust below.  */
618   bndrng[0] = minaccess ? 1 : 0;
619   bndrng[1] = HOST_WIDE_INT_M1U;
620 
621   /* When BOUND is nonnull and a range can be extracted from it,
622      set the bounds of the access to reflect both it and MINACCESS.
623      BNDRNG[0] is the size of the minimum access.  */
624   tree rng[2];
625   if (bound && get_size_range (qry, bound, stmt, rng, SR_ALLOW_ZERO))
626     {
627       bndrng[0] = wi::to_offset (rng[0]);
628       bndrng[1] = wi::to_offset (rng[1]);
629       bndrng[0] = bndrng[0] > 0 && minaccess ? 1 : 0;
630     }
631 }
632
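A worked sketch of the bound handling above, assuming BOUND's computed
range is [2, 32]:

    access_ref aref (qry, bound, stmt, /* minaccess = */ true);
    /* aref.bndrng is [1, 32]: the lower bound is known nonzero, so at
       least one byte is accessed.  With a range of [0, 32] instead,
       bndrng would be [0, 32] since the access may be empty.  */
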
633 /* Return the PHI node REF refers to or null if it doesn't.  */
634 
635 gphi *
636 access_ref::phi () const
637 {
638   if (!ref || TREE_CODE (ref) != SSA_NAME)
639     return NULL;
640 
641   gimple *def_stmt = SSA_NAME_DEF_STMT (ref);
642   if (!def_stmt || gimple_code (def_stmt) != GIMPLE_PHI)
643     return NULL;
644 
645   return as_a <gphi *> (def_stmt);
646 }
647
648 /* Determine and return the largest object to which *THIS refers.  If
649    *THIS refers to a PHI and PREF is nonnull, fill *PREF with the details
650    of the object determined by compute_objsize(ARG, OSTYPE) for each PHI
651    argument ARG.  */
652 
653 tree
654 access_ref::get_ref (vec<access_ref> *all_refs,
655                      access_ref *pref /* = NULL */,
656                      int ostype /* = 1 */,
657                      ssa_name_limit_t *psnlim /* = NULL */,
658                      pointer_query *qry /* = NULL */) const
659 {
660   gphi *phi_stmt = this->phi ();
661   if (!phi_stmt)
662     return ref;
663 
664   /* FIXME: Calling get_ref() with a null PSNLIM is dangerous and might
665      cause unbounded recursion.  */
666   ssa_name_limit_t snlim_buf;
667   if (!psnlim)
668     psnlim = &snlim_buf;
669 
670   if (!psnlim->visit_phi (ref))
671     return NULL_TREE;
672 
673   pointer_query empty_qry;
674   if (!qry)
675     qry = &empty_qry;
676 
677   /* The conservative result of the PHI reflecting the offset and size
678      of the largest PHI argument, regardless of whether or not they all
679      refer to the same object.  */
680   access_ref phi_ref;
681   if (pref)
682     {
683       /* The identity of the object has not been determined yet but
684          PREF->REF is set by the caller to the PHI for convenience.
685          The size is negative/invalid and the offset is zero (it's
686          updated only after the identity of the object has been
687          established).  */
688       gcc_assert (pref->sizrng[0] < 0);
689       gcc_assert (pref->offrng[0] == 0 && pref->offrng[1] == 0);
690 
691       phi_ref = *pref;
692     }
693 
694   /* Set if any argument is a function array (or VLA) parameter not
695      declared [static].  */
696   bool parmarray = false;
697   /* The size of the smallest object referenced by the PHI arguments.  */
698   offset_int minsize = 0;
699   const offset_int maxobjsize = wi::to_offset (max_object_size ());
700 
701   const unsigned nargs = gimple_phi_num_args (phi_stmt);
702   for (unsigned i = 0; i < nargs; ++i)
703     {
704       access_ref phi_arg_ref;
705       tree arg = gimple_phi_arg_def (phi_stmt, i);
706       if (!compute_objsize_r (arg, phi_stmt, ostype, &phi_arg_ref, *psnlim,
707                               qry)
708           || phi_arg_ref.sizrng[0] < 0)
709         /* A PHI with all null pointer arguments.  */
710         return NULL_TREE;
711 
712       if (TREE_CODE (arg) == SSA_NAME)
713         qry->put_ref (arg, phi_arg_ref);
714 
715       if (all_refs)
716         all_refs->safe_push (phi_arg_ref);
717 
718       parmarray |= phi_arg_ref.parmarray;
719 
720       const bool nullp = integer_zerop (arg) && (i || i + 1 < nargs);
721 
722       if (phi_ref.sizrng[0] < 0)
723         {
724           /* If PHI_REF doesn't contain a meaningful result yet set it
725              to the result for the first argument.  */
726           if (!nullp)
727             phi_ref = phi_arg_ref;
728 
729           /* Set if the current argument refers to one or more objects of
730              known size (or range of sizes), as opposed to referring to
731              one or more unknown object(s).  */
732           const bool arg_known_size = (phi_arg_ref.sizrng[0] != 0
733                                        || phi_arg_ref.sizrng[1] != maxobjsize);
734           if (arg_known_size)
735             minsize = phi_arg_ref.sizrng[0];
736 
737           continue;
738         }
739 
740       const bool phi_known_size = (phi_ref.sizrng[0] != 0
741                                    || phi_ref.sizrng[1] != maxobjsize);
742 
743       if (phi_known_size && phi_arg_ref.sizrng[0] < minsize)
744         minsize = phi_arg_ref.sizrng[0];
745 
746       /* Disregard null pointers in PHIs with two or more arguments.
747          TODO: Handle this better!  */
748       if (nullp)
749         continue;
750 
751       /* Determine the amount of remaining space in the argument.  */
752       offset_int argrem[2];
753       argrem[1] = phi_arg_ref.size_remaining (argrem);
754 
755       /* Determine the amount of remaining space computed so far and
756          if the remaining space in the argument is more use it instead.  */
757       offset_int phirem[2];
758       phirem[1] = phi_ref.size_remaining (phirem);
759 
760       /* Reset the PHI's BASE0 flag if any of the nonnull arguments
761          refers to an object at an unknown offset.  */
762       if (!phi_arg_ref.base0)
763         phi_ref.base0 = false;
764 
765       if (phirem[1] < argrem[1]
766           || (phirem[1] == argrem[1]
767               && phi_ref.sizrng[1] < phi_arg_ref.sizrng[1]))
768         /* Use the argument with the most space remaining as the result,
769            or the larger one if the space is equal.  */
770         phi_ref = phi_arg_ref;
771     }
772 
773   /* Replace the lower bound of the largest argument with the size
774      of the smallest argument, and set PARMARRAY if any argument
775      was one.  */
776   phi_ref.sizrng[0] = minsize;
777   phi_ref.parmarray = parmarray;
778 
779   if (phi_ref.sizrng[0] < 0)
780     {
781       /* Fail if none of the PHI's arguments resulted in updating PHI_REF
782          (perhaps because they have all been already visited by prior
783          recursive calls).  */
784       psnlim->leave_phi (ref);
785       return NULL_TREE;
786     }
787 
788   /* Avoid changing *THIS.  */
789   if (pref && pref != this)
790     *pref = phi_ref;
791 
792   psnlim->leave_phi (ref);
793 
794   return phi_ref.ref;
795 }
796
797 /* Return the maximum amount of space remaining and, if PMIN is
798    nonnull, set *PMIN to the minimum.  */
799 
800 offset_int
801 access_ref::size_remaining (offset_int *pmin /* = NULL */) const
802 {
803   offset_int minbuf;
804   if (!pmin)
805     pmin = &minbuf;
806 
807   if (sizrng[0] < 0)
808     {
809       /* If the identity of the object hasn't been determined return
810          the maximum size range.  */
811       *pmin = 0;
812       return wi::to_offset (max_object_size ());
813     }
814 
815   /* add_offset() ensures the offset range isn't inverted.  */
816   gcc_checking_assert (offrng[0] <= offrng[1]);
817 
818   if (base0)
819     {
820       /* The offset into the referenced object is zero-based (i.e., it's
821          not referenced by a pointer into the middle of some unknown object).  */
822       if (offrng[0] < 0 && offrng[1] < 0)
823         {
824           /* If the offset is negative the remaining size is zero.  */
825           *pmin = 0;
826           return 0;
827         }
828 
829       if (sizrng[1] <= offrng[0])
830         {
831           /* If the starting offset is greater than or equal to the upper
832              bound on the size of the object, the space remaining is zero.
833              As a special case, if it's equal, set *PMIN to -1 to let
834              the caller know the offset is valid and just past the end.  */
835           *pmin = sizrng[1] == offrng[0] ? -1 : 0;
836           return 0;
837         }
838 
839       /* Otherwise return the size minus the lower bound of the offset.  */
840       offset_int or0 = offrng[0] < 0 ? 0 : offrng[0];
841 
842       *pmin = sizrng[0] - or0;
843       return sizrng[1] - or0;
844     }
845 
846   /* The offset to the referenced object isn't zero-based (i.e., it may
847      refer to a byte other than the first).  The size of such an object
848      is constrained only by the size of the address space (the result
849      of max_object_size()).  */
850   if (sizrng[1] <= offrng[0])
851     {
852       *pmin = 0;
853       return 0;
854     }
855 
856   offset_int or0 = offrng[0] < 0 ? 0 : offrng[0];
857 
858   *pmin = sizrng[0] - or0;
859   return sizrng[1] - or0;
860 }
861
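Two worked examples of the logic above for a known (base0) object:
with sizrng = [4, 8] and offrng = [2, 3], *PMIN becomes 4 - 2 = 2 and
8 - 2 = 6 is returned; with sizrng = [8, 8] and offrng = [8, 8], *PMIN
is set to -1 and 0 is returned, i.e., a valid just-past-the-end offset.
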
862 /* Return true if the offset and object size are in range for SIZE.  */
863 
864 bool
865 access_ref::offset_in_range (const offset_int &size) const
866 {
867   if (size_remaining () < size)
868     return false;
869 
870   if (base0)
871     return offmax[0] >= 0 && offmax[1] <= sizrng[1];
872 
873   offset_int maxoff = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node));
874   return offmax[0] > -maxoff && offmax[1] < maxoff;
875 }
876
877 /* Add the range [MIN, MAX] to the offset range.  For known objects (with
878    zero-based offsets) at least one of whose offset's bounds is in range,
879    constrain the other (or both) to the bounds of the object (i.e., zero
880    and the upper bound of its size).  This improves the quality of
881    diagnostics.  */
882 
883 void access_ref::add_offset (const offset_int &min, const offset_int &max)
884 {
885   if (min <= max)
886     {
887       /* To add an ordinary range just add it to the bounds.  */
888       offrng[0] += min;
889       offrng[1] += max;
890     }
891   else if (!base0)
892     {
893       /* To add an inverted range to an offset to an unknown object
894          expand it to the maximum.  */
895       add_max_offset ();
896       return;
897     }
898   else
899     {
900       /* To add an inverted range to an offset to a known object set
901          the upper bound to the maximum representable offset value
902          (which may be greater than MAX_OBJECT_SIZE).
903          The lower bound is either the sum of the current offset and
904          MIN when abs(MAX) is greater than the former, or zero otherwise.
905          Zero because then the inverted range includes the negative of
906          the lower bound.  */
907       offset_int maxoff = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node));
908       offrng[1] = maxoff;
909 
910       if (max >= 0)
911         {
912           offrng[0] = 0;
913           if (offmax[0] > 0)
914             offmax[0] = 0;
915           return;
916         }
917 
918       offset_int absmax = wi::abs (max);
919       if (offrng[0] < absmax)
920         {
921           offrng[0] += min;
922           /* Cap the lower bound at the upper (set to MAXOFF above)
923              to avoid inadvertently recreating an inverted range.  */
924           if (offrng[1] < offrng[0])
925             offrng[0] = offrng[1];
926         }
927       else
928         offrng[0] = 0;
929     }
930 
931   /* Set the minimum and maximum computed so far.  */
932   if (offrng[1] < 0 && offrng[1] < offmax[0])
933     offmax[0] = offrng[1];
934   if (offrng[0] > 0 && offrng[0] > offmax[1])
935     offmax[1] = offrng[0];
936 
937   if (!base0)
938     return;
939 
940   /* When referencing a known object check to see if the offset computed
941      so far is in bounds...  */
942   offset_int remrng[2];
943   remrng[1] = size_remaining (remrng);
944   if (remrng[1] > 0 || remrng[0] < 0)
945     {
946       /* ...if so, constrain it so that neither bound exceeds the size of
947          the object.  Out of bounds offsets are left unchanged, and, for
948          better or worse, become in bounds later.  They should be detected
949          and diagnosed at the point they first become invalid by
950          -Warray-bounds.  */
951       if (offrng[0] < 0)
952         offrng[0] = 0;
953       if (offrng[1] > sizrng[1])
954         offrng[1] = sizrng[1];
955     }
956 }
957
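A worked example, assuming a base0 object with sizrng = [8, 8] and
offrng = [0, 0]: add_offset (2, 4) simply shifts the range, giving
offrng = [2, 4].  For an inverted range, add_offset (4, -2) takes the
last branch: offrng[1] is first set to PTRDIFF_MAX, MIN is added to
the lower bound giving offrng[0] = 4, and the final clamping against
sizrng caps the upper bound, leaving offrng = [4, 8].
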
958 /* Issue one inform message describing each target of an access REF.
959    MODE indicates whether the access is a read, a write, or both.  */
960 
961 void
962 access_ref::inform_access (access_mode mode) const
963 {
964   const access_ref &aref = *this;
965   if (!aref.ref)
966     return;
967 
968   if (aref.phi ())
969     {
970       /* Set MAXREF to refer to the largest object and fill ALL_REFS
971          with data for all objects referenced by the PHI arguments.  */
972       access_ref maxref;
973       auto_vec<access_ref> all_refs;
974       if (!get_ref (&all_refs, &maxref))
975         return;
976 
977       /* Except for MAXREF, the rest of the arguments' offsets need not
978          reflect one added to the PHI itself.  Determine the latter from
979          MAXREF on which the result is based.  */
980       const offset_int orng[] =
981         {
982           offrng[0] - maxref.offrng[0],
983           wi::smax (offrng[1] - maxref.offrng[1], offrng[0]),
984         };
985 
986       /* Add the final PHI's offset to that of each of the arguments
987          and recurse to issue an inform message for it.  */
988       for (unsigned i = 0; i != all_refs.length (); ++i)
989         {
990           /* Skip any PHIs; those could lead to infinite recursion.  */
991           if (all_refs[i].phi ())
992             continue;
993 
994           all_refs[i].add_offset (orng[0], orng[1]);
995           all_refs[i].inform_access (mode);
996         }
997       return;
998     }
999 
1000   /* Convert offset range and avoid including a zero range since it
1001      isn't necessarily meaningful.  */
1002   HOST_WIDE_INT diff_min = tree_to_shwi (TYPE_MIN_VALUE (ptrdiff_type_node));
1003   HOST_WIDE_INT diff_max = tree_to_shwi (TYPE_MAX_VALUE (ptrdiff_type_node));
1004   HOST_WIDE_INT minoff;
1005   HOST_WIDE_INT maxoff = diff_max;
1006   if (wi::fits_shwi_p (aref.offrng[0]))
1007     minoff = aref.offrng[0].to_shwi ();
1008   else
1009     minoff = aref.offrng[0] < 0 ? diff_min : diff_max;
1010 
1011   if (wi::fits_shwi_p (aref.offrng[1]))
1012     maxoff = aref.offrng[1].to_shwi ();
1013 
1014   if (maxoff <= diff_min || maxoff >= diff_max)
1015     /* Avoid mentioning an upper bound that's equal to or in excess
1016        of the maximum of ptrdiff_t.  */
1017     maxoff = minoff;
1018 
1019   /* Convert size range and always include it since all sizes are
1020      meaningful.  */
1021   unsigned long long minsize = 0, maxsize = 0;
1022   if (wi::fits_shwi_p (aref.sizrng[0])
1023       && wi::fits_shwi_p (aref.sizrng[1]))
1024     {
1025       minsize = aref.sizrng[0].to_shwi ();
1026       maxsize = aref.sizrng[1].to_shwi ();
1027     }
1028 
1029   /* SIZRNG doesn't necessarily have the same range as the allocation
1030      size determined by gimple_call_alloc_size ().  */
1031   char sizestr[80];
1032   if (minsize == maxsize)
1033     sprintf (sizestr, "%llu", minsize);
1034   else
1035     sprintf (sizestr, "[%llu, %llu]", minsize, maxsize);
1036 
1037   char offstr[80];
1038   if (minoff == 0
1039       && (maxoff == 0 || aref.sizrng[1] <= maxoff))
1040     offstr[0] = '\0';
1041   else if (minoff == maxoff)
1042     sprintf (offstr, "%lli", (long long) minoff);
1043   else
1044     sprintf (offstr, "[%lli, %lli]", (long long) minoff, (long long) maxoff);
1045 
1046   location_t loc = UNKNOWN_LOCATION;
1047 
1048   tree ref = this->ref;
1049   tree allocfn = NULL_TREE;
1050   if (TREE_CODE (ref) == SSA_NAME)
1051     {
1052       gimple *stmt = SSA_NAME_DEF_STMT (ref);
1053       if (!stmt)
1054         return;
1055 
1056       if (is_gimple_call (stmt))
1057         {
1058           loc = gimple_location (stmt);
1059           if (gimple_call_builtin_p (stmt, BUILT_IN_ALLOCA_WITH_ALIGN))
1060             {
1061               /* Strip the SSA_NAME suffix from the variable name and
1062                  recreate an identifier with the VLA's original name.  */
1063               ref = gimple_call_lhs (stmt);
1064               if (SSA_NAME_IDENTIFIER (ref))
1065                 {
1066                   ref = SSA_NAME_IDENTIFIER (ref);
1067                   const char *id = IDENTIFIER_POINTER (ref);
1068                   size_t len = strcspn (id, ".$");
1069                   if (!len)
1070                     len = strlen (id);
1071                   ref = get_identifier_with_length (id, len);
1072                 }
1073             }
1074           else
1075             {
1076               /* Except for VLAs, retrieve the allocation function.  */
1077               allocfn = gimple_call_fndecl (stmt);
1078               if (!allocfn)
1079                 allocfn = gimple_call_fn (stmt);
1080               if (TREE_CODE (allocfn) == SSA_NAME)
1081                 {
1082                   /* For an ALLOC_CALL via a function pointer make a small
1083                      effort to determine the destination of the pointer.  */
1084                   gimple *def = SSA_NAME_DEF_STMT (allocfn);
1085                   if (gimple_assign_single_p (def))
1086                     {
1087                       tree rhs = gimple_assign_rhs1 (def);
1088                       if (DECL_P (rhs))
1089                         allocfn = rhs;
1090                       else if (TREE_CODE (rhs) == COMPONENT_REF)
1091                         allocfn = TREE_OPERAND (rhs, 1);
1092                     }
1093                 }
1094             }
1095         }
1096       else if (gimple_nop_p (stmt))
1097         /* Handle DECL_PARM below.  */
1098         ref = SSA_NAME_VAR (ref);
1099       else if (is_gimple_assign (stmt)
1100                && (gimple_assign_rhs_code (stmt) == MIN_EXPR
1101                    || gimple_assign_rhs_code (stmt) == MAX_EXPR))
1102         {
1103           /* MIN or MAX_EXPR here implies a reference to a known object
1104              and either an unknown or distinct one (the latter being
1105              the result of an invalid relational expression).  Determine
1106              the identity of the former and point to it in the note.
1107              TODO: Consider merging with PHI handling.  */
1108           access_ref arg_ref[2];
1109           tree arg = gimple_assign_rhs1 (stmt);
1110           compute_objsize (arg, /* ostype = */ 1, &arg_ref[0]);
1111           arg = gimple_assign_rhs2 (stmt);
1112           compute_objsize (arg, /* ostype = */ 1, &arg_ref[1]);
1113 
1114           /* Use the argument that references a known object with more
1115              space remaining.  */
1116           const bool idx
1117             = (!arg_ref[0].ref || !arg_ref[0].base0
1118                || (arg_ref[0].base0 && arg_ref[1].base0
1119                    && (arg_ref[0].size_remaining ()
1120                        < arg_ref[1].size_remaining ())));
1121 
1122           arg_ref[idx].offrng[0] = offrng[0];
1123           arg_ref[idx].offrng[1] = offrng[1];
1124           arg_ref[idx].inform_access (mode);
1125           return;
1126         }
1127     }
1128 
1129   if (DECL_P (ref))
1130     loc = DECL_SOURCE_LOCATION (ref);
1131   else if (EXPR_P (ref) && EXPR_HAS_LOCATION (ref))
1132     loc = EXPR_LOCATION (ref);
1133   else if (TREE_CODE (ref) != IDENTIFIER_NODE
1134            && TREE_CODE (ref) != SSA_NAME)
1135     return;
1136 
1137   if (mode == access_read_write || mode == access_write_only)
1138     {
1139       if (allocfn == NULL_TREE)
1140         {
1141           if (*offstr)
1142             inform (loc, "at offset %s into destination object %qE of size %s",
1143                     offstr, ref, sizestr);
1144           else
1145             inform (loc, "destination object %qE of size %s", ref, sizestr);
1146           return;
1147         }
1148 
1149       if (*offstr)
1150         inform (loc,
1151                 "at offset %s into destination object of size %s "
1152                 "allocated by %qE", offstr, sizestr, allocfn);
1153       else
1154         inform (loc, "destination object of size %s allocated by %qE",
1155                 sizestr, allocfn);
1156       return;
1157     }
1158 
1159   if (mode == access_read_only)
1160     {
1161       if (allocfn == NULL_TREE)
1162         {
1163           if (*offstr)
1164             inform (loc, "at offset %s into source object %qE of size %s",
1165                     offstr, ref, sizestr);
1166           else
1167             inform (loc, "source object %qE of size %s", ref, sizestr);
1168 
1169           return;
1170         }
1171 
1172       if (*offstr)
1173         inform (loc,
1174                 "at offset %s into source object of size %s allocated by %qE",
1175                 offstr, sizestr, allocfn);
1176       else
1177         inform (loc, "source object of size %s allocated by %qE",
1178                 sizestr, allocfn);
1179       return;
1180     }
1181 
1182   if (allocfn == NULL_TREE)
1183     {
1184       if (*offstr)
1185         inform (loc, "at offset %s into object %qE of size %s",
1186                 offstr, ref, sizestr);
1187       else
1188         inform (loc, "object %qE of size %s", ref, sizestr);
1189 
1190       return;
1191     }
1192 
1193   if (*offstr)
1194     inform (loc,
1195             "at offset %s into object of size %s allocated by %qE",
1196             offstr, sizestr, allocfn);
1197   else
1198     inform (loc, "object of size %s allocated by %qE",
1199             sizestr, allocfn);
1200 }
1201
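For reference, the notes emitted above read, e.g.:

    note: at offset [4, 8] into destination object 'buf' of size 16
    note: destination object of size 96 allocated by 'malloc'

(the first form when the object is known by name, the second when only
the allocation call is known; offsets are omitted when zero).
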
1202 /* Set a bit for the PHI in VISITED and return true if it wasn't
1203    already set.  */
1204 
1205 bool
1206 ssa_name_limit_t::visit_phi (tree ssa_name)
1207 {
1208   if (!visited)
1209     visited = BITMAP_ALLOC (NULL);
1210 
1211   /* Return false if SSA_NAME has already been visited.  */
1212   return bitmap_set_bit (visited, SSA_NAME_VERSION (ssa_name));
1213 }
1214
1215 /* Clear a bit for the PHI in VISITED.  */
1216 
1217 void
1218 ssa_name_limit_t::leave_phi (tree ssa_name)
1219 {
1220   /* Clear the bit for SSA_NAME now that the PHI has been left.  */
1221   bitmap_clear_bit (visited, SSA_NAME_VERSION (ssa_name));
1222 }
1223
1224/* Return false if the SSA_NAME chain length budget has been exhausted,
1225   otherwise decrement the remaining budget and return true. */
1226
1227bool
1228ssa_name_limit_t::next ()
1229{
1230  /* Return false to let the caller avoid recursing beyond
1231     the specified limit. */
1232 if (ssa_def_max == 0)
28
Assuming field 'ssa_def_max' is not equal to 0
29
Taking false branch
1233 return false;
1234
1235 --ssa_def_max;
1236 return true;
30
Returning the value 1, which participates in a condition later
1237}
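
A minimal sketch of the intended caller pattern (only ssa_name_limit_t and its next () are from this file; the walker is hypothetical):

  ssa_name_limit_t snlim;
  for (tree name = ptr; name; name = prev_def (name))  /* hypothetical walker */
    if (!snlim.next ())
      return false;  /* budget exhausted: stop following the SSA chain */
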
1238
1239/* If the SSA_NAME has already been "seen" return a positive value.
1240 Otherwise add it to VISITED. If the SSA_NAME limit has been
1241 reached, return a negative value. Otherwise return zero. */
1242
1243int
1244ssa_name_limit_t::next_phi (tree ssa_name)
1245{
1246 {
1247    gimple *def_stmt = SSA_NAME_DEF_STMT (ssa_name);
1248 /* Return a positive value if the PHI has already been visited. */
1249 if (gimple_code (def_stmt) == GIMPLE_PHI
1250 && !visit_phi (ssa_name))
1251 return 1;
1252 }
1253
1254 /* Return a negative value to let caller avoid recursing beyond
1255 the specified limit. */
1256 if (ssa_def_max == 0)
1257 return -1;
1258
1259 --ssa_def_max;
1260
1261 return 0;
1262}
1263
1264ssa_name_limit_t::~ssa_name_limit_t ()
1265{
1266 if (visited)
1267    BITMAP_FREE (visited);
1268}
1269
1270/* Default ctor. Initialize object with pointers to the range_query
1271 and cache_type instances to use or null. */
1272
1273pointer_query::pointer_query (range_query *qry /* = NULL */,
1274 cache_type *cache /* = NULL */)
1275: rvals (qry), var_cache (cache), hits (), misses (),
1276 failures (), depth (), max_depth ()
1277{
1278 /* No op. */
1279}
1280
1281/* Return a pointer to the cached access_ref instance for the SSA_NAME
1282 PTR if it's there or null otherwise. */
1283
1284const access_ref *
1285pointer_query::get_ref (tree ptr, int ostype /* = 1 */) const
1286{
1287 if (!var_cache)
1288 {
1289 ++misses;
1290 return NULL__null;
1291 }
1292
1293  unsigned version = SSA_NAME_VERSION (ptr);
1294 unsigned idx = version << 1 | (ostype & 1);
1295 if (var_cache->indices.length () <= idx)
1296 {
1297 ++misses;
1298 return NULL__null;
1299 }
1300
1301 unsigned cache_idx = var_cache->indices[idx];
1302 if (var_cache->access_refs.length () <= cache_idx)
1303 {
1304 ++misses;
1305 return NULL__null;
1306 }
1307
1308 access_ref &cache_ref = var_cache->access_refs[cache_idx];
1309 if (cache_ref.ref)
1310 {
1311 ++hits;
1312 return &cache_ref;
1313 }
1314
1315 ++misses;
1316 return NULL__null;
1317}
1318
1319/* Retrieve the access_ref instance for a variable from the cache if it's
1320   there or compute it and insert it into the cache if it's nonnull. */
1321
1322bool
1323pointer_query::get_ref (tree ptr, gimple *stmt, access_ref *pref, int ostype /* = 1 */)
1324{
1325  const unsigned version
1326    = TREE_CODE (ptr) == SSA_NAME ? SSA_NAME_VERSION (ptr) : 0;
1327
1328 if (var_cache && version)
1329 {
1330 unsigned idx = version << 1 | (ostype & 1);
1331 if (idx < var_cache->indices.length ())
1332 {
1333 unsigned cache_idx = var_cache->indices[idx] - 1;
1334 if (cache_idx < var_cache->access_refs.length ()
1335 && var_cache->access_refs[cache_idx].ref)
1336 {
1337 ++hits;
1338 *pref = var_cache->access_refs[cache_idx];
1339 return true;
1340 }
1341 }
1342
1343 ++misses;
1344 }
1345
1346 if (!compute_objsize (ptr, stmt, ostype, pref, this))
1347 {
1348 ++failures;
1349 return false;
1350 }
1351
1352 return true;
1353}
1354
1355/* Add a copy of the access_ref REF for the SSA_NAME to the cache if it's
1356 nonnull. */
1357
1358void
1359pointer_query::put_ref (tree ptr, const access_ref &ref, int ostype /* = 1 */)
1360{
1361 /* Only add populated/valid entries. */
1362 if (!var_cache || !ref.ref || ref.sizrng[0] < 0)
1363 return;
1364
1365 /* Add REF to the two-level cache. */
1366  unsigned version = SSA_NAME_VERSION (ptr);
1367 unsigned idx = version << 1 | (ostype & 1);
1368
1369 /* Grow INDICES if necessary. An index is valid if it's nonzero.
1370 Its value minus one is the index into ACCESS_REFS. Not all
1371 entries are valid. */
1372 if (var_cache->indices.length () <= idx)
1373 var_cache->indices.safe_grow_cleared (idx + 1);
1374
1375 if (!var_cache->indices[idx])
1376 var_cache->indices[idx] = var_cache->access_refs.length () + 1;
1377
1378 /* Grow ACCESS_REF cache if necessary. An entry is valid if its
1379 REF member is nonnull. All entries except for the last two
1380 are valid. Once nonnull, the REF value must stay unchanged. */
1381 unsigned cache_idx = var_cache->indices[idx];
1382 if (var_cache->access_refs.length () <= cache_idx)
1383 var_cache->access_refs.safe_grow_cleared (cache_idx + 1);
1384
1385 access_ref &cache_ref = var_cache->access_refs[cache_idx];
1386 if (cache_ref.ref)
1387 {
1388      gcc_checking_assert (cache_ref.ref == ref.ref);
1389 return;
1390 }
1391
1392 cache_ref = ref;
1393}
1394
1395/* Flush the cache if it's nonnull. */
1396
1397void
1398pointer_query::flush_cache ()
1399{
1400 if (!var_cache)
1401 return;
1402 var_cache->indices.release ();
1403 var_cache->access_refs.release ();
1404}
1405
1406/* Dump statistics and, optionally, cache contents to DUMP_FILE. */
1407
1408void
1409pointer_query::dump (FILE *dump_file, bool contents /* = false */)
1410{
1411 unsigned nused = 0, nrefs = 0;
1412 unsigned nidxs = var_cache->indices.length ();
1413 for (unsigned i = 0; i != nidxs; ++i)
1414 {
1415 unsigned ari = var_cache->indices[i];
1416 if (!ari)
1417 continue;
1418
1419 ++nused;
1420
1421 const access_ref &aref = var_cache->access_refs[ari];
1422 if (!aref.ref)
1423 continue;
1424
1425 ++nrefs;
1426 }
1427
1428 fprintf (dump_file, "pointer_query counters:\n"
1429 " index cache size: %u\n"
1430 " index entries: %u\n"
1431 " access cache size: %u\n"
1432 " access entries: %u\n"
1433 " hits: %u\n"
1434 " misses: %u\n"
1435 " failures: %u\n"
1436 " max_depth: %u\n",
1437 nidxs, nused,
1438 var_cache->access_refs.length (), nrefs,
1439 hits, misses, failures, max_depth);
1440
1441 if (!contents || !nidxs)
1442 return;
1443
1444 fputs ("\npointer_query cache contents:\n", dump_file);
1445
1446 for (unsigned i = 0; i != nidxs; ++i)
1447 {
1448 unsigned ari = var_cache->indices[i];
1449 if (!ari)
1450 continue;
1451
1452 const access_ref &aref = var_cache->access_refs[ari];
1453 if (!aref.ref)
1454 continue;
1455
1456 /* The level-1 cache index corresponds to the SSA_NAME_VERSION
1457 shifted left by one and ORed with the Object Size Type in
1458 the lowest bit. Print the two separately. */
1459 unsigned ver = i >> 1;
1460 unsigned ost = i & 1;
1461
1462 fprintf (dump_file, " %u.%u[%u]: ", ver, ost, ari);
1463 if (tree name = ssa_name (ver)((*(cfun + 0)->gimple_df->ssa_names)[(ver)]))
1464 {
1465 print_generic_expr (dump_file, name);
1466 fputs (" = ", dump_file);
1467 }
1468 else
1469 fprintf (dump_file, " _%u = ", ver);
1470
1471 if (gphi *phi = aref.phi ())
1472 {
1473 fputs ("PHI <", dump_file);
1474 unsigned nargs = gimple_phi_num_args (phi);
1475 for (unsigned i = 0; i != nargs; ++i)
1476 {
1477 tree arg = gimple_phi_arg_def (phi, i);
1478 print_generic_expr (dump_file, arg);
1479 if (i + 1 < nargs)
1480 fputs (", ", dump_file);
1481 }
1482 fputc ('>', dump_file);
1483 }
1484 else
1485 print_generic_expr (dump_file, aref.ref);
1486
1487 if (aref.offrng[0] != aref.offrng[1])
1488 fprintf (dump_file, " + [%lli, %lli]",
1489 (long long) aref.offrng[0].to_shwi (),
1490 (long long) aref.offrng[1].to_shwi ());
1491 else if (aref.offrng[0] != 0)
1492 fprintf (dump_file, " %c %lli",
1493 aref.offrng[0] < 0 ? '-' : '+',
1494 (long long) aref.offrng[0].to_shwi ());
1495
1496 fputc ('\n', dump_file);
1497 }
1498
1499 fputc ('\n', dump_file);
1500}
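
With made-up numbers, the counters block printed above looks like:

  pointer_query counters:
    index cache size: 256
    index entries: 14
    access cache size: 9
    access entries: 8
    hits: 21
    misses: 35
    failures: 2
    max_depth: 4
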
1501
1502/* A helper of compute_objsize_r() to determine the size from an assignment
1503 statement STMT with the RHS of either MIN_EXPR or MAX_EXPR. On success
1504 set PREF->REF to the operand with more or less space remaining,
1505 respectively, if both refer to the same (sub)object, or to PTR if they
1506 might not, and return true. Otherwise, if the identity of neither
1507 operand can be determined, return false. */
1508
1509static bool
1510handle_min_max_size (tree ptr, int ostype, access_ref *pref,
1511 ssa_name_limit_t &snlim, pointer_query *qry)
1512{
1513  gimple *stmt = SSA_NAME_DEF_STMT (ptr);
1514 const tree_code code = gimple_assign_rhs_code (stmt);
1515
1516 /* In a valid MAX_/MIN_EXPR both operands must refer to the same array.
1517 Determine the size/offset of each and use the one with more or less
1518 space remaining, respectively. If either fails, use the information
1519 determined from the other instead, adjusted up or down as appropriate
1520 for the expression. */
1521 access_ref aref[2] = { *pref, *pref };
1522 tree arg1 = gimple_assign_rhs1 (stmt);
1523 if (!compute_objsize_r (arg1, stmt, ostype, &aref[0], snlim, qry))
1524 {
1525 aref[0].base0 = false;
1526 aref[0].offrng[0] = aref[0].offrng[1] = 0;
1527 aref[0].add_max_offset ();
1528 aref[0].set_max_size_range ();
1529 }
1530
1531 tree arg2 = gimple_assign_rhs2 (stmt);
1532 if (!compute_objsize_r (arg2, stmt, ostype, &aref[1], snlim, qry))
1533 {
1534 aref[1].base0 = false;
1535 aref[1].offrng[0] = aref[1].offrng[1] = 0;
1536 aref[1].add_max_offset ();
1537 aref[1].set_max_size_range ();
1538 }
1539
1540 if (!aref[0].ref && !aref[1].ref)
1541 /* Fail if the identity of neither argument could be determined. */
1542 return false;
1543
1544 bool i0 = false;
1545 if (aref[0].ref && aref[0].base0)
1546 {
1547 if (aref[1].ref && aref[1].base0)
1548 {
1549 /* If the object referenced by both arguments has been determined
1550	 set *PREF to the one with more or less space remaining, whichever
1551	 is appropriate for CODE.
1552 TODO: Indicate when the objects are distinct so it can be
1553 diagnosed. */
1554 i0 = code == MAX_EXPR;
1555 const bool i1 = !i0;
1556
1557 if (aref[i0].size_remaining () < aref[i1].size_remaining ())
1558 *pref = aref[i1];
1559 else
1560 *pref = aref[i0];
1561
1562 if (aref[i0].ref != aref[i1].ref)
1563 /* If the operands don't refer to the same (sub)object set
1564 PREF->REF to the SSA_NAME from which STMT was obtained
1565 so that both can be identified in a diagnostic. */
1566 pref->ref = ptr;
1567
1568 return true;
1569 }
1570
1571 /* If only the object referenced by one of the arguments could be
1572 determined, use it and... */
1573 *pref = aref[0];
1574 i0 = true;
1575 }
1576 else
1577 *pref = aref[1];
1578
1579 const bool i1 = !i0;
1580 /* ...see if the offset obtained from the other pointer can be used
1581 to tighten up the bound on the offset obtained from the first. */
1582 if ((code == MAX_EXPR && aref[i1].offrng[1] < aref[i0].offrng[0])
1583 || (code == MIN_EXPR && aref[i0].offrng[0] < aref[i1].offrng[1]))
1584 {
1585 pref->offrng[0] = aref[i0].offrng[0];
1586 pref->offrng[1] = aref[i0].offrng[1];
1587 }
1588
1589 /* Replace PTR->REF with the SSA_NAME to indicate the expression
1590 might not refer to the same (sub)object. */
1591 pref->ref = ptr;
1592 return true;
1593}
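
As an illustration, for a hypothetical GIMPLE statement

  p_3 = MIN_EXPR <p_1, p_2>;

the helper evaluates p_1 and p_2 independently; per the function comment, a MIN_EXPR keeps the operand with more space remaining and a MAX_EXPR the one with less, and PREF->REF is reset to p_3 whenever the two operands may denote distinct objects so a diagnostic can name the merged value.
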
1594
1595/* A helper of compute_objsize_r() to determine the size from ARRAY_REF
1596 AREF. ADDR is true if PTR is the operand of ADDR_EXPR. Return true
1597 on success and false on failure. */
1598
1599static bool
1600handle_array_ref (tree aref, gimple *stmt, bool addr, int ostype,
1601 access_ref *pref, ssa_name_limit_t &snlim,
1602 pointer_query *qry)
1603{
1604  gcc_assert (TREE_CODE (aref) == ARRAY_REF);
1605
1606 ++pref->deref;
1607
1608  tree arefop = TREE_OPERAND (aref, 0);
1609  tree reftype = TREE_TYPE (arefop);
1610  if (!addr && TREE_CODE (TREE_TYPE (reftype)) == POINTER_TYPE)
1611    /* Avoid arrays of pointers.  FIXME: Handle pointers to arrays
1612       of known bound. */
1613    return false;
1614
1615 if (!compute_objsize_r (arefop, stmt, ostype, pref, snlim, qry))
1616 return false;
1617
1618 offset_int orng[2];
1619  tree off = pref->eval (TREE_OPERAND (aref, 1));
1620 range_query *const rvals = qry ? qry->rvals : NULL__null;
1621 if (!get_offset_range (off, NULL__null, orng, rvals))
1622 {
1623 /* Set ORNG to the maximum offset representable in ptrdiff_t. */
1624      orng[1] = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node));
1625 orng[0] = -orng[1] - 1;
1626 }
1627
1628 /* Convert the array index range determined above to a byte
1629 offset. */
1630 tree lowbnd = array_ref_low_bound (aref);
1631 if (!integer_zerop (lowbnd) && tree_fits_uhwi_p (lowbnd))
1632 {
1633 /* Adjust the index by the low bound of the array domain
1634 (normally zero but 1 in Fortran). */
1635 unsigned HOST_WIDE_INTlong lb = tree_to_uhwi (lowbnd);
1636 orng[0] -= lb;
1637 orng[1] -= lb;
1638 }
1639
1640  tree eltype = TREE_TYPE (aref);
1641  tree tpsize = TYPE_SIZE_UNIT (eltype);
1642 if (!tpsize || TREE_CODE (tpsize)((enum tree_code) (tpsize)->base.code) != INTEGER_CST)
1643 {
1644 pref->add_max_offset ();
1645 return true;
1646 }
1647
1648 offset_int sz = wi::to_offset (tpsize);
1649 orng[0] *= sz;
1650 orng[1] *= sz;
1651
1652 if (ostype && TREE_CODE (eltype)((enum tree_code) (eltype)->base.code) == ARRAY_TYPE)
1653 {
1654 /* Except for the permissive raw memory functions which use
1655 the size of the whole object determined above, use the size
1656 of the referenced array. Because the overall offset is from
1657 the beginning of the complete array object add this overall
1658	 offset to the size of the array. */
1659 offset_int sizrng[2] =
1660 {
1661 pref->offrng[0] + orng[0] + sz,
1662 pref->offrng[1] + orng[1] + sz
1663 };
1664 if (sizrng[1] < sizrng[0])
1665 std::swap (sizrng[0], sizrng[1]);
1666 if (sizrng[0] >= 0 && sizrng[0] <= pref->sizrng[0])
1667 pref->sizrng[0] = sizrng[0];
1668 if (sizrng[1] >= 0 && sizrng[1] <= pref->sizrng[1])
1669 pref->sizrng[1] = sizrng[1];
1670 }
1671
1672 pref->add_offset (orng[0], orng[1]);
1673 return true;
1674}
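
A worked example (hypothetical declaration, 4-byte int assumed): for int a[10] and an access a[i] with i known to be in [2, 5], ORNG starts as [2, 5], is scaled by sz == 4 to the byte range [8, 20], and that range is then added to PREF->OFFRNG by add_offset.
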
1675
1676/* A helper of compute_objsize_r() to determine the size from MEM_REF
1677 MREF. Return true on success and false on failure. */
1678
1679static bool
1680handle_mem_ref (tree mref, gimple *stmt, int ostype, access_ref *pref,
1681 ssa_name_limit_t &snlim, pointer_query *qry)
1682{
1683  gcc_assert (TREE_CODE (mref) == MEM_REF);
1684
1685 ++pref->deref;
1686
1687  if (VECTOR_TYPE_P (TREE_TYPE (mref)))
1688 {
1689 /* Hack: Handle MEM_REFs of vector types as those to complete
1690 objects; those may be synthesized from multiple assignments
1691 to consecutive data members (see PR 93200 and 96963).
1692 FIXME: Vectorized assignments should only be present after
1693 vectorization so this hack is only necessary after it has
1694 run and could be avoided in calls from prior passes (e.g.,
1695 tree-ssa-strlen.c).
1696 FIXME: Deal with this more generally, e.g., by marking up
1697 such MEM_REFs at the time they're created. */
1698 ostype = 0;
1699 }
1700
1701  tree mrefop = TREE_OPERAND (mref, 0);
1702 if (!compute_objsize_r (mrefop, stmt, ostype, pref, snlim, qry))
1703 return false;
1704
1705 offset_int orng[2];
1706  tree off = pref->eval (TREE_OPERAND (mref, 1));
1707 range_query *const rvals = qry ? qry->rvals : NULL__null;
1708 if (!get_offset_range (off, NULL__null, orng, rvals))
1709 {
1710 /* Set ORNG to the maximum offset representable in ptrdiff_t. */
1711      orng[1] = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node));
1712 orng[0] = -orng[1] - 1;
1713 }
1714
1715 pref->add_offset (orng[0], orng[1]);
1716 return true;
1717}
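
For example, for an illustrative GIMPLE dereference MEM[(int *)p_1 + 8B], the helper first computes the object p_1 points to and then adds the constant byte range [8, 8] to PREF->OFFRNG.
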
1718
1719/* Helper to compute the size of the object referenced by the PTR
1720 expression which must have pointer type, using Object Size type
1721 OSTYPE (only the least significant 2 bits are used).
1722 On success, sets PREF->REF to the DECL of the referenced object
1723 if it's unique, otherwise to null, PREF->OFFRNG to the range of
1724 offsets into it, and PREF->SIZRNG to the range of sizes of
1725 the object(s).
1726 SNLIM is used to avoid visiting the same PHI operand multiple
1727 times, and, when nonnull, RVALS to determine range information.
1728 Returns true on success, false when a meaningful size (or range)
1729 cannot be determined.
1730
1731 The function is intended for diagnostics and should not be used
1732 to influence code generation or optimization. */
1733
1734static bool
1735compute_objsize_r (tree ptr, gimple *stmt, int ostype, access_ref *pref,
1736 ssa_name_limit_t &snlim, pointer_query *qry)
1737{
1738  STRIP_NOPS (ptr);
1739
1740 const bool addr = TREE_CODE (ptr)((enum tree_code) (ptr)->base.code) == ADDR_EXPR;
1
Assuming field 'code' is not equal to ADDR_EXPR
1741  if (addr)
1.1
'addr' is false
2
Taking false branch
1742 {
1743 --pref->deref;
1744      ptr = TREE_OPERAND (ptr, 0);
1745 }
1746
1747  if (DECL_P (ptr))
3
Assuming the condition is false
4
Taking false branch
1748 {
1749 pref->ref = ptr;
1750
1751 /* Reset the offset in case it was set by a prior call and not
1752 cleared by the caller. The offset is only adjusted after
1753 the identity of the object has been determined. */
1754 pref->offrng[0] = pref->offrng[1] = 0;
1755
1756      if (!addr && POINTER_TYPE_P (TREE_TYPE (ptr)))
1757 {
1758 /* Set the maximum size if the reference is to the pointer
1759 itself (as opposed to what it points to), and clear
1760 BASE0 since the offset isn't necessarily zero-based. */
1761 pref->set_max_size_range ();
1762 pref->base0 = false;
1763 return true;
1764 }
1765
1766 /* Valid offsets into the object are nonnegative. */
1767 pref->base0 = true;
1768
1769 if (tree size = decl_init_size (ptr, false))
1770 if (TREE_CODE (size)((enum tree_code) (size)->base.code) == INTEGER_CST)
1771 {
1772 pref->sizrng[0] = pref->sizrng[1] = wi::to_offset (size);
1773 return true;
1774 }
1775
1776 pref->set_max_size_range ();
1777 return true;
1778 }
1779
1780 const tree_code code = TREE_CODE (ptr)((enum tree_code) (ptr)->base.code);
1781 range_query *const rvals = qry ? qry->rvals : NULL__null;
5
Assuming 'qry' is null
6
'?' condition is false
1782
1783 if (code == BIT_FIELD_REF)
7
Assuming 'code' is not equal to BIT_FIELD_REF
8
Taking false branch
1784 {
1785      tree ref = TREE_OPERAND (ptr, 0);
1786 if (!compute_objsize_r (ref, stmt, ostype, pref, snlim, qry))
1787 return false;
1788
1789      offset_int off = wi::to_offset (pref->eval (TREE_OPERAND (ptr, 2)));
1790 pref->add_offset (off / BITS_PER_UNIT(8));
1791 return true;
1792 }
1793
1794 if (code == COMPONENT_REF)
9
Assuming 'code' is not equal to COMPONENT_REF
10
Taking false branch
1795 {
1796      tree ref = TREE_OPERAND (ptr, 0);
1797      if (TREE_CODE (TREE_TYPE (ref)) == UNION_TYPE)
1798	/* In accesses through union types consider the entire unions
1799	   rather than just their members. */
1800	ostype = 0;
1801      tree field = TREE_OPERAND (ptr, 1);
1802
1803 if (ostype == 0)
1804 {
1805 /* In OSTYPE zero (for raw memory functions like memcpy), use
1806 the maximum size instead if the identity of the enclosing
1807 object cannot be determined. */
1808 if (!compute_objsize_r (ref, stmt, ostype, pref, snlim, qry))
1809 return false;
1810
1811 /* Otherwise, use the size of the enclosing object and add
1812 the offset of the member to the offset computed so far. */
1813 tree offset = byte_position (field);
1814 if (TREE_CODE (offset)((enum tree_code) (offset)->base.code) == INTEGER_CST)
1815 pref->add_offset (wi::to_offset (offset));
1816 else
1817 pref->add_max_offset ();
1818
1819 if (!pref->ref)
1820 /* REF may have been already set to an SSA_NAME earlier
1821 to provide better context for diagnostics. In that case,
1822 leave it unchanged. */
1823 pref->ref = ref;
1824 return true;
1825 }
1826
1827 pref->ref = field;
1828
1829      if (!addr && POINTER_TYPE_P (TREE_TYPE (field)))
1830 {
1831 /* Set maximum size if the reference is to the pointer member
1832 itself (as opposed to what it points to). */
1833 pref->set_max_size_range ();
1834 return true;
1835 }
1836
1837 /* SAM is set for array members that might need special treatment. */
1838 special_array_member sam;
1839 tree size = component_ref_size (ptr, &sam);
1840 if (sam == special_array_member::int_0)
1841 pref->sizrng[0] = pref->sizrng[1] = 0;
1842 else if (!pref->trail1special && sam == special_array_member::trail_1)
1843 pref->sizrng[0] = pref->sizrng[1] = 1;
1844 else if (size && TREE_CODE (size)((enum tree_code) (size)->base.code) == INTEGER_CST)
1845 pref->sizrng[0] = pref->sizrng[1] = wi::to_offset (size);
1846 else
1847 {
1848 /* When the size of the member is unknown it's either a flexible
1849 array member or a trailing special array member (either zero
1850 length or one-element). Set the size to the maximum minus
1851 the constant size of the type. */
1852 pref->sizrng[0] = 0;
1853	  pref->sizrng[1] = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node));
1854	  if (tree recsize = TYPE_SIZE_UNIT (TREE_TYPE (ref)))
1855	    if (TREE_CODE (recsize) == INTEGER_CST)
1856 pref->sizrng[1] -= wi::to_offset (recsize);
1857 }
1858 return true;
1859 }
1860
1861 if (code == ARRAY_REF)
11
Assuming 'code' is not equal to ARRAY_REF
12
Taking false branch
1862 return handle_array_ref (ptr, stmt, addr, ostype, pref, snlim, qry);
1863
1864 if (code == MEM_REF)
13
Assuming 'code' is not equal to MEM_REF
14
Taking false branch
1865 return handle_mem_ref (ptr, stmt, ostype, pref, snlim, qry);
1866
1867 if (code == TARGET_MEM_REF)
15
Assuming 'code' is not equal to TARGET_MEM_REF
16
Taking false branch
1868 {
1869      tree ref = TREE_OPERAND (ptr, 0);
1870 if (!compute_objsize_r (ref, stmt, ostype, pref, snlim, qry))
1871 return false;
1872
1873 /* TODO: Handle remaining operands. Until then, add maximum offset. */
1874 pref->ref = ptr;
1875 pref->add_max_offset ();
1876 return true;
1877 }
1878
1879 if (code == INTEGER_CST)
17
Assuming 'code' is not equal to INTEGER_CST
18
Taking false branch
1880 {
1881 /* Pointer constants other than null are most likely the result
1882 of erroneous null pointer addition/subtraction. Unless zero
1883 is a valid address set size to zero. For null pointers, set
1884 size to the maximum for now since those may be the result of
1885 jump threading. */
1886 if (integer_zerop (ptr))
1887 pref->set_max_size_range ();
1888      else if (POINTER_TYPE_P (TREE_TYPE (ptr)))
1889 {
1890	  tree deref_type = TREE_TYPE (TREE_TYPE (ptr));
1891	  addr_space_t as = TYPE_ADDR_SPACE (deref_type);
1892 if (targetm.addr_space.zero_address_valid (as))
1893 pref->set_max_size_range ();
1894 else
1895 pref->sizrng[0] = pref->sizrng[1] = 0;
1896 }
1897 else
1898 pref->sizrng[0] = pref->sizrng[1] = 0;
1899
1900 pref->ref = ptr;
1901
1902 return true;
1903 }
1904
1905 if (code == STRING_CST)
19
Assuming 'code' is not equal to STRING_CST
20
Taking false branch
1906 {
1907      pref->sizrng[0] = pref->sizrng[1] = TREE_STRING_LENGTH (ptr);
1908 pref->ref = ptr;
1909 return true;
1910 }
1911
1912 if (code == POINTER_PLUS_EXPR)
21
Assuming 'code' is not equal to POINTER_PLUS_EXPR
22
Taking false branch
1913 {
1914      tree ref = TREE_OPERAND (ptr, 0);
1915 if (!compute_objsize_r (ref, stmt, ostype, pref, snlim, qry))
1916 return false;
1917
1918 /* Clear DEREF since the offset is being applied to the target
1919 of the dereference. */
1920 pref->deref = 0;
1921
1922 offset_int orng[2];
1923      tree off = pref->eval (TREE_OPERAND (ptr, 1));
1924 if (get_offset_range (off, NULL__null, orng, rvals))
1925 pref->add_offset (orng[0], orng[1]);
1926 else
1927 pref->add_max_offset ();
1928 return true;
1929 }
1930
1931 if (code == VIEW_CONVERT_EXPR)
23
Assuming 'code' is not equal to VIEW_CONVERT_EXPR
24
Taking false branch
1932 {
1933      ptr = TREE_OPERAND (ptr, 0);
1934 return compute_objsize_r (ptr, stmt, ostype, pref, snlim, qry);
1935 }
1936
1937 if (code == SSA_NAME)
25
Assuming 'code' is equal to SSA_NAME
26
Taking true branch
1938 {
1939 if (!snlim.next ())
27
Calling 'ssa_name_limit_t::next'
31
Returning from 'ssa_name_limit_t::next'
32
Taking false branch
1940 return false;
1941
1942 /* Only process an SSA_NAME if the recursion limit has not yet
1943 been reached. */
1944      if (qry)
32.1
'qry' is null
33
Taking false branch
1945 {
1946 if (++qry->depth)
1947 qry->max_depth = qry->depth;
1948 if (const access_ref *cache_ref = qry->get_ref (ptr))
1949 {
1950 /* If the pointer is in the cache set *PREF to what it refers
1951 to and return success.
1952 FIXME: BNDRNG is determined by each access and so it doesn't
1953 belong in access_ref. Until the design is changed, keep it
1954 unchanged here. */
1955 const offset_int bndrng[2] = { pref->bndrng[0], pref->bndrng[1] };
1956 *pref = *cache_ref;
1957 pref->bndrng[0] = bndrng[0];
1958 pref->bndrng[1] = bndrng[1];
1959 return true;
1960 }
1961 }
1962
1963      stmt = SSA_NAME_DEF_STMT (ptr);
1964 if (is_gimple_call (stmt))
34
Calling 'is_gimple_call'
37
Returning from 'is_gimple_call'
38
Taking false branch
1965 {
1966 /* If STMT is a call to an allocation function get the size
1967 from its argument(s). If successful, also set *PREF->REF
1968 to PTR for the caller to include in diagnostics. */
1969 wide_int wr[2];
1970 if (gimple_call_alloc_size (stmt, wr, rvals))
1971 {
1972 pref->ref = ptr;
1973 pref->sizrng[0] = offset_int::from (wr[0], UNSIGNED);
1974 pref->sizrng[1] = offset_int::from (wr[1], UNSIGNED);
1975 /* Constrain both bounds to a valid size. */
1976 offset_int maxsize = wi::to_offset (max_object_size ());
1977 if (pref->sizrng[0] > maxsize)
1978 pref->sizrng[0] = maxsize;
1979 if (pref->sizrng[1] > maxsize)
1980 pref->sizrng[1] = maxsize;
1981 }
1982 else
1983 {
1984 /* For functions known to return one of their pointer arguments
1985 try to determine what the returned pointer points to, and on
1986 success add OFFRNG which was set to the offset added by
1987 the function (e.g., memchr) to the overall offset. */
1988 bool past_end;
1989 offset_int offrng[2];
1990 if (tree ret = gimple_call_return_array (stmt, offrng,
1991 &past_end, snlim, qry))
1992 {
1993 if (!compute_objsize_r (ret, stmt, ostype, pref, snlim, qry))
1994 return false;
1995
1996 /* Cap OFFRNG[1] to at most the remaining size of
1997 the object. */
1998 offset_int remrng[2];
1999 remrng[1] = pref->size_remaining (remrng);
2000 if (remrng[1] != 0 && !past_end)
2001 /* Decrement the size for functions that never return
2002 a past-the-end pointer. */
2003 remrng[1] -= 1;
2004
2005 if (remrng[1] < offrng[1])
2006 offrng[1] = remrng[1];
2007 pref->add_offset (offrng[0], offrng[1]);
2008 }
2009 else
2010 {
2011 /* For other calls that might return arbitrary pointers
2012 including into the middle of objects set the size
2013 range to maximum, clear PREF->BASE0, and also set
2014		 PREF->REF to PTR so it can be included in diagnostics. */
2015 pref->set_max_size_range ();
2016 pref->base0 = false;
2017 pref->ref = ptr;
2018 }
2019 }
2020 qry->put_ref (ptr, *pref);
2021 return true;
2022 }
2023
2024 if (gimple_nop_p (stmt))
39
Calling 'gimple_nop_p'
42
Returning from 'gimple_nop_p'
43
Taking false branch
2025 {
2026 /* For a function argument try to determine the byte size
2027	     of the array from the current function declaration
2028 (e.g., attribute access or related). */
2029 wide_int wr[2];
2030 bool static_array = false;
2031 if (tree ref = gimple_parm_array_size (ptr, wr, &static_array))
2032 {
2033 pref->parmarray = !static_array;
2034 pref->sizrng[0] = offset_int::from (wr[0], UNSIGNED);
2035 pref->sizrng[1] = offset_int::from (wr[1], UNSIGNED);
2036 pref->ref = ref;
2037 qry->put_ref (ptr, *pref);
2038 return true;
2039 }
2040
2041 pref->set_max_size_range ();
2042 pref->base0 = false;
2043 pref->ref = ptr;
2044 qry->put_ref (ptr, *pref);
2045 return true;
2046 }
2047
2048 if (gimple_code (stmt) == GIMPLE_PHI)
44
Assuming the condition is false
45
Taking false branch
2049 {
2050 pref->ref = ptr;
2051 access_ref phi_ref = *pref;
2052 if (!pref->get_ref (NULL__null, &phi_ref, ostype, &snlim, qry))
2053 return false;
2054 *pref = phi_ref;
2055 pref->ref = ptr;
2056 qry->put_ref (ptr, *pref);
2057 return true;
2058 }
2059
2060 if (!is_gimple_assign (stmt))
46
Calling 'is_gimple_assign'
49
Returning from 'is_gimple_assign'
50
Taking false branch
2061 {
2062 /* Clear BASE0 since the assigned pointer might point into
2063 the middle of the object, set the maximum size range and,
2064	     if the SSA_NAME refers to a function argument, set
2065 PREF->REF to it. */
2066 pref->base0 = false;
2067 pref->set_max_size_range ();
2068 pref->ref = ptr;
2069 return true;
2070 }
2071
2072 tree_code code = gimple_assign_rhs_code (stmt);
2073
2074 if (code == MAX_EXPR || code == MIN_EXPR)
51
Assuming 'code' is not equal to MAX_EXPR
52
Assuming 'code' is not equal to MIN_EXPR
53
Taking false branch
2075 {
2076 if (!handle_min_max_size (ptr, ostype, pref, snlim, qry))
2077 return false;
2078
2079 qry->put_ref (ptr, *pref);
2080 return true;
2081 }
2082
2083 tree rhs = gimple_assign_rhs1 (stmt);
2084
2085 if (code == ASSERT_EXPR)
54
Assuming 'code' is not equal to ASSERT_EXPR
55
Taking false branch
2086 {
2087	  rhs = TREE_OPERAND (rhs, 0);
2088 return compute_objsize_r (rhs, stmt, ostype, pref, snlim, qry);
2089 }
2090
2091 if (code == POINTER_PLUS_EXPR
56
Assuming 'code' is equal to POINTER_PLUS_EXPR
58
Taking true branch
2092      && TREE_CODE (TREE_TYPE (rhs)) == POINTER_TYPE)
57
Assuming field 'code' is equal to POINTER_TYPE
2093 {
2094 /* Compute the size of the object first. */
2095 if (!compute_objsize_r (rhs, stmt, ostype, pref, snlim, qry))
59
Assuming the condition is false
60
Taking false branch
2096 return false;
2097
2098 offset_int orng[2];
2099 tree off = gimple_assign_rhs2 (stmt);
2100 if (get_offset_range (off, stmt, orng, rvals))
61
Taking false branch
2101 pref->add_offset (orng[0], orng[1]);
2102 else
2103 pref->add_max_offset ();
2104
2105 qry->put_ref (ptr, *pref);
62
Called C++ object pointer is null
2106 return true;
2107 }
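
This is the defect the report flags. On the path above the analyzer assumed QRY to be null at step 5 (the guard at line 1781, qry ? qry->rvals : NULL, is what signals that a null QRY is tolerated), entered the SSA_NAME branch with that null QRY at step 32.1, and line 2105 then calls the member function put_ref on it unconditionally. A minimal defensive sketch that would silence the report (not necessarily the upstream fix):

	  if (qry)
	    qry->put_ref (ptr, *pref);

The neighboring unconditional calls at lines 2020, 2037, 2044, 2056, 2079, 2113 and 2132 sit on the same SSA_NAME path and have the same exposure, unless callers are instead required to always pass a non-null QRY, as the public compute_objsize wrapper below already guarantees.
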
2108
2109 if (code == ADDR_EXPR || code == SSA_NAME)
2110 {
2111 if (!compute_objsize_r (rhs, stmt, ostype, pref, snlim, qry))
2112 return false;
2113 qry->put_ref (ptr, *pref);
2114 return true;
2115 }
2116
2117 /* (This could also be an assignment from a nonlocal pointer.) Save
2118 PTR to mention in diagnostics but otherwise treat it as a pointer
2119 to an unknown object. */
2120 pref->ref = rhs;
2121 pref->base0 = false;
2122 pref->set_max_size_range ();
2123 return true;
2124 }
2125
2126 /* Assume all other expressions point into an unknown object
2127 of the maximum valid size. */
2128 pref->ref = ptr;
2129 pref->base0 = false;
2130 pref->set_max_size_range ();
2131 if (TREE_CODE (ptr)((enum tree_code) (ptr)->base.code) == SSA_NAME)
2132 qry->put_ref (ptr, *pref);
2133 return true;
2134}
2135
2136/* A "public" wrapper around the above. Clients should use this overload
2137 instead. */
2138
2139tree
2140compute_objsize (tree ptr, gimple *stmt, int ostype, access_ref *pref,
2141 pointer_query *ptr_qry)
2142{
2143 pointer_query qry;
2144 if (ptr_qry)
2145 ptr_qry->depth = 0;
2146 else
2147 ptr_qry = &qry;
2148
2149 /* Clear and invalidate in case *PREF is being reused. */
2150 pref->offrng[0] = pref->offrng[1] = 0;
2151 pref->sizrng[0] = pref->sizrng[1] = -1;
2152
2153 ssa_name_limit_t snlim;
2154 if (!compute_objsize_r (ptr, stmt, ostype, pref, snlim, ptr_qry))
2155 return NULL_TREE(tree) __null;
2156
2157 offset_int maxsize = pref->size_remaining ();
2158 if (pref->base0 && pref->offrng[0] < 0 && pref->offrng[1] >= 0)
2159 pref->offrng[0] = 0;
2160 return wide_int_to_tree (sizetypesizetype_tab[(int) stk_sizetype], maxsize);
2161}
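
A hypothetical caller sketch (only compute_objsize and access_ref are from this file; the destination and consumer are made up):

  access_ref aref;
  if (tree size = compute_objsize (dest, stmt, 1 /* ostype */, &aref))
    /* SIZE is the remaining size as a sizetype tree; aref.offrng
       holds the range of offsets into the referenced object.  */
    check_access_against (size, aref);   /* hypothetical consumer */
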
2162
2163/* Transitional wrapper. The function should be removed once callers
2164 transition to the pointer_query API. */
2165
2166tree
2167compute_objsize (tree ptr, gimple *stmt, int ostype, access_ref *pref,
2168 range_query *rvals /* = NULL */)
2169{
2170 pointer_query qry;
2171 qry.rvals = rvals;
2172 return compute_objsize (ptr, stmt, ostype, pref, &qry);
2173}
2174
2175/* Legacy wrapper around the above. The function should be removed
2176 once callers transition to one of the two above. */
2177
2178tree
2179compute_objsize (tree ptr, int ostype, tree *pdecl /* = NULL */,
2180 tree *poff /* = NULL */, range_query *rvals /* = NULL */)
2181{
2182 /* Set the initial offsets to zero and size to negative to indicate
2183 none has been computed yet. */
2184 access_ref ref;
2185 tree size = compute_objsize (ptr, nullptr, ostype, &ref, rvals);
2186 if (!size || !ref.base0)
2187 return NULL_TREE(tree) __null;
2188
2189 if (pdecl)
2190 *pdecl = ref.ref;
2191
2192 if (poff)
2193 *poff = wide_int_to_tree (ptrdiff_type_nodeglobal_trees[TI_PTRDIFF_TYPE], ref.offrng[ref.offrng[0] < 0]);
2194
2195 return size;
2196}
2197
2198/* Determine the offset *FLDOFF of the first byte of a struct member
2199 of TYPE (possibly recursively) into which the byte offset OFF points,
2200 starting after the field START_AFTER if it's non-null. On success,
2201 if nonnull, set *FLDOFF to the offset of the first byte, and return
2202 the field decl. If nonnull, set *NEXTOFF to the offset of the next
2203 field (which reflects any padding between the returned field and
2204 the next). Otherwise, if no such member can be found, return null. */
2205
2206tree
2207field_at_offset (tree type, tree start_after, HOST_WIDE_INTlong off,
2208 HOST_WIDE_INTlong *fldoff /* = nullptr */,
2209 HOST_WIDE_INTlong *nextoff /* = nullptr */)
2210{
2211  tree first_fld = TYPE_FIELDS (type);
2212
2213 HOST_WIDE_INTlong offbuf = 0, nextbuf = 0;
2214 if (!fldoff)
2215 fldoff = &offbuf;
2216 if (!nextoff)
2217 nextoff = &nextbuf;
2218
2219 *nextoff = 0;
2220
2221 /* The field to return. */
2222 tree last_fld = NULL_TREE(tree) __null;
2223 /* The next field to advance to. */
2224 tree next_fld = NULL_TREE(tree) __null;
2225
2226 /* NEXT_FLD's cached offset. */
2227 HOST_WIDE_INTlong next_pos = -1;
2228
2229 for (tree fld = first_fld; fld; fld = next_fld)
2230 {
2231 next_fld = fld;
2232 do
2233 /* Advance to the next relevant data member. */
2234	next_fld = TREE_CHAIN (next_fld);
2235      while (next_fld
2236	     && (TREE_CODE (next_fld) != FIELD_DECL
2237		 || DECL_ARTIFICIAL (next_fld)));
2238
2239      if (TREE_CODE (fld) != FIELD_DECL || DECL_ARTIFICIAL (fld))
2240 continue;
2241
2242 if (fld == start_after)
2243 continue;
2244
2245      tree fldtype = TREE_TYPE (fld);
2246 /* The offset of FLD within its immediately enclosing structure. */
2247 HOST_WIDE_INTlong fldpos = next_pos < 0 ? int_byte_position (fld) : next_pos;
2248
2249 /* If the size is not available the field is a flexible array
2250 member. Treat this case as success. */
2251      tree typesize = TYPE_SIZE_UNIT (fldtype);
2252 HOST_WIDE_INTlong fldsize = (tree_fits_uhwi_p (typesize)
2253 ? tree_to_uhwi (typesize)
2254 : off);
2255
2256 /* If OFF is beyond the end of the current field continue. */
2257 HOST_WIDE_INTlong fldend = fldpos + fldsize;
2258 if (fldend < off)
2259 continue;
2260
2261 if (next_fld)
2262 {
2263 /* If OFF is equal to the offset of the next field continue
2264 to it and skip the array/struct business below. */
2265 next_pos = int_byte_position (next_fld);
2266 *nextoff = *fldoff + next_pos;
2267 if (*nextoff == off && TREE_CODE (type)((enum tree_code) (type)->base.code) != UNION_TYPE)
2268 continue;
2269 }
2270 else
2271 *nextoff = HOST_WIDE_INT_MAX(~((long) (1UL << (64 - 1))));
2272
2273 /* OFF refers somewhere into the current field or just past its end,
2274 which could mean it refers to the next field. */
2275 if (TREE_CODE (fldtype)((enum tree_code) (fldtype)->base.code) == ARRAY_TYPE)
2276 {
2277 /* Will be set to the offset of the first byte of the array
2278 element (which may be an array) of FLDTYPE into which
2279 OFF - FLDPOS points (which may be past ELTOFF). */
2280 HOST_WIDE_INTlong eltoff = 0;
2281 if (tree ft = array_elt_at_offset (fldtype, off - fldpos, &eltoff))
2282 fldtype = ft;
2283 else
2284 continue;
2285
2286 /* Advance the position to include the array element above.
2287 If OFF - FLPOS refers to a member of FLDTYPE, the member
2288 will be determined below. */
2289 fldpos += eltoff;
2290 }
2291
2292 *fldoff += fldpos;
2293
2294 if (TREE_CODE (fldtype)((enum tree_code) (fldtype)->base.code) == RECORD_TYPE)
2295 /* Drill down into the current field if it's a struct. */
2296 fld = field_at_offset (fldtype, start_after, off - fldpos,
2297 fldoff, nextoff);
2298
2299 last_fld = fld;
2300
2301 /* Unless the offset is just past the end of the field return it.
2302 Otherwise save it and return it only if the offset of the next
2303	 field is greater (i.e., there is padding between the two)
2304 or if there is no next field. */
2305 if (off < fldend)
2306 break;
2307 }
2308
2309 if (*nextoff == HOST_WIDE_INT_MAX(~((long) (1UL << (64 - 1)))) && next_fld)
2310 *nextoff = next_pos;
2311
2312 return last_fld;
2313}
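
A worked example under a typical ABI (4-byte int size and alignment assumed): for

  struct S { char c; int i; };

field_at_offset (S, nullptr, 5) returns the FIELD_DECL for i and sets *FLDOFF to 4, since bytes 1 through 3 are padding and byte 5 falls within i; with no field after i, *NEXTOFF is left at HOST_WIDE_INT_MAX.
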
2314
2315/* Determine the offset *ELTOFF of the first byte of the array element
2316 of array ARTYPE into which the byte offset OFF points. On success
2317 set *ELTOFF to the offset of the first byte and return type.
2318 Otherwise, if no such element can be found, return null. */
2319
2320tree
2321array_elt_at_offset (tree artype, HOST_WIDE_INTlong off,
2322 HOST_WIDE_INTlong *eltoff /* = nullptr */,
2323 HOST_WIDE_INTlong *subar_size /* = nullptr */)
2324{
2325  gcc_assert (TREE_CODE (artype) == ARRAY_TYPE);
2326
2327 HOST_WIDE_INTlong dummy;
2328 if (!eltoff)
2329 eltoff = &dummy;
2330 if (!subar_size)
2331 subar_size = &dummy;
2332
2333 tree eltype = artype;
2334  while (TREE_CODE (TREE_TYPE (eltype)) == ARRAY_TYPE)
2335    eltype = TREE_TYPE (eltype);
2336
2337 tree subartype = eltype;
2338  if (RECORD_OR_UNION_TYPE_P (TREE_TYPE (eltype))
2339      || TYPE_MODE (TREE_TYPE (eltype)) != TYPE_MODE (char_type_node))
2340    eltype = TREE_TYPE (eltype);
2341
2342 *subar_size = int_size_in_bytes (subartype);
2343
2344 if (eltype == artype)
2345 {
2346 *eltoff = 0;
2347 return artype;
2348 }
2349
2350 HOST_WIDE_INTlong artype_size = int_size_in_bytes (artype);
2351 HOST_WIDE_INTlong eltype_size = int_size_in_bytes (eltype);
2352
2353  if (off < artype_size)
2354 {
2355 *eltoff = (off / eltype_size) * eltype_size;
2356      return TREE_CODE (eltype) == ARRAY_TYPE ? TREE_TYPE (eltype) : eltype;
2357 }
2358
2359 return NULL_TREE(tree) __null;
2360}
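
A worked example (4-byte int assumed): for int a[3][4] and OFF == 25, ELTYPE drills down to int, *ELTOFF becomes (25 / 4) * 4 == 24, and the returned type is int; OFF == 50 is past the 48-byte array and yields null.
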
2361
2362/* Wrapper around build_array_type_nelts that makes sure the array
2363 can be created at all and handles zero sized arrays specially. */
2364
2365tree
2366build_printable_array_type (tree eltype, unsigned HOST_WIDE_INTlong nelts)
2367{
2368  if (TYPE_SIZE_UNIT (eltype)
2369      && TREE_CODE (TYPE_SIZE_UNIT (eltype)) == INTEGER_CST
2370      && !integer_zerop (TYPE_SIZE_UNIT (eltype))
2371      && TYPE_ALIGN_UNIT (eltype) > 1
2372      && wi::zext (wi::to_wide (TYPE_SIZE_UNIT (eltype)),
2373		   ffs_hwi (TYPE_ALIGN_UNIT (eltype)) - 1) != 0)
2374    eltype = TYPE_MAIN_VARIANT (eltype);
2375
2376 /* Consider excessive NELTS an array of unknown bound. */
2377 tree idxtype = NULL_TREE(tree) __null;
2378 if (nelts < HOST_WIDE_INT_MAX(~((long) (1UL << (64 - 1)))))
2379 {
2380 if (nelts)
2381 return build_array_type_nelts (eltype, nelts);
2382 idxtype = build_range_type (sizetypesizetype_tab[(int) stk_sizetype], size_zero_nodeglobal_trees[TI_SIZE_ZERO], NULL_TREE(tree) __null);
2383 }
2384
2385 tree arrtype = build_array_type (eltype, idxtype);
2386  arrtype = build_distinct_type_copy (TYPE_MAIN_VARIANT (arrtype));
2387  TYPE_SIZE (arrtype) = bitsize_zero_node;
2388  TYPE_SIZE_UNIT (arrtype) = size_zero_node;
2389 return arrtype;
2390}

/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.h

1/* Gimple IR definitions.
2
3 Copyright (C) 2007-2021 Free Software Foundation, Inc.
4 Contributed by Aldy Hernandez <aldyh@redhat.com>
5
6This file is part of GCC.
7
8GCC is free software; you can redistribute it and/or modify it under
9the terms of the GNU General Public License as published by the Free
10Software Foundation; either version 3, or (at your option) any later
11version.
12
13GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14WARRANTY; without even the implied warranty of MERCHANTABILITY or
15FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16for more details.
17
18You should have received a copy of the GNU General Public License
19along with GCC; see the file COPYING3. If not see
20<http://www.gnu.org/licenses/>. */
21
22#ifndef GCC_GIMPLE_H
23#define GCC_GIMPLE_H
24
25#include "tree-ssa-alias.h"
26#include "gimple-expr.h"
27
28typedef gimple *gimple_seq_node;
29
30enum gimple_code {
31#define DEFGSCODE(SYM, STRING, STRUCT) SYM,
32#include "gimple.def"
33#undef DEFGSCODE
34 LAST_AND_UNUSED_GIMPLE_CODE
35};
36
37extern const char *const gimple_code_name[];
38extern const unsigned char gimple_rhs_class_table[];
39
40/* Strip the outermost pointer, from tr1/type_traits. */
41template<typename T> struct remove_pointer { typedef T type; };
42template<typename T> struct remove_pointer<T *> { typedef T type; };
43
44/* Error out if a gimple tuple is addressed incorrectly. */
45#if defined ENABLE_GIMPLE_CHECKING
46#define gcc_gimple_checking_assert(EXPR) gcc_assert (EXPR)
47extern void gimple_check_failed (const gimple *, const char *, int, \
48 const char *, enum gimple_code, \
49				 enum tree_code) ATTRIBUTE_NORETURN \
50 ATTRIBUTE_COLD;
51
52#define GIMPLE_CHECK(GS, CODE)					\
53  do {								\
54    const gimple *__gs = (GS);					\
55    if (gimple_code (__gs) != (CODE))				\
56      gimple_check_failed (__gs, __FILE__, __LINE__, __FUNCTION__,	\
57			   (CODE), ERROR_MARK);			\
58  } while (0)
59template <typename T>
60static inline T
61GIMPLE_CHECK2(const gimple *gs,
62#if __GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
63 const char *file = __builtin_FILE (),
64 int line = __builtin_LINE (),
65 const char *fun = __builtin_FUNCTION ())
66#else
67	      const char *file = __FILE__,
68	      int line = __LINE__,
69	      const char *fun = NULL)
70#endif
71{
72 T ret = dyn_cast <T> (gs);
73 if (!ret)
74 gimple_check_failed (gs, file, line, fun,
75 remove_pointer<T>::type::code_, ERROR_MARK);
76 return ret;
77}
78template <typename T>
79static inline T
80GIMPLE_CHECK2(gimple *gs,
81#if __GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
82 const char *file = __builtin_FILE (),
83 int line = __builtin_LINE (),
84 const char *fun = __builtin_FUNCTION ())
85#else
86	      const char *file = __FILE__,
87	      int line = __LINE__,
88	      const char *fun = NULL)
89#endif
90{
91 T ret = dyn_cast <T> (gs);
92 if (!ret)
93 gimple_check_failed (gs, file, line, fun,
94 remove_pointer<T>::type::code_, ERROR_MARK);
95 return ret;
96}
97#else /* not ENABLE_GIMPLE_CHECKING */
98#define gcc_gimple_checking_assert(EXPR) ((void)(0 && (EXPR)))
99#define GIMPLE_CHECK(GS, CODE) (void)0
100template <typename T>
101static inline T
102GIMPLE_CHECK2(gimple *gs)
103{
104 return as_a <T> (gs);
105}
106template <typename T>
107static inline T
108GIMPLE_CHECK2(const gimple *gs)
109{
110 return as_a <T> (gs);
111}
112#endif
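
[Editor's note — a hedged sketch of how GIMPLE_CHECK2 is meant to be used by the accessors later in this header (example_cond_lhs is hypothetical): the checked downcast and the failure report fold into one call, and in checked builds the caller's location is captured through the __builtin_FILE/__builtin_LINE default arguments.

    static inline tree
    example_cond_lhs (const gimple *gs)
    {
      const gcond *gc = GIMPLE_CHECK2<const gcond *> (gs);
      return gc->op[0];
    }
]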
113
114/* Class of GIMPLE expressions suitable for the RHS of assignments. See
115 get_gimple_rhs_class. */
116enum gimple_rhs_class
117{
118 GIMPLE_INVALID_RHS, /* The expression cannot be used on the RHS. */
119 GIMPLE_TERNARY_RHS, /* The expression is a ternary operation. */
120 GIMPLE_BINARY_RHS, /* The expression is a binary operation. */
121 GIMPLE_UNARY_RHS, /* The expression is a unary operation. */
122 GIMPLE_SINGLE_RHS /* The expression is a single object (an SSA
123			 name, a _DECL, a _REF, etc.).  */
124};
125
126/* Specific flags for individual GIMPLE statements. These flags are
127 always stored in gimple.subcode and they may only be
128 defined for statement codes that do not use subcodes.
129
130 Values for the masks can overlap as long as the overlapping values
131 are never used in the same statement class.
132
133 The maximum mask value that can be defined is 1 << 15 (i.e., each
134 statement code can hold up to 16 bitflags).
135
136 Keep this list sorted. */
137enum gf_mask {
138 GF_ASM_INPUT = 1 << 0,
139 GF_ASM_VOLATILE = 1 << 1,
140 GF_ASM_INLINE = 1 << 2,
141 GF_CALL_FROM_THUNK = 1 << 0,
142 GF_CALL_RETURN_SLOT_OPT = 1 << 1,
143 GF_CALL_TAILCALL = 1 << 2,
144 GF_CALL_VA_ARG_PACK = 1 << 3,
145 GF_CALL_NOTHROW = 1 << 4,
146 GF_CALL_ALLOCA_FOR_VAR = 1 << 5,
147 GF_CALL_INTERNAL = 1 << 6,
148 GF_CALL_CTRL_ALTERING = 1 << 7,
149 GF_CALL_MUST_TAIL_CALL = 1 << 9,
150 GF_CALL_BY_DESCRIPTOR = 1 << 10,
151 GF_CALL_NOCF_CHECK = 1 << 11,
152 GF_CALL_FROM_NEW_OR_DELETE = 1 << 12,
153 GF_OMP_PARALLEL_COMBINED = 1 << 0,
154 GF_OMP_TASK_TASKLOOP = 1 << 0,
155 GF_OMP_TASK_TASKWAIT = 1 << 1,
156 GF_OMP_FOR_KIND_MASK = (1 << 3) - 1,
157 GF_OMP_FOR_KIND_FOR = 0,
158 GF_OMP_FOR_KIND_DISTRIBUTE = 1,
159 GF_OMP_FOR_KIND_TASKLOOP = 2,
160 GF_OMP_FOR_KIND_OACC_LOOP = 4,
161 GF_OMP_FOR_KIND_SIMD = 5,
162 GF_OMP_FOR_COMBINED = 1 << 3,
163 GF_OMP_FOR_COMBINED_INTO = 1 << 4,
164 GF_OMP_TARGET_KIND_MASK = (1 << 5) - 1,
165 GF_OMP_TARGET_KIND_REGION = 0,
166 GF_OMP_TARGET_KIND_DATA = 1,
167 GF_OMP_TARGET_KIND_UPDATE = 2,
168 GF_OMP_TARGET_KIND_ENTER_DATA = 3,
169 GF_OMP_TARGET_KIND_EXIT_DATA = 4,
170 GF_OMP_TARGET_KIND_OACC_PARALLEL = 5,
171 GF_OMP_TARGET_KIND_OACC_KERNELS = 6,
172 GF_OMP_TARGET_KIND_OACC_SERIAL = 7,
173 GF_OMP_TARGET_KIND_OACC_DATA = 8,
174 GF_OMP_TARGET_KIND_OACC_UPDATE = 9,
175 GF_OMP_TARGET_KIND_OACC_ENTER_DATA = 10,
176 GF_OMP_TARGET_KIND_OACC_EXIT_DATA = 11,
177 GF_OMP_TARGET_KIND_OACC_DECLARE = 12,
178 GF_OMP_TARGET_KIND_OACC_HOST_DATA = 13,
179 /* A 'GF_OMP_TARGET_KIND_OACC_PARALLEL' representing an OpenACC 'kernels'
180 decomposed part, parallelized. */
181 GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED = 14,
182 /* A 'GF_OMP_TARGET_KIND_OACC_PARALLEL' representing an OpenACC 'kernels'
183 decomposed part, "gang-single". */
184 GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE = 15,
185 /* A 'GF_OMP_TARGET_KIND_OACC_DATA' representing an OpenACC 'kernels'
186 decomposed parts' 'data' construct. */
187 GF_OMP_TARGET_KIND_OACC_DATA_KERNELS = 16,
188 GF_OMP_TEAMS_HOST = 1 << 0,
189
190 /* True on an GIMPLE_OMP_RETURN statement if the return does not require
191 a thread synchronization via some sort of barrier. The exact barrier
192 that would otherwise be emitted is dependent on the OMP statement with
193 which this return is associated. */
194 GF_OMP_RETURN_NOWAIT = 1 << 0,
195
196 GF_OMP_SECTION_LAST = 1 << 0,
197 GF_OMP_ATOMIC_MEMORY_ORDER = (1 << 6) - 1,
198 GF_OMP_ATOMIC_NEED_VALUE = 1 << 6,
199 GF_OMP_ATOMIC_WEAK = 1 << 7,
200 GF_PREDICT_TAKEN = 1 << 15
201};
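
[Editor's note — a minimal sketch of how these subcode masks are consumed (example_call_tail_p is hypothetical). Single-bit flags are tested with a bitwise AND against gimple.subcode, while the *_KIND_MASK values select a multi-bit field:

    static inline bool
    example_call_tail_p (const gcall *s)
    {
      return (s->subcode & GF_CALL_TAILCALL) != 0;
    }

    /* Multi-bit kind field, e.g. for GIMPLE_OMP_FOR:
       (s->subcode & GF_OMP_FOR_KIND_MASK) == GF_OMP_FOR_KIND_SIMD  */
]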
202
203/* This subcode tells apart different kinds of stmts that are not used
204 for codegen, but rather to retain debug information. */
205enum gimple_debug_subcode {
206 GIMPLE_DEBUG_BIND = 0,
207 GIMPLE_DEBUG_SOURCE_BIND = 1,
208 GIMPLE_DEBUG_BEGIN_STMT = 2,
209 GIMPLE_DEBUG_INLINE_ENTRY = 3
210};
211
212/* Masks for selecting a pass local flag (PLF) to work on. These
213 masks are used by gimple_set_plf and gimple_plf. */
214enum plf_mask {
215 GF_PLF_1 = 1 << 0,
216 GF_PLF_2 = 1 << 1
217};
218
219/* Data structure definitions for GIMPLE tuples. NOTE: word markers
220 are for 64 bit hosts. */
221
222struct GTY((desc ("gimple_statement_structure (&%h)"), tag ("GSS_BASE"),
223 chain_next ("%h.next"), variable_size))
224 gimple
225{
226 /* [ WORD 1 ]
227 Main identifying code for a tuple. */
228  ENUM_BITFIELD(gimple_code) code : 8;
229
230 /* Nonzero if a warning should not be emitted on this tuple. */
231 unsigned int no_warning : 1;
232
233 /* Nonzero if this tuple has been visited. Passes are responsible
234 for clearing this bit before using it. */
235 unsigned int visited : 1;
236
237 /* Nonzero if this tuple represents a non-temporal move. */
238 unsigned int nontemporal_move : 1;
239
240 /* Pass local flags. These flags are free for any pass to use as
241 they see fit. Passes should not assume that these flags contain
242 any useful value when the pass starts. Any initial state that
243 the pass requires should be set on entry to the pass. See
244 gimple_set_plf and gimple_plf for usage. */
245 unsigned int plf : 2;
246
247 /* Nonzero if this statement has been modified and needs to have its
248 operands rescanned. */
249 unsigned modified : 1;
250
251 /* Nonzero if this statement contains volatile operands. */
252 unsigned has_volatile_ops : 1;
253
254 /* Padding to get subcode to 16 bit alignment. */
255 unsigned pad : 1;
256
257 /* The SUBCODE field can be used for tuple-specific flags for tuples
258 that do not require subcodes. Note that SUBCODE should be at
259 least as wide as tree codes, as several tuples store tree codes
260 in there. */
261 unsigned int subcode : 16;
262
263 /* UID of this statement. This is used by passes that want to
264 assign IDs to statements. It must be assigned and used by each
265 pass. By default it should be assumed to contain garbage. */
266 unsigned uid;
267
268 /* [ WORD 2 ]
269 Locus information for debug info. */
270 location_t location;
271
272 /* Number of operands in this tuple. */
273 unsigned num_ops;
274
275 /* [ WORD 3 ]
276 Basic block holding this statement. */
277 basic_block bb;
278
279 /* [ WORD 4-5 ]
280 Linked lists of gimple statements. The next pointers form
281 a NULL terminated list, the prev pointers are a cyclic list.
282 A gimple statement is hence also a double-ended list of
283 statements, with the pointer itself being the first element,
284 and the prev pointer being the last. */
285 gimple *next;
286 gimple *GTY((skip)) prev;
287};
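
[Editor's note — an illustration of the NEXT/PREV layout documented in WORD 4-5 above, for a three-statement sequence S = [A, B, C]:

    A->next == B    B->next == C    C->next == NULL   /* NULL-terminated */
    A->prev == C    B->prev == A    C->prev == B      /* cyclic */

The head pointer thus doubles as the whole sequence, and S->prev is an O(1) handle on the last statement; see gimple_seq_last further down.]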
288
289
290/* Base structure for tuples with operands. */
291
292/* This gimple subclass has no tag value. */
293struct GTY(())
294 gimple_statement_with_ops_base : public gimple
295{
296 /* [ WORD 1-6 ] : base class */
297
298 /* [ WORD 7 ]
299 SSA operand vectors. NOTE: It should be possible to
300 amalgamate these vectors with the operand vector OP. However,
301 the SSA operand vectors are organized differently and contain
302 more information (like immediate use chaining). */
303 struct use_optype_d GTY((skip (""))) *use_ops;
304};
305
306
307/* Statements that take register operands. */
308
309struct GTY((tag("GSS_WITH_OPS")))
310 gimple_statement_with_ops : public gimple_statement_with_ops_base
311{
312 /* [ WORD 1-7 ] : base class */
313
314 /* [ WORD 8 ]
315 Operand vector. NOTE! This must always be the last field
316 of this structure. In particular, this means that this
317 structure cannot be embedded inside another one. */
318 tree GTY((length ("%h.num_ops"))) op[1];
319};
320
321
322/* Base for statements that take both memory and register operands. */
323
324struct GTY((tag("GSS_WITH_MEM_OPS_BASE")))
325 gimple_statement_with_memory_ops_base : public gimple_statement_with_ops_base
326{
327 /* [ WORD 1-7 ] : base class */
328
329 /* [ WORD 8-9 ]
330 Virtual operands for this statement. The GC will pick them
331 up via the ssa_names array. */
332 tree GTY((skip (""))) vdef;
333 tree GTY((skip (""))) vuse;
334};
335
336
337/* Statements that take both memory and register operands. */
338
339struct GTY((tag("GSS_WITH_MEM_OPS")))
340 gimple_statement_with_memory_ops :
341 public gimple_statement_with_memory_ops_base
342{
343 /* [ WORD 1-9 ] : base class */
344
345 /* [ WORD 10 ]
346 Operand vector. NOTE! This must always be the last field
347 of this structure. In particular, this means that this
348 structure cannot be embedded inside another one. */
349 tree GTY((length ("%h.num_ops"))) op[1];
350};
351
352
353/* Call statements that take both memory and register operands. */
354
355struct GTY((tag("GSS_CALL")))
356 gcall : public gimple_statement_with_memory_ops_base
357{
358 /* [ WORD 1-9 ] : base class */
359
360 /* [ WORD 10-13 ] */
361 struct pt_solution call_used;
362 struct pt_solution call_clobbered;
363
364 /* [ WORD 14 ] */
365 union GTY ((desc ("%1.subcode & GF_CALL_INTERNAL"))) {
366 tree GTY ((tag ("0"))) fntype;
367 enum internal_fn GTY ((tag ("GF_CALL_INTERNAL"))) internal_fn;
368 } u;
369
370 /* [ WORD 15 ]
371 Operand vector. NOTE! This must always be the last field
372 of this structure. In particular, this means that this
373 structure cannot be embedded inside another one. */
374 tree GTY((length ("%h.num_ops"))) op[1];
375
376 static const enum gimple_code code_ = GIMPLE_CALL;
377};
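
[Editor's note — a hedged sketch of reading the discriminated union above: the GF_CALL_INTERNAL subcode bit selects the live member (handle_ifn and handle_fntype are hypothetical):

    if (stmt->subcode & GF_CALL_INTERNAL)
      handle_ifn (stmt->u.internal_fn);   /* internal function call */
    else
      handle_fntype (stmt->u.fntype);     /* ordinary call: static callee type */
]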
378
379
380/* OMP statements. */
381
382struct GTY((tag("GSS_OMP")))
383 gimple_statement_omp : public gimple
384{
385 /* [ WORD 1-6 ] : base class */
386
387 /* [ WORD 7 ] */
388 gimple_seq body;
389};
390
391
392/* GIMPLE_BIND */
393
394struct GTY((tag("GSS_BIND")))
395 gbind : public gimple
396{
397 /* [ WORD 1-6 ] : base class */
398
399 /* [ WORD 7 ]
400 Variables declared in this scope. */
401 tree vars;
402
403 /* [ WORD 8 ]
404 This is different than the BLOCK field in gimple,
405 which is analogous to TREE_BLOCK (i.e., the lexical block holding
406 this statement). This field is the equivalent of BIND_EXPR_BLOCK
407 in tree land (i.e., the lexical scope defined by this bind). See
408 gimple-low.c. */
409 tree block;
410
411 /* [ WORD 9 ] */
412 gimple_seq body;
413};
414
415
416/* GIMPLE_CATCH */
417
418struct GTY((tag("GSS_CATCH")))
419 gcatch : public gimple
420{
421 /* [ WORD 1-6 ] : base class */
422
423 /* [ WORD 7 ] */
424 tree types;
425
426 /* [ WORD 8 ] */
427 gimple_seq handler;
428};
429
430
431/* GIMPLE_EH_FILTER */
432
433struct GTY((tag("GSS_EH_FILTER")))
434 geh_filter : public gimple
435{
436 /* [ WORD 1-6 ] : base class */
437
438 /* [ WORD 7 ]
439 Filter types. */
440 tree types;
441
442 /* [ WORD 8 ]
443 Failure actions. */
444 gimple_seq failure;
445};
446
447/* GIMPLE_EH_ELSE */
448
449struct GTY((tag("GSS_EH_ELSE")))
450 geh_else : public gimple
451{
452 /* [ WORD 1-6 ] : base class */
453
454 /* [ WORD 7,8 ] */
455 gimple_seq n_body, e_body;
456};
457
458/* GIMPLE_EH_MUST_NOT_THROW */
459
460struct GTY((tag("GSS_EH_MNT")))
461 geh_mnt : public gimple
462{
463 /* [ WORD 1-6 ] : base class */
464
465 /* [ WORD 7 ] Abort function decl. */
466 tree fndecl;
467};
468
469/* GIMPLE_PHI */
470
471struct GTY((tag("GSS_PHI")))
472 gphi : public gimple
473{
474 /* [ WORD 1-6 ] : base class */
475
476 /* [ WORD 7 ] */
477 unsigned capacity;
478 unsigned nargs;
479
480 /* [ WORD 8 ] */
481 tree result;
482
483 /* [ WORD 9 ] */
484 struct phi_arg_d GTY ((length ("%h.nargs"))) args[1];
485};
486
487
488/* GIMPLE_RESX, GIMPLE_EH_DISPATCH */
489
490struct GTY((tag("GSS_EH_CTRL")))
491 gimple_statement_eh_ctrl : public gimple
492{
493 /* [ WORD 1-6 ] : base class */
494
495 /* [ WORD 7 ]
496 Exception region number. */
497 int region;
498};
499
500struct GTY((tag("GSS_EH_CTRL")))
501 gresx : public gimple_statement_eh_ctrl
502{
503 /* No extra fields; adds invariant:
504 stmt->code == GIMPLE_RESX. */
505};
506
507struct GTY((tag("GSS_EH_CTRL")))
508 geh_dispatch : public gimple_statement_eh_ctrl
509{
510 /* No extra fields; adds invariant:
511     stmt->code == GIMPLE_EH_DISPATCH. */
512};
513
514
515/* GIMPLE_TRY */
516
517struct GTY((tag("GSS_TRY")))
518 gtry : public gimple
519{
520 /* [ WORD 1-6 ] : base class */
521
522 /* [ WORD 7 ]
523 Expression to evaluate. */
524 gimple_seq eval;
525
526 /* [ WORD 8 ]
527 Cleanup expression. */
528 gimple_seq cleanup;
529};
530
531/* Kind of GIMPLE_TRY statements. */
532enum gimple_try_flags
533{
534 /* A try/catch. */
535 GIMPLE_TRY_CATCH = 1 << 0,
536
537 /* A try/finally. */
538 GIMPLE_TRY_FINALLY = 1 << 1,
539 GIMPLE_TRY_KIND = GIMPLE_TRY_CATCH | GIMPLE_TRY_FINALLY,
540
541 /* Analogous to TRY_CATCH_IS_CLEANUP. */
542 GIMPLE_TRY_CATCH_IS_CLEANUP = 1 << 2
543};
544
545/* GIMPLE_WITH_CLEANUP_EXPR */
546
547struct GTY((tag("GSS_WCE")))
548 gimple_statement_wce : public gimple
549{
550 /* [ WORD 1-6 ] : base class */
551
552 /* Subcode: CLEANUP_EH_ONLY. True if the cleanup should only be
553 executed if an exception is thrown, not on normal exit of its
554 scope. This flag is analogous to the CLEANUP_EH_ONLY flag
555 in TARGET_EXPRs. */
556
557 /* [ WORD 7 ]
558 Cleanup expression. */
559 gimple_seq cleanup;
560};
561
562
563/* GIMPLE_ASM */
564
565struct GTY((tag("GSS_ASM")))
566 gasm : public gimple_statement_with_memory_ops_base
567{
568 /* [ WORD 1-9 ] : base class */
569
570 /* [ WORD 10 ]
571 __asm__ statement. */
572 const char *string;
573
574 /* [ WORD 11 ]
575 Number of inputs, outputs, clobbers, labels. */
576 unsigned char ni;
577 unsigned char no;
578 unsigned char nc;
579 unsigned char nl;
580
581 /* [ WORD 12 ]
582 Operand vector. NOTE! This must always be the last field
583 of this structure. In particular, this means that this
584 structure cannot be embedded inside another one. */
585 tree GTY((length ("%h.num_ops"))) op[1];
586};
587
588/* GIMPLE_OMP_CRITICAL */
589
590struct GTY((tag("GSS_OMP_CRITICAL")))
591 gomp_critical : public gimple_statement_omp
592{
593 /* [ WORD 1-7 ] : base class */
594
595 /* [ WORD 8 ] */
596 tree clauses;
597
598 /* [ WORD 9 ]
599 Critical section name. */
600 tree name;
601};
602
603
604struct GTY(()) gimple_omp_for_iter {
605 /* Condition code. */
606 enum tree_code cond;
607
608 /* Index variable. */
609 tree index;
610
611 /* Initial value. */
612 tree initial;
613
614 /* Final value. */
615 tree final;
616
617 /* Increment. */
618 tree incr;
619};
620
621/* GIMPLE_OMP_FOR */
622
623struct GTY((tag("GSS_OMP_FOR")))
624 gomp_for : public gimple_statement_omp
625{
626 /* [ WORD 1-7 ] : base class */
627
628 /* [ WORD 8 ] */
629 tree clauses;
630
631 /* [ WORD 9 ]
632 Number of elements in iter array. */
633 size_t collapse;
634
635 /* [ WORD 10 ] */
636 struct gimple_omp_for_iter * GTY((length ("%h.collapse"))) iter;
637
638 /* [ WORD 11 ]
639 Pre-body evaluated before the loop body begins. */
640 gimple_seq pre_body;
641};
642
643
644/* GIMPLE_OMP_PARALLEL, GIMPLE_OMP_TARGET, GIMPLE_OMP_TASK, GIMPLE_OMP_TEAMS */
645
646struct GTY((tag("GSS_OMP_PARALLEL_LAYOUT")))
647 gimple_statement_omp_parallel_layout : public gimple_statement_omp
648{
649 /* [ WORD 1-7 ] : base class */
650
651 /* [ WORD 8 ]
652 Clauses. */
653 tree clauses;
654
655 /* [ WORD 9 ]
656 Child function holding the body of the parallel region. */
657 tree child_fn;
658
659 /* [ WORD 10 ]
660 Shared data argument. */
661 tree data_arg;
662};
663
664/* GIMPLE_OMP_PARALLEL or GIMPLE_TASK */
665struct GTY((tag("GSS_OMP_PARALLEL_LAYOUT")))
666 gimple_statement_omp_taskreg : public gimple_statement_omp_parallel_layout
667{
668 /* No extra fields; adds invariant:
669 stmt->code == GIMPLE_OMP_PARALLEL
670 || stmt->code == GIMPLE_OMP_TASK
671 || stmt->code == GIMPLE_OMP_TEAMS. */
672};
673
674/* GIMPLE_OMP_PARALLEL */
675struct GTY((tag("GSS_OMP_PARALLEL_LAYOUT")))
676 gomp_parallel : public gimple_statement_omp_taskreg
677{
678 /* No extra fields; adds invariant:
679 stmt->code == GIMPLE_OMP_PARALLEL. */
680};
681
682/* GIMPLE_OMP_TARGET */
683struct GTY((tag("GSS_OMP_PARALLEL_LAYOUT")))
684 gomp_target : public gimple_statement_omp_parallel_layout
685{
686 /* No extra fields; adds invariant:
687 stmt->code == GIMPLE_OMP_TARGET. */
688};
689
690/* GIMPLE_OMP_TASK */
691
692struct GTY((tag("GSS_OMP_TASK")))
693 gomp_task : public gimple_statement_omp_taskreg
694{
695 /* [ WORD 1-10 ] : base class */
696
697 /* [ WORD 11 ]
698 Child function holding firstprivate initialization if needed. */
699 tree copy_fn;
700
701 /* [ WORD 12-13 ]
702 Size and alignment in bytes of the argument data block. */
703 tree arg_size;
704 tree arg_align;
705};
706
707
708/* GIMPLE_OMP_SECTION */
709/* Uses struct gimple_statement_omp. */
710
711
712/* GIMPLE_OMP_SECTIONS */
713
714struct GTY((tag("GSS_OMP_SECTIONS")))
715 gomp_sections : public gimple_statement_omp
716{
717 /* [ WORD 1-7 ] : base class */
718
719 /* [ WORD 8 ] */
720 tree clauses;
721
722 /* [ WORD 9 ]
723 The control variable used for deciding which of the sections to
724 execute. */
725 tree control;
726};
727
728/* GIMPLE_OMP_CONTINUE.
729
730 Note: This does not inherit from gimple_statement_omp, because we
731 do not need the body field. */
732
733struct GTY((tag("GSS_OMP_CONTINUE")))
734 gomp_continue : public gimple
735{
736 /* [ WORD 1-6 ] : base class */
737
738 /* [ WORD 7 ] */
739 tree control_def;
740
741 /* [ WORD 8 ] */
742 tree control_use;
743};
744
745/* GIMPLE_OMP_SINGLE, GIMPLE_OMP_ORDERED, GIMPLE_OMP_TASKGROUP,
746 GIMPLE_OMP_SCAN, GIMPLE_OMP_MASKED, GIMPLE_OMP_SCOPE. */
747
748struct GTY((tag("GSS_OMP_SINGLE_LAYOUT")))
749 gimple_statement_omp_single_layout : public gimple_statement_omp
750{
751 /* [ WORD 1-7 ] : base class */
752
753 /* [ WORD 8 ] */
754 tree clauses;
755};
756
757struct GTY((tag("GSS_OMP_SINGLE_LAYOUT")))
758 gomp_single : public gimple_statement_omp_single_layout
759{
760 /* No extra fields; adds invariant:
761 stmt->code == GIMPLE_OMP_SINGLE. */
762};
763
764struct GTY((tag("GSS_OMP_PARALLEL_LAYOUT")))
765 gomp_teams : public gimple_statement_omp_taskreg
766{
767 /* No extra fields; adds invariant:
768 stmt->code == GIMPLE_OMP_TEAMS. */
769};
770
771struct GTY((tag("GSS_OMP_SINGLE_LAYOUT")))
772 gomp_ordered : public gimple_statement_omp_single_layout
773{
774 /* No extra fields; adds invariant:
775 stmt->code == GIMPLE_OMP_ORDERED. */
776};
777
778struct GTY((tag("GSS_OMP_SINGLE_LAYOUT")))
779 gomp_scan : public gimple_statement_omp_single_layout
780{
781 /* No extra fields; adds invariant:
782 stmt->code == GIMPLE_OMP_SCAN. */
783};
784
785
786/* GIMPLE_OMP_ATOMIC_LOAD.
787 Note: This is based on gimple, not g_s_omp, because g_s_omp
788 contains a sequence, which we don't need here. */
789
790struct GTY((tag("GSS_OMP_ATOMIC_LOAD")))
791 gomp_atomic_load : public gimple
792{
793 /* [ WORD 1-6 ] : base class */
794
795 /* [ WORD 7-8 ] */
796 tree rhs, lhs;
797};
798
799/* GIMPLE_OMP_ATOMIC_STORE.
800 See note on GIMPLE_OMP_ATOMIC_LOAD. */
801
802struct GTY((tag("GSS_OMP_ATOMIC_STORE_LAYOUT")))
803 gimple_statement_omp_atomic_store_layout : public gimple
804{
805 /* [ WORD 1-6 ] : base class */
806
807 /* [ WORD 7 ] */
808 tree val;
809};
810
811struct GTY((tag("GSS_OMP_ATOMIC_STORE_LAYOUT")))
812 gomp_atomic_store :
813 public gimple_statement_omp_atomic_store_layout
814{
815 /* No extra fields; adds invariant:
816 stmt->code == GIMPLE_OMP_ATOMIC_STORE. */
817};
818
819struct GTY((tag("GSS_OMP_ATOMIC_STORE_LAYOUT")))
820 gimple_statement_omp_return :
821 public gimple_statement_omp_atomic_store_layout
822{
823 /* No extra fields; adds invariant:
824 stmt->code == GIMPLE_OMP_RETURN. */
825};
826
827/* GIMPLE_TRANSACTION. */
828
829/* Bits to be stored in the GIMPLE_TRANSACTION subcode. */
830
831/* The __transaction_atomic was declared [[outer]] or it is
832 __transaction_relaxed. */
833#define GTMA_IS_OUTER			(1u << 0)
834#define GTMA_IS_RELAXED			(1u << 1)
835#define GTMA_DECLARATION_MASK		(GTMA_IS_OUTER | GTMA_IS_RELAXED)
836
837/* The transaction is seen to not have an abort. */
838#define GTMA_HAVE_ABORT			(1u << 2)
839/* The transaction is seen to have loads or stores. */
840#define GTMA_HAVE_LOAD			(1u << 3)
841#define GTMA_HAVE_STORE			(1u << 4)
842/* The transaction MAY enter serial irrevocable mode in its dynamic scope. */
843#define GTMA_MAY_ENTER_IRREVOCABLE	(1u << 5)
844/* The transaction WILL enter serial irrevocable mode.
845 An irrevocable block post-dominates the entire transaction, such
846 that all invocations of the transaction will go serial-irrevocable.
847 In such a case, we don't bother instrumenting the transaction, and
848 tell the runtime that it should begin the transaction in
849 serial-irrevocable mode. */
850#define GTMA_DOES_GO_IRREVOCABLE	(1u << 6)
851/* The transaction contains no instrumentation code whatsoever, most
852 likely because it is guaranteed to go irrevocable upon entry. */
853#define GTMA_HAS_NO_INSTRUMENTATION	(1u << 7)
854
855struct GTY((tag("GSS_TRANSACTION")))
856 gtransaction : public gimple_statement_with_memory_ops_base
857{
858 /* [ WORD 1-9 ] : base class */
859
860 /* [ WORD 10 ] */
861 gimple_seq body;
862
863 /* [ WORD 11-13 ] */
864 tree label_norm;
865 tree label_uninst;
866 tree label_over;
867};
868
869#define DEFGSSTRUCT(SYM, STRUCT, HAS_TREE_OP) SYM,
870enum gimple_statement_structure_enum {
871#include "gsstruct.def"
872 LAST_GSS_ENUM
873};
874#undef DEFGSSTRUCT
875
876/* A statement with the invariant that
877 stmt->code == GIMPLE_COND
878 i.e. a conditional jump statement. */
879
880struct GTY((tag("GSS_WITH_OPS")))
881 gcond : public gimple_statement_with_ops
882{
883 /* no additional fields; this uses the layout for GSS_WITH_OPS. */
884 static const enum gimple_code code_ = GIMPLE_COND;
885};
886
887/* A statement with the invariant that
888 stmt->code == GIMPLE_DEBUG
889 i.e. a debug statement. */
890
891struct GTY((tag("GSS_WITH_OPS")))
892 gdebug : public gimple_statement_with_ops
893{
894 /* no additional fields; this uses the layout for GSS_WITH_OPS. */
895};
896
897/* A statement with the invariant that
898 stmt->code == GIMPLE_GOTO
899 i.e. a goto statement. */
900
901struct GTY((tag("GSS_WITH_OPS")))
902 ggoto : public gimple_statement_with_ops
903{
904 /* no additional fields; this uses the layout for GSS_WITH_OPS. */
905};
906
907/* A statement with the invariant that
908 stmt->code == GIMPLE_LABEL
909 i.e. a label statement. */
910
911struct GTY((tag("GSS_WITH_OPS")))
912 glabel : public gimple_statement_with_ops
913{
914 /* no additional fields; this uses the layout for GSS_WITH_OPS. */
915};
916
917/* A statement with the invariant that
918 stmt->code == GIMPLE_SWITCH
919 i.e. a switch statement. */
920
921struct GTY((tag("GSS_WITH_OPS")))
922 gswitch : public gimple_statement_with_ops
923{
924 /* no additional fields; this uses the layout for GSS_WITH_OPS. */
925};
926
927/* A statement with the invariant that
928 stmt->code == GIMPLE_ASSIGN
929 i.e. an assignment statement. */
930
931struct GTY((tag("GSS_WITH_MEM_OPS")))
932 gassign : public gimple_statement_with_memory_ops
933{
934 static const enum gimple_code code_ = GIMPLE_ASSIGN;
935 /* no additional fields; this uses the layout for GSS_WITH_MEM_OPS. */
936};
937
938/* A statement with the invariant that
939 stmt->code == GIMPLE_RETURN
940 i.e. a return statement. */
941
942struct GTY((tag("GSS_WITH_MEM_OPS")))
943 greturn : public gimple_statement_with_memory_ops
944{
945 /* no additional fields; this uses the layout for GSS_WITH_MEM_OPS. */
946};
947
948template <>
949template <>
950inline bool
951is_a_helper <gasm *>::test (gimple *gs)
952{
953 return gs->code == GIMPLE_ASM;
954}
955
956template <>
957template <>
958inline bool
959is_a_helper <gassign *>::test (gimple *gs)
960{
961 return gs->code == GIMPLE_ASSIGN;
962}
963
964template <>
965template <>
966inline bool
967is_a_helper <const gassign *>::test (const gimple *gs)
968{
969 return gs->code == GIMPLE_ASSIGN;
970}
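
[Editor's note — a hedged usage sketch. The is_a_helper specializations in this header plug into the generic machinery from is-a.h, so statement downcasts read as follows (use_assign is hypothetical):

    if (gassign *assign = dyn_cast <gassign *> (stmt))
      use_assign (assign);                 /* code is GIMPLE_ASSIGN here */
    bool b = is_a <gcall *> (stmt);        /* true iff gimple_code (stmt) == GIMPLE_CALL */
    gcall *call = as_a <gcall *> (stmt);   /* downcast, asserted in checked builds */
]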
971
972template <>
973template <>
974inline bool
975is_a_helper <gbind *>::test (gimple *gs)
976{
977 return gs->code == GIMPLE_BIND;
978}
979
980template <>
981template <>
982inline bool
983is_a_helper <gcall *>::test (gimple *gs)
984{
985 return gs->code == GIMPLE_CALL;
986}
987
988template <>
989template <>
990inline bool
991is_a_helper <gcatch *>::test (gimple *gs)
992{
993 return gs->code == GIMPLE_CATCH;
994}
995
996template <>
997template <>
998inline bool
999is_a_helper <gcond *>::test (gimple *gs)
1000{
1001 return gs->code == GIMPLE_COND;
1002}
1003
1004template <>
1005template <>
1006inline bool
1007is_a_helper <const gcond *>::test (const gimple *gs)
1008{
1009 return gs->code == GIMPLE_COND;
1010}
1011
1012template <>
1013template <>
1014inline bool
1015is_a_helper <gdebug *>::test (gimple *gs)
1016{
1017 return gs->code == GIMPLE_DEBUG;
1018}
1019
1020template <>
1021template <>
1022inline bool
1023is_a_helper <const gdebug *>::test (const gimple *gs)
1024{
1025 return gs->code == GIMPLE_DEBUG;
1026}
1027
1028template <>
1029template <>
1030inline bool
1031is_a_helper <ggoto *>::test (gimple *gs)
1032{
1033 return gs->code == GIMPLE_GOTO;
1034}
1035
1036template <>
1037template <>
1038inline bool
1039is_a_helper <const ggoto *>::test (const gimple *gs)
1040{
1041 return gs->code == GIMPLE_GOTO;
1042}
1043
1044template <>
1045template <>
1046inline bool
1047is_a_helper <glabel *>::test (gimple *gs)
1048{
1049 return gs->code == GIMPLE_LABEL;
1050}
1051
1052template <>
1053template <>
1054inline bool
1055is_a_helper <const glabel *>::test (const gimple *gs)
1056{
1057 return gs->code == GIMPLE_LABEL;
1058}
1059
1060template <>
1061template <>
1062inline bool
1063is_a_helper <gresx *>::test (gimple *gs)
1064{
1065 return gs->code == GIMPLE_RESX;
1066}
1067
1068template <>
1069template <>
1070inline bool
1071is_a_helper <geh_dispatch *>::test (gimple *gs)
1072{
1073 return gs->code == GIMPLE_EH_DISPATCH;
1074}
1075
1076template <>
1077template <>
1078inline bool
1079is_a_helper <geh_else *>::test (gimple *gs)
1080{
1081 return gs->code == GIMPLE_EH_ELSE;
1082}
1083
1084template <>
1085template <>
1086inline bool
1087is_a_helper <const geh_else *>::test (const gimple *gs)
1088{
1089 return gs->code == GIMPLE_EH_ELSE;
1090}
1091
1092template <>
1093template <>
1094inline bool
1095is_a_helper <geh_filter *>::test (gimple *gs)
1096{
1097 return gs->code == GIMPLE_EH_FILTER;
1098}
1099
1100template <>
1101template <>
1102inline bool
1103is_a_helper <geh_mnt *>::test (gimple *gs)
1104{
1105 return gs->code == GIMPLE_EH_MUST_NOT_THROW;
1106}
1107
1108template <>
1109template <>
1110inline bool
1111is_a_helper <const geh_mnt *>::test (const gimple *gs)
1112{
1113 return gs->code == GIMPLE_EH_MUST_NOT_THROW;
1114}
1115
1116template <>
1117template <>
1118inline bool
1119is_a_helper <gomp_atomic_load *>::test (gimple *gs)
1120{
1121 return gs->code == GIMPLE_OMP_ATOMIC_LOAD;
1122}
1123
1124template <>
1125template <>
1126inline bool
1127is_a_helper <gomp_atomic_store *>::test (gimple *gs)
1128{
1129 return gs->code == GIMPLE_OMP_ATOMIC_STORE;
1130}
1131
1132template <>
1133template <>
1134inline bool
1135is_a_helper <gimple_statement_omp_return *>::test (gimple *gs)
1136{
1137 return gs->code == GIMPLE_OMP_RETURN;
1138}
1139
1140template <>
1141template <>
1142inline bool
1143is_a_helper <gomp_continue *>::test (gimple *gs)
1144{
1145 return gs->code == GIMPLE_OMP_CONTINUE;
1146}
1147
1148template <>
1149template <>
1150inline bool
1151is_a_helper <gomp_critical *>::test (gimple *gs)
1152{
1153 return gs->code == GIMPLE_OMP_CRITICAL;
1154}
1155
1156template <>
1157template <>
1158inline bool
1159is_a_helper <gomp_ordered *>::test (gimple *gs)
1160{
1161 return gs->code == GIMPLE_OMP_ORDERED;
1162}
1163
1164template <>
1165template <>
1166inline bool
1167is_a_helper <gomp_scan *>::test (gimple *gs)
1168{
1169 return gs->code == GIMPLE_OMP_SCAN;
1170}
1171
1172template <>
1173template <>
1174inline bool
1175is_a_helper <gomp_for *>::test (gimple *gs)
1176{
1177 return gs->code == GIMPLE_OMP_FOR;
1178}
1179
1180template <>
1181template <>
1182inline bool
1183is_a_helper <gimple_statement_omp_taskreg *>::test (gimple *gs)
1184{
1185 return (gs->code == GIMPLE_OMP_PARALLEL
1186 || gs->code == GIMPLE_OMP_TASK
1187 || gs->code == GIMPLE_OMP_TEAMS);
1188}
1189
1190template <>
1191template <>
1192inline bool
1193is_a_helper <gomp_parallel *>::test (gimple *gs)
1194{
1195 return gs->code == GIMPLE_OMP_PARALLEL;
1196}
1197
1198template <>
1199template <>
1200inline bool
1201is_a_helper <gomp_target *>::test (gimple *gs)
1202{
1203 return gs->code == GIMPLE_OMP_TARGET;
1204}
1205
1206template <>
1207template <>
1208inline bool
1209is_a_helper <gomp_sections *>::test (gimple *gs)
1210{
1211 return gs->code == GIMPLE_OMP_SECTIONS;
1212}
1213
1214template <>
1215template <>
1216inline bool
1217is_a_helper <gomp_single *>::test (gimple *gs)
1218{
1219 return gs->code == GIMPLE_OMP_SINGLE;
1220}
1221
1222template <>
1223template <>
1224inline bool
1225is_a_helper <gomp_teams *>::test (gimple *gs)
1226{
1227 return gs->code == GIMPLE_OMP_TEAMS;
1228}
1229
1230template <>
1231template <>
1232inline bool
1233is_a_helper <gomp_task *>::test (gimple *gs)
1234{
1235 return gs->code == GIMPLE_OMP_TASK;
1236}
1237
1238template <>
1239template <>
1240inline bool
1241is_a_helper <gphi *>::test (gimple *gs)
1242{
1243 return gs->code == GIMPLE_PHI;
1244}
1245
1246template <>
1247template <>
1248inline bool
1249is_a_helper <greturn *>::test (gimple *gs)
1250{
1251 return gs->code == GIMPLE_RETURN;
1252}
1253
1254template <>
1255template <>
1256inline bool
1257is_a_helper <gswitch *>::test (gimple *gs)
1258{
1259 return gs->code == GIMPLE_SWITCH;
1260}
1261
1262template <>
1263template <>
1264inline bool
1265is_a_helper <const gswitch *>::test (const gimple *gs)
1266{
1267 return gs->code == GIMPLE_SWITCH;
1268}
1269
1270template <>
1271template <>
1272inline bool
1273is_a_helper <gtransaction *>::test (gimple *gs)
1274{
1275 return gs->code == GIMPLE_TRANSACTION;
1276}
1277
1278template <>
1279template <>
1280inline bool
1281is_a_helper <gtry *>::test (gimple *gs)
1282{
1283 return gs->code == GIMPLE_TRY;
1284}
1285
1286template <>
1287template <>
1288inline bool
1289is_a_helper <const gtry *>::test (const gimple *gs)
1290{
1291 return gs->code == GIMPLE_TRY;
1292}
1293
1294template <>
1295template <>
1296inline bool
1297is_a_helper <gimple_statement_wce *>::test (gimple *gs)
1298{
1299 return gs->code == GIMPLE_WITH_CLEANUP_EXPR;
1300}
1301
1302template <>
1303template <>
1304inline bool
1305is_a_helper <const gasm *>::test (const gimple *gs)
1306{
1307 return gs->code == GIMPLE_ASM;
1308}
1309
1310template <>
1311template <>
1312inline bool
1313is_a_helper <const gbind *>::test (const gimple *gs)
1314{
1315 return gs->code == GIMPLE_BIND;
1316}
1317
1318template <>
1319template <>
1320inline bool
1321is_a_helper <const gcall *>::test (const gimple *gs)
1322{
1323 return gs->code == GIMPLE_CALL;
1324}
1325
1326template <>
1327template <>
1328inline bool
1329is_a_helper <const gcatch *>::test (const gimple *gs)
1330{
1331 return gs->code == GIMPLE_CATCH;
1332}
1333
1334template <>
1335template <>
1336inline bool
1337is_a_helper <const gresx *>::test (const gimple *gs)
1338{
1339 return gs->code == GIMPLE_RESX;
1340}
1341
1342template <>
1343template <>
1344inline bool
1345is_a_helper <const geh_dispatch *>::test (const gimple *gs)
1346{
1347 return gs->code == GIMPLE_EH_DISPATCH;
1348}
1349
1350template <>
1351template <>
1352inline bool
1353is_a_helper <const geh_filter *>::test (const gimple *gs)
1354{
1355 return gs->code == GIMPLE_EH_FILTER;
1356}
1357
1358template <>
1359template <>
1360inline bool
1361is_a_helper <const gomp_atomic_load *>::test (const gimple *gs)
1362{
1363 return gs->code == GIMPLE_OMP_ATOMIC_LOAD;
1364}
1365
1366template <>
1367template <>
1368inline bool
1369is_a_helper <const gomp_atomic_store *>::test (const gimple *gs)
1370{
1371 return gs->code == GIMPLE_OMP_ATOMIC_STORE;
1372}
1373
1374template <>
1375template <>
1376inline bool
1377is_a_helper <const gimple_statement_omp_return *>::test (const gimple *gs)
1378{
1379 return gs->code == GIMPLE_OMP_RETURN;
1380}
1381
1382template <>
1383template <>
1384inline bool
1385is_a_helper <const gomp_continue *>::test (const gimple *gs)
1386{
1387 return gs->code == GIMPLE_OMP_CONTINUE;
1388}
1389
1390template <>
1391template <>
1392inline bool
1393is_a_helper <const gomp_critical *>::test (const gimple *gs)
1394{
1395 return gs->code == GIMPLE_OMP_CRITICAL;
1396}
1397
1398template <>
1399template <>
1400inline bool
1401is_a_helper <const gomp_ordered *>::test (const gimple *gs)
1402{
1403 return gs->code == GIMPLE_OMP_ORDERED;
1404}
1405
1406template <>
1407template <>
1408inline bool
1409is_a_helper <const gomp_scan *>::test (const gimple *gs)
1410{
1411 return gs->code == GIMPLE_OMP_SCAN;
1412}
1413
1414template <>
1415template <>
1416inline bool
1417is_a_helper <const gomp_for *>::test (const gimple *gs)
1418{
1419 return gs->code == GIMPLE_OMP_FOR;
1420}
1421
1422template <>
1423template <>
1424inline bool
1425is_a_helper <const gimple_statement_omp_taskreg *>::test (const gimple *gs)
1426{
1427 return (gs->code == GIMPLE_OMP_PARALLEL
1428 || gs->code == GIMPLE_OMP_TASK
1429 || gs->code == GIMPLE_OMP_TEAMS);
1430}
1431
1432template <>
1433template <>
1434inline bool
1435is_a_helper <const gomp_parallel *>::test (const gimple *gs)
1436{
1437 return gs->code == GIMPLE_OMP_PARALLEL;
1438}
1439
1440template <>
1441template <>
1442inline bool
1443is_a_helper <const gomp_target *>::test (const gimple *gs)
1444{
1445 return gs->code == GIMPLE_OMP_TARGET;
1446}
1447
1448template <>
1449template <>
1450inline bool
1451is_a_helper <const gomp_sections *>::test (const gimple *gs)
1452{
1453 return gs->code == GIMPLE_OMP_SECTIONS;
1454}
1455
1456template <>
1457template <>
1458inline bool
1459is_a_helper <const gomp_single *>::test (const gimple *gs)
1460{
1461 return gs->code == GIMPLE_OMP_SINGLE;
1462}
1463
1464template <>
1465template <>
1466inline bool
1467is_a_helper <const gomp_teams *>::test (const gimple *gs)
1468{
1469 return gs->code == GIMPLE_OMP_TEAMS;
1470}
1471
1472template <>
1473template <>
1474inline bool
1475is_a_helper <const gomp_task *>::test (const gimple *gs)
1476{
1477 return gs->code == GIMPLE_OMP_TASK;
1478}
1479
1480template <>
1481template <>
1482inline bool
1483is_a_helper <const gphi *>::test (const gimple *gs)
1484{
1485 return gs->code == GIMPLE_PHI;
1486}
1487
1488template <>
1489template <>
1490inline bool
1491is_a_helper <const greturn *>::test (const gimple *gs)
1492{
1493 return gs->code == GIMPLE_RETURN;
1494}
1495
1496template <>
1497template <>
1498inline bool
1499is_a_helper <const gtransaction *>::test (const gimple *gs)
1500{
1501 return gs->code == GIMPLE_TRANSACTION;
1502}
1503
1504/* Offset in bytes to the location of the operand vector.
1505 Zero if there is no operand vector for this tuple structure. */
1506extern size_t const gimple_ops_offset_[];
1507
1508/* Map GIMPLE codes to GSS codes. */
1509extern enum gimple_statement_structure_enum const gss_for_code_[];
1510
1511/* This variable holds the currently expanded gimple statement for purposes
1512   of communicating the profile info to the builtin expanders. */
1513extern gimple *currently_expanding_gimple_stmt;
1514
1515size_t gimple_size (enum gimple_code code, unsigned num_ops = 0);
1516void gimple_init (gimple *g, enum gimple_code code, unsigned num_ops);
1517gimple *gimple_alloc (enum gimple_code, unsigned CXX_MEM_STAT_INFO);
1518greturn *gimple_build_return (tree);
1519void gimple_call_reset_alias_info (gcall *);
1520gcall *gimple_build_call_vec (tree, const vec<tree> &);
1521gcall *gimple_build_call (tree, unsigned, ...);
1522gcall *gimple_build_call_valist (tree, unsigned, va_list);
1523gcall *gimple_build_call_internal (enum internal_fn, unsigned, ...);
1524gcall *gimple_build_call_internal_vec (enum internal_fn, const vec<tree> &);
1525gcall *gimple_build_call_from_tree (tree, tree);
1526gassign *gimple_build_assign (tree, tree CXX_MEM_STAT_INFO);
1527gassign *gimple_build_assign (tree, enum tree_code,
1528 tree, tree, tree CXX_MEM_STAT_INFO);
1529gassign *gimple_build_assign (tree, enum tree_code,
1530 tree, tree CXX_MEM_STAT_INFO);
1531gassign *gimple_build_assign (tree, enum tree_code, tree CXX_MEM_STAT_INFO);
1532gcond *gimple_build_cond (enum tree_code, tree, tree, tree, tree);
1533gcond *gimple_build_cond_from_tree (tree, tree, tree);
1534void gimple_cond_set_condition_from_tree (gcond *, tree);
1535glabel *gimple_build_label (tree label);
1536ggoto *gimple_build_goto (tree dest);
1537gimple *gimple_build_nop (void);
1538gbind *gimple_build_bind (tree, gimple_seq, tree);
1539gasm *gimple_build_asm_vec (const char *, vec<tree, va_gc> *,
1540 vec<tree, va_gc> *, vec<tree, va_gc> *,
1541 vec<tree, va_gc> *);
1542gcatch *gimple_build_catch (tree, gimple_seq);
1543geh_filter *gimple_build_eh_filter (tree, gimple_seq);
1544geh_mnt *gimple_build_eh_must_not_throw (tree);
1545geh_else *gimple_build_eh_else (gimple_seq, gimple_seq);
1546gtry *gimple_build_try (gimple_seq, gimple_seq,
1547 enum gimple_try_flags);
1548gimple *gimple_build_wce (gimple_seq);
1549gresx *gimple_build_resx (int);
1550gswitch *gimple_build_switch_nlabels (unsigned, tree, tree);
1551gswitch *gimple_build_switch (tree, tree, const vec<tree> &);
1552geh_dispatch *gimple_build_eh_dispatch (int);
1553gdebug *gimple_build_debug_bind (tree, tree, gimple * CXX_MEM_STAT_INFO);
1554gdebug *gimple_build_debug_source_bind (tree, tree, gimple * CXX_MEM_STAT_INFO);
1555gdebug *gimple_build_debug_begin_stmt (tree, location_t CXX_MEM_STAT_INFO);
1556gdebug *gimple_build_debug_inline_entry (tree, location_t CXX_MEM_STAT_INFO);
1557gomp_critical *gimple_build_omp_critical (gimple_seq, tree, tree);
1558gomp_for *gimple_build_omp_for (gimple_seq, int, tree, size_t, gimple_seq);
1559gomp_parallel *gimple_build_omp_parallel (gimple_seq, tree, tree, tree);
1560gomp_task *gimple_build_omp_task (gimple_seq, tree, tree, tree, tree,
1561 tree, tree);
1562gimple *gimple_build_omp_section (gimple_seq);
1563gimple *gimple_build_omp_scope (gimple_seq, tree);
1564gimple *gimple_build_omp_master (gimple_seq);
1565gimple *gimple_build_omp_masked (gimple_seq, tree);
1566gimple *gimple_build_omp_taskgroup (gimple_seq, tree);
1567gomp_continue *gimple_build_omp_continue (tree, tree);
1568gomp_ordered *gimple_build_omp_ordered (gimple_seq, tree);
1569gimple *gimple_build_omp_return (bool);
1570gomp_scan *gimple_build_omp_scan (gimple_seq, tree);
1571gomp_sections *gimple_build_omp_sections (gimple_seq, tree);
1572gimple *gimple_build_omp_sections_switch (void);
1573gomp_single *gimple_build_omp_single (gimple_seq, tree);
1574gomp_target *gimple_build_omp_target (gimple_seq, int, tree);
1575gomp_teams *gimple_build_omp_teams (gimple_seq, tree);
1576gomp_atomic_load *gimple_build_omp_atomic_load (tree, tree,
1577 enum omp_memory_order);
1578gomp_atomic_store *gimple_build_omp_atomic_store (tree, enum omp_memory_order);
1579gtransaction *gimple_build_transaction (gimple_seq);
1580extern void gimple_seq_add_stmt (gimple_seq *, gimple *);
1581extern void gimple_seq_add_stmt_without_update (gimple_seq *, gimple *);
1582void gimple_seq_add_seq (gimple_seq *, gimple_seq);
1583void gimple_seq_add_seq_without_update (gimple_seq *, gimple_seq);
1584extern void annotate_all_with_location_after (gimple_seq, gimple_stmt_iterator,
1585 location_t);
1586extern void annotate_all_with_location (gimple_seq, location_t);
1587bool empty_body_p (gimple_seq);
1588gimple_seq gimple_seq_copy (gimple_seq);
1589bool gimple_call_same_target_p (const gimple *, const gimple *);
1590int gimple_call_flags (const gimple *);
1591int gimple_call_arg_flags (const gcall *, unsigned);
1592int gimple_call_retslot_flags (const gcall *);
1593int gimple_call_static_chain_flags (const gcall *);
1594int gimple_call_return_flags (const gcall *);
1595bool gimple_call_nonnull_result_p (gcall *);
1596tree gimple_call_nonnull_arg (gcall *);
1597bool gimple_assign_copy_p (gimple *);
1598bool gimple_assign_ssa_name_copy_p (gimple *);
1599bool gimple_assign_unary_nop_p (gimple *);
1600void gimple_set_bb (gimple *, basic_block);
1601void gimple_assign_set_rhs_from_tree (gimple_stmt_iterator *, tree);
1602void gimple_assign_set_rhs_with_ops (gimple_stmt_iterator *, enum tree_code,
1603 tree, tree, tree);
1604tree gimple_get_lhs (const gimple *);
1605void gimple_set_lhs (gimple *, tree);
1606gimple *gimple_copy (gimple *);
1607void gimple_move_vops (gimple *, gimple *);
1608bool gimple_has_side_effects (const gimple *);
1609bool gimple_could_trap_p_1 (const gimple *, bool, bool);
1610bool gimple_could_trap_p (const gimple *);
1611bool gimple_assign_rhs_could_trap_p (gimple *);
1612extern void dump_gimple_statistics (void);
1613unsigned get_gimple_rhs_num_ops (enum tree_code);
1614extern tree canonicalize_cond_expr_cond (tree);
1615gcall *gimple_call_copy_skip_args (gcall *, bitmap);
1616extern bool gimple_compare_field_offset (tree, tree);
1617extern tree gimple_unsigned_type (tree);
1618extern tree gimple_signed_type (tree);
1619extern alias_set_type gimple_get_alias_set (tree);
1620extern bool gimple_ior_addresses_taken (bitmap, gimple *);
1621extern bool gimple_builtin_call_types_compatible_p (const gimple *, tree);
1622extern combined_fn gimple_call_combined_fn (const gimple *);
1623extern bool gimple_call_operator_delete_p (const gcall *);
1624extern bool gimple_call_builtin_p (const gimple *);
1625extern bool gimple_call_builtin_p (const gimple *, enum built_in_class);
1626extern bool gimple_call_builtin_p (const gimple *, enum built_in_function);
1627extern bool gimple_asm_clobbers_memory_p (const gasm *);
1628extern void dump_decl_set (FILE *, bitmap);
1629extern bool nonfreeing_call_p (gimple *);
1630extern bool nonbarrier_call_p (gimple *);
1631extern bool infer_nonnull_range (gimple *, tree);
1632extern bool infer_nonnull_range_by_dereference (gimple *, tree);
1633extern bool infer_nonnull_range_by_attribute (gimple *, tree);
1634extern void sort_case_labels (vec<tree> &);
1635extern void preprocess_case_label_vec_for_gimple (vec<tree> &, tree, tree *);
1636extern void gimple_seq_set_location (gimple_seq, location_t);
1637extern void gimple_seq_discard (gimple_seq);
1638extern void maybe_remove_unused_call_args (struct function *, gimple *);
1639extern bool gimple_inexpensive_call_p (gcall *);
1640extern bool stmt_can_terminate_bb_p (gimple *);
1641extern location_t gimple_or_expr_nonartificial_location (gimple *, tree);
1642
1643/* Return the disposition for a warning (or all warnings by default)
1644 for a statement. */
1645extern bool warning_suppressed_p (const gimple *, opt_code = all_warnings)
1646  ATTRIBUTE_NONNULL (1);
1647/* Set the disposition for a warning (or all warnings by default)
1648 at a location to enabled by default. */
1649extern void suppress_warning (gimple *, opt_code = all_warnings,
1650			      bool = true) ATTRIBUTE_NONNULL (1);
1651
1652/* Copy the warning disposition mapping from one statement to another. */
1653extern void copy_warning (gimple *, const gimple *)
1654  ATTRIBUTE_NONNULL (1) ATTRIBUTE_NONNULL (2);
1655/* Copy the warning disposition mapping from an expression to a statement. */
1656extern void copy_warning (gimple *, const_tree)
1657  ATTRIBUTE_NONNULL (1) ATTRIBUTE_NONNULL (2);
1658/* Copy the warning disposition mapping from a statement to an expression. */
1659extern void copy_warning (tree, const gimple *)
1660  ATTRIBUTE_NONNULL (1) ATTRIBUTE_NONNULL (2);
1661
1662/* Formal (expression) temporary table handling: multiple occurrences of
1663 the same scalar expression are evaluated into the same temporary. */
1664
1665typedef struct gimple_temp_hash_elt
1666{
1667 tree val; /* Key */
1668 tree temp; /* Value */
1669} elt_t;
1670
1671/* Get the number of the next statement uid to be allocated. */
1672static inline unsigned int
1673gimple_stmt_max_uid (struct function *fn)
1674{
1675 return fn->last_stmt_uid;
1676}
1677
1678/* Set the number of the next statement uid to be allocated. */
1679static inline void
1680set_gimple_stmt_max_uid (struct function *fn, unsigned int maxid)
1681{
1682 fn->last_stmt_uid = maxid;
1683}
1684
1685/* Allocate and return the next statement uid, incrementing the counter. */
1686static inline unsigned int
1687inc_gimple_stmt_max_uid (struct function *fn)
1688{
1689 return fn->last_stmt_uid++;
1690}
1691
1692/* Return the first node in GIMPLE sequence S. */
1693
1694static inline gimple_seq_node
1695gimple_seq_first (gimple_seq s)
1696{
1697 return s;
1698}
1699
1700
1701/* Return the first statement in GIMPLE sequence S. */
1702
1703static inline gimple *
1704gimple_seq_first_stmt (gimple_seq s)
1705{
1706 gimple_seq_node n = gimple_seq_first (s);
1707 return n;
1708}
1709
1710/* Return the first statement in GIMPLE sequence S as a gbind *,
1711 verifying that it has code GIMPLE_BIND in a checked build. */
1712
1713static inline gbind *
1714gimple_seq_first_stmt_as_a_bind (gimple_seq s)
1715{
1716 gimple_seq_node n = gimple_seq_first (s);
1717 return as_a <gbind *> (n);
1718}
1719
1720
1721/* Return the last node in GIMPLE sequence S. */
1722
1723static inline gimple_seq_node
1724gimple_seq_last (gimple_seq s)
1725{
1726  return s ? s->prev : NULL;
1727}
1728
1729
1730/* Return the last statement in GIMPLE sequence S. */
1731
1732static inline gimple *
1733gimple_seq_last_stmt (gimple_seq s)
1734{
1735 gimple_seq_node n = gimple_seq_last (s);
1736 return n;
1737}
1738
1739
1740/* Set the last node in GIMPLE sequence *PS to LAST. */
1741
1742static inline void
1743gimple_seq_set_last (gimple_seq *ps, gimple_seq_node last)
1744{
1745 (*ps)->prev = last;
1746}
1747
1748
1749/* Set the first node in GIMPLE sequence *PS to FIRST. */
1750
1751static inline void
1752gimple_seq_set_first (gimple_seq *ps, gimple_seq_node first)
1753{
1754 *ps = first;
1755}
1756
1757
1758/* Return true if GIMPLE sequence S is empty. */
1759
1760static inline bool
1761gimple_seq_empty_p (gimple_seq s)
1762{
1763  return s == NULL;
1764}
1765
1766/* Allocate a new sequence and initialize its first element with STMT. */
1767
1768static inline gimple_seq
1769gimple_seq_alloc_with_stmt (gimple *stmt)
1770{
1771  gimple_seq seq = NULL;
1772 gimple_seq_add_stmt (&seq, stmt);
1773 return seq;
1774}
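
[Editor's note — a minimal sketch combining the helper above with the builders declared earlier in this header:

    gimple_seq seq = gimple_seq_alloc_with_stmt (gimple_build_nop ());
    gimple_seq_add_stmt (&seq, gimple_build_nop ());
    /* seq now holds two statements; gimple_seq_singleton_p (seq) is false.  */
]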
1775
1776
1777/* Returns the sequence of statements in BB. */
1778
1779static inline gimple_seq
1780bb_seq (const_basic_block bb)
1781{
1782  return (!(bb->flags & BB_RTL)) ? bb->il.gimple.seq : NULL;
1783}
1784
1785static inline gimple_seq *
1786bb_seq_addr (basic_block bb)
1787{
1788  return (!(bb->flags & BB_RTL)) ? &bb->il.gimple.seq : NULL;
1789}
1790
1791/* Sets the sequence of statements in BB to SEQ. */
1792
1793static inline void
1794set_bb_seq (basic_block bb, gimple_seq seq)
1795{
1796  gcc_checking_assert (!(bb->flags & BB_RTL));
1797 bb->il.gimple.seq = seq;
1798}
1799
1800
1801/* Return the code for GIMPLE statement G. */
1802
1803static inline enum gimple_code
1804gimple_code (const gimple *g)
1805{
1806 return g->code;
1807}
1808
1809
1810/* Return the GSS code used by a GIMPLE code. */
1811
1812static inline enum gimple_statement_structure_enum
1813gss_for_code (enum gimple_code code)
1814{
1815  gcc_gimple_checking_assert ((unsigned int)code < LAST_AND_UNUSED_GIMPLE_CODE);
1816 return gss_for_code_[code];
1817}
1818
1819
1820/* Return which GSS code is used by GS. */
1821
1822static inline enum gimple_statement_structure_enum
1823gimple_statement_structure (gimple *gs)
1824{
1825 return gss_for_code (gimple_code (gs));
1826}
1827
1828
1829/* Return true if statement G has sub-statements. This is only true for
1830 High GIMPLE statements. */
1831
1832static inline bool
1833gimple_has_substatements (gimple *g)
1834{
1835 switch (gimple_code (g))
1836 {
1837 case GIMPLE_BIND:
1838 case GIMPLE_CATCH:
1839 case GIMPLE_EH_FILTER:
1840 case GIMPLE_EH_ELSE:
1841 case GIMPLE_TRY:
1842 case GIMPLE_OMP_FOR:
1843 case GIMPLE_OMP_MASTER:
1844 case GIMPLE_OMP_MASKED:
1845 case GIMPLE_OMP_TASKGROUP:
1846 case GIMPLE_OMP_ORDERED:
1847 case GIMPLE_OMP_SECTION:
1848 case GIMPLE_OMP_PARALLEL:
1849 case GIMPLE_OMP_TASK:
1850 case GIMPLE_OMP_SCOPE:
1851 case GIMPLE_OMP_SECTIONS:
1852 case GIMPLE_OMP_SINGLE:
1853 case GIMPLE_OMP_TARGET:
1854 case GIMPLE_OMP_TEAMS:
1855 case GIMPLE_OMP_CRITICAL:
1856 case GIMPLE_WITH_CLEANUP_EXPR:
1857 case GIMPLE_TRANSACTION:
1858 return true;
1859
1860 default:
1861 return false;
1862 }
1863}
1864
1865
1866/* Return the basic block holding statement G. */
1867
1868static inline basic_block
1869gimple_bb (const gimple *g)
1870{
1871 return g->bb;
1872}
1873
1874
1875/* Return the lexical scope block holding statement G. */
1876
1877static inline tree
1878gimple_block (const gimple *g)
1879{
1880  return LOCATION_BLOCK (g->location);
1881}
1882
1883/* Forward declare. */
1884static inline void gimple_set_location (gimple *, location_t);
1885
1886/* Set BLOCK to be the lexical scope block holding statement G. */
1887
1888static inline void
1889gimple_set_block (gimple *g, tree block)
1890{
1891 gimple_set_location (g, set_block (g->location, block));
1892}
1893
1894/* Return location information for statement G. */
1895
1896static inline location_t
1897gimple_location (const gimple *g)
1898{
1899 return g->location;
1900}
1901
1902/* Return location information for statement G if g is not NULL.
1903 Otherwise, UNKNOWN_LOCATION is returned. */
1904
1905static inline location_t
1906gimple_location_safe (const gimple *g)
1907{
1908  return g ? gimple_location (g) : UNKNOWN_LOCATION;
1909}
1910
1911/* Set location information for statement G. */
1912
1913static inline void
1914gimple_set_location (gimple *g, location_t location)
1915{
1916 /* Copy the no-warning data to the statement location. */
1917 copy_warning (location, g->location);
1918 g->location = location;
1919}
1920
1921/* Return address of the location information for statement G. */
1922
1923static inline location_t *
1924gimple_location_ptr (gimple *g)
1925{
1926 return &g->location;
1927}
1928
1929
1930/* Return true if G contains location information. */
1931
1932static inline bool
1933gimple_has_location (const gimple *g)
1934{
1935  return LOCATION_LOCUS (gimple_location (g)) != UNKNOWN_LOCATION;
1936}
1937
1938
1939/* Return non-artificial location information for statement G. */
1940
1941static inline location_t
1942gimple_nonartificial_location (const gimple *g)
1943{
1944  location_t *ploc = NULL;
1945
1946 if (tree block = gimple_block (g))
1947 ploc = block_nonartificial_location (block);
1948
1949 return ploc ? *ploc : gimple_location (g);
1950}
1951
1952
1953/* Return the file name of the location of STMT. */
1954
1955static inline const char *
1956gimple_filename (const gimple *stmt)
1957{
1958  return LOCATION_FILE (gimple_location (stmt));
1959}
1960
1961
1962/* Return the line number of the location of STMT. */
1963
1964static inline int
1965gimple_lineno (const gimple *stmt)
1966{
1967  return LOCATION_LINE (gimple_location (stmt));
1968}
1969
1970
1971/* Determine whether SEQ is a singleton. */
1972
1973static inline bool
1974gimple_seq_singleton_p (gimple_seq seq)
1975{
1976  return ((gimple_seq_first (seq) != NULL)
1977 && (gimple_seq_first (seq) == gimple_seq_last (seq)));
1978}
1979
1980/* Return true if no warnings should be emitted for statement STMT. */
1981
1982static inline bool
1983gimple_no_warning_p (const gimple *stmt)
1984{
1985 return stmt->no_warning;
1986}
1987
1988/* Set the no_warning flag of STMT to NO_WARNING. */
1989
1990static inline void
1991gimple_set_no_warning (gimple *stmt, bool no_warning)
1992{
1993 stmt->no_warning = (unsigned) no_warning;
1994}
1995
1996/* Set the visited status on statement STMT to VISITED_P.
1997
1998 Please note that this 'visited' property of the gimple statement is
1999 supposed to be undefined at pass boundaries. This means that a
2000 given pass should not assume it contains any useful value when the
2001 pass starts and thus can set it to any value it sees fit.
2002
2003 You can learn more about the visited property of the gimple
2004 statement by reading the comments of the 'visited' data member of
2005 struct gimple.
2006 */
2007
2008static inline void
2009gimple_set_visited (gimple *stmt, bool visited_p)
2010{
2011 stmt->visited = (unsigned) visited_p;
2012}
2013
2014
2015/* Return the visited status for statement STMT.
2016
2017 Please note that this 'visited' property of the gimple statement is
2018 supposed to be undefined at pass boundaries. This means that a
2019 given pass should not assume it contains any useful value when the
2020 pass starts and thus can set it to any value it sees fit.
2021
2022 You can learn more about the visited property of the gimple
2023 statement by reading the comments of the 'visited' data member of
2024 struct gimple. */
2025
2026static inline bool
2027gimple_visited_p (gimple *stmt)
2028{
2029 return stmt->visited;
2030}
2031
2032
2033/* Set pass local flag PLF on statement STMT to VAL_P.
2034
2035 Please note that this PLF property of the gimple statement is
2036 supposed to be undefined at pass boundaries. This means that a
2037 given pass should not assume it contains any useful value when the
2038 pass starts and thus can set it to any value it sees fit.
2039
2040 You can learn more about the PLF property by reading the comment of
2041 the 'plf' data member of struct gimple_statement_structure. */
2042
2043static inline void
2044gimple_set_plf (gimple *stmt, enum plf_mask plf, bool val_p)
2045{
2046 if (val_p)
2047 stmt->plf |= (unsigned int) plf;
2048 else
2049 stmt->plf &= ~((unsigned int) plf);
2050}
2051
2052
2053/* Return the value of pass local flag PLF on statement STMT.
2054
2055 Please note that this 'plf' property of the gimple statement is
2056 supposed to be undefined at pass boundaries. This means that a
2057 given pass should not assume it contains any useful value when the
2058 pass starts and thus can set it to any value it sees fit.
2059
2060 You can learn more about the plf property by reading the comment of
2061 the 'plf' data member of struct gimple_statement_structure. */
2062
2063static inline unsigned int
2064gimple_plf (gimple *stmt, enum plf_mask plf)
2065{
2066 return stmt->plf & ((unsigned int) plf);
2067}
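
Because PLF bits are undefined at pass boundaries, a pass must initialize them itself before relying on them. A minimal sketch (illustration only, not part of gimple.h) using GF_PLF_1 as a per-pass "processed" marker:

/* Sketch: return true the first time STMT is seen in this pass.  */

static bool
process_stmt_once (gimple *stmt)
{
  if (gimple_plf (stmt, GF_PLF_1))
    return false;			/* Seen earlier in this pass.  */
  gimple_set_plf (stmt, GF_PLF_1, true);
  return true;
}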
2068
2069
2070/* Set the UID of statement.
2071
2072 Please note that this UID property is supposed to be undefined at
2073 pass boundaries. This means that a given pass should not assume it
2074 contains any useful value when the pass starts and thus can set it
2075 to any value it sees fit. */
2076
2077static inline void
2078gimple_set_uid (gimple *g, unsigned uid)
2079{
2080 g->uid = uid;
2081}
2082
2083
2084/* Return the UID of statement.
2085
2086 Please note that this UID property is supposed to be undefined at
2087 pass boundaries. This means that a given pass should not assume it
2088 contains any useful value when the pass starts and thus can set it
2089 to any value it sees fit. */
2090
2091static inline unsigned
2092gimple_uid (const gimple *g)
2093{
2094 return g->uid;
2095}
2096
2097
2098/* Make statement G a singleton sequence. */
2099
2100static inline void
2101gimple_init_singleton (gimple *g)
2102{
2103 g->next = NULL__null;
2104 g->prev = g;
2105}
2106
2107
2108/* Return true if GIMPLE statement G has register or memory operands. */
2109
2110static inline bool
2111gimple_has_ops (const gimple *g)
2112{
2113 return gimple_code (g) >= GIMPLE_COND && gimple_code (g) <= GIMPLE_RETURN;
2114}
2115
2116template <>
2117template <>
2118inline bool
2119is_a_helper <const gimple_statement_with_ops *>::test (const gimple *gs)
2120{
2121 return gimple_has_ops (gs);
2122}
2123
2124template <>
2125template <>
2126inline bool
2127is_a_helper <gimple_statement_with_ops *>::test (gimple *gs)
2128{
2129 return gimple_has_ops (gs);
2130}
2131
2132/* Return true if GIMPLE statement G has memory operands. */
2133
2134static inline bool
2135gimple_has_mem_ops (const gimple *g)
2136{
2137 return gimple_code (g) >= GIMPLE_ASSIGN && gimple_code (g) <= GIMPLE_RETURN;
2138}
2139
2140template <>
2141template <>
2142inline bool
2143is_a_helper <const gimple_statement_with_memory_ops *>::test (const gimple *gs)
2144{
2145 return gimple_has_mem_ops (gs);
2146}
2147
2148template <>
2149template <>
2150inline bool
2151is_a_helper <gimple_statement_with_memory_ops *>::test (gimple *gs)
2152{
2153 return gimple_has_mem_ops (gs);
2154}
2155
2156/* Return the set of USE operands for statement G. */
2157
2158static inline struct use_optype_d *
2159gimple_use_ops (const gimple *g)
2160{
2161 const gimple_statement_with_ops *ops_stmt =
2162 dyn_cast <const gimple_statement_with_ops *> (g);
2163 if (!ops_stmt)
2164 return NULL__null;
2165 return ops_stmt->use_ops;
2166}
2167
2168
2169/* Set USE to be the set of USE operands for statement G. */
2170
2171static inline void
2172gimple_set_use_ops (gimple *g, struct use_optype_d *use)
2173{
2174 gimple_statement_with_ops *ops_stmt =
2175 as_a <gimple_statement_with_ops *> (g);
2176 ops_stmt->use_ops = use;
2177}
2178
2179
2180/* Return the single VUSE operand of the statement G. */
2181
2182static inline tree
2183gimple_vuse (const gimple *g)
2184{
2185 const gimple_statement_with_memory_ops *mem_ops_stmt =
2186 dyn_cast <const gimple_statement_with_memory_ops *> (g);
2187 if (!mem_ops_stmt)
2188 return NULL_TREE(tree) __null;
2189 return mem_ops_stmt->vuse;
2190}
2191
2192/* Return the single VDEF operand of the statement G. */
2193
2194static inline tree
2195gimple_vdef (const gimple *g)
2196{
2197 const gimple_statement_with_memory_ops *mem_ops_stmt =
2198 dyn_cast <const gimple_statement_with_memory_ops *> (g);
2199 if (!mem_ops_stmt)
2200 return NULL_TREE(tree) __null;
2201 return mem_ops_stmt->vdef;
2202}
2203
2204/* Return the single VUSE operand of the statement G. */
2205
2206static inline tree *
2207gimple_vuse_ptr (gimple *g)
2208{
2209 gimple_statement_with_memory_ops *mem_ops_stmt =
2210 dyn_cast <gimple_statement_with_memory_ops *> (g);
2211 if (!mem_ops_stmt)
2212 return NULL__null;
2213 return &mem_ops_stmt->vuse;
2214}
2215
2216/* Return the single VDEF operand of the statement G. */
2217
2218static inline tree *
2219gimple_vdef_ptr (gimple *g)
2220{
2221 gimple_statement_with_memory_ops *mem_ops_stmt =
2222 dyn_cast <gimple_statement_with_memory_ops *> (g);
2223 if (!mem_ops_stmt)
2224 return NULL__null;
2225 return &mem_ops_stmt->vdef;
2226}
2227
2228/* Set the single VUSE operand of the statement G. */
2229
2230static inline void
2231gimple_set_vuse (gimple *g, tree vuse)
2232{
2233 gimple_statement_with_memory_ops *mem_ops_stmt =
2234 as_a <gimple_statement_with_memory_ops *> (g);
2235 mem_ops_stmt->vuse = vuse;
2236}
2237
2238/* Set the single VDEF operand of the statement G. */
2239
2240static inline void
2241gimple_set_vdef (gimple *g, tree vdef)
2242{
2243 gimple_statement_with_memory_ops *mem_ops_stmt =
2244 as_a <gimple_statement_with_memory_ops *> (g);
2245 mem_ops_stmt->vdef = vdef;
2246}
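
An illustrative sketch (not part of gimple.h): statements that may write memory carry a VDEF, while pure readers carry only a VUSE, so the two accessors give a cheap memory-access classification.

/* Sketch: classify STMT by its virtual operands.  */

static void
classify_memory_access (const gimple *stmt, bool *reads, bool *writes)
{
  *writes = gimple_vdef (stmt) != NULL_TREE;
  /* Stores carry a VUSE as well, so READS is true for them too.  */
  *reads = gimple_vuse (stmt) != NULL_TREE;
}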
2247
2248
2249/* Return true if statement G has operands and the modified field has
2250 been set. */
2251
2252static inline bool
2253gimple_modified_p (const gimple *g)
2254{
2255 return (gimple_has_ops (g)) ? (bool) g->modified : false;
2256}
2257
2258
2259/* Set the MODIFIED flag to MODIFIEDP, iff the gimple statement G has
2260 a MODIFIED field. */
2261
2262static inline void
2263gimple_set_modified (gimple *s, bool modifiedp)
2264{
2265 if (gimple_has_ops (s))
2266 s->modified = (unsigned) modifiedp;
2267}
2268
2269
2270/* Return true if statement STMT contains volatile operands. */
2271
2272static inline bool
2273gimple_has_volatile_ops (const gimple *stmt)
2274{
2275 if (gimple_has_mem_ops (stmt))
2276 return stmt->has_volatile_ops;
2277 else
2278 return false;
2279}
2280
2281
2282/* Set the HAS_VOLATILE_OPS flag to VOLATILEP. */
2283
2284static inline void
2285gimple_set_has_volatile_ops (gimple *stmt, bool volatilep)
2286{
2287 if (gimple_has_mem_ops (stmt))
2288 stmt->has_volatile_ops = (unsigned) volatilep;
2289}
2290
2291/* Return true if STMT is in a transaction. */
2292
2293static inline bool
2294gimple_in_transaction (const gimple *stmt)
2295{
2296 return bb_in_transaction (gimple_bb (stmt));
2297}
2298
2299/* Return true if statement STMT may access memory. */
2300
2301static inline bool
2302gimple_references_memory_p (gimple *stmt)
2303{
2304 return gimple_has_mem_ops (stmt) && gimple_vuse (stmt);
2305}
2306
2307
2308/* Return the subcode for OMP statement S. */
2309
2310static inline unsigned
2311gimple_omp_subcode (const gimple *s)
2312{
2313 gcc_gimple_checking_assert (gimple_code (s) >= GIMPLE_OMP_ATOMIC_LOAD((void)(!(gimple_code (s) >= GIMPLE_OMP_ATOMIC_LOAD &&
gimple_code (s) <= GIMPLE_OMP_TEAMS) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.h"
, 2314, __FUNCTION__), 0 : 0))
2314 && gimple_code (s) <= GIMPLE_OMP_TEAMS)((void)(!(gimple_code (s) >= GIMPLE_OMP_ATOMIC_LOAD &&
gimple_code (s) <= GIMPLE_OMP_TEAMS) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.h"
, 2314, __FUNCTION__), 0 : 0))
;
2315 return s->subcode;
2316}
2317
2318/* Set the subcode for OMP statement S to SUBCODE. */
2319
2320static inline void
2321gimple_omp_set_subcode (gimple *s, unsigned int subcode)
2322{
2323 /* We only have 16 bits for the subcode. Assert that we are not
2324 overflowing it. */
2325 gcc_gimple_checking_assert (subcode < (1 << 16))((void)(!(subcode < (1 << 16)) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.h"
, 2325, __FUNCTION__), 0 : 0))
;
2326 s->subcode = subcode;
2327}
2328
2329/* Set the nowait flag on OMP_RETURN statement S. */
2330
2331static inline void
2332gimple_omp_return_set_nowait (gimple *s)
2333{
2334 GIMPLE_CHECK (s, GIMPLE_OMP_RETURN)do { const gimple *__gs = (s); if (gimple_code (__gs) != (GIMPLE_OMP_RETURN
)) gimple_check_failed (__gs, "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.h"
, 2334, __FUNCTION__, (GIMPLE_OMP_RETURN), ERROR_MARK); } while
(0)
;
2335 s->subcode |= GF_OMP_RETURN_NOWAIT;
2336}
2337
2338
2339/* Return true if OMP return statement G has the GF_OMP_RETURN_NOWAIT
2340 flag set. */
2341
2342static inline bool
2343gimple_omp_return_nowait_p (const gimple *g)
2344{
2345 GIMPLE_CHECK (g, GIMPLE_OMP_RETURN)do { const gimple *__gs = (g); if (gimple_code (__gs) != (GIMPLE_OMP_RETURN
)) gimple_check_failed (__gs, "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.h"
, 2345, __FUNCTION__, (GIMPLE_OMP_RETURN), ERROR_MARK); } while
(0)
;
2346 return (gimple_omp_subcode (g) & GF_OMP_RETURN_NOWAIT) != 0;
2347}
2348
2349
2350/* Set the LHS of OMP return. */
2351
2352static inline void
2353gimple_omp_return_set_lhs (gimple *g, tree lhs)
2354{
2355 gimple_statement_omp_return *omp_return_stmt =
2356 as_a <gimple_statement_omp_return *> (g);
2357 omp_return_stmt->val = lhs;
2358}
2359
2360
2361/* Get the LHS of OMP return. */
2362
2363static inline tree
2364gimple_omp_return_lhs (const gimple *g)
2365{
2366 const gimple_statement_omp_return *omp_return_stmt =
2367 as_a <const gimple_statement_omp_return *> (g);
2368 return omp_return_stmt->val;
2369}
2370
2371
2372/* Return a pointer to the LHS of OMP return. */
2373
2374static inline tree *
2375gimple_omp_return_lhs_ptr (gimple *g)
2376{
2377 gimple_statement_omp_return *omp_return_stmt =
2378 as_a <gimple_statement_omp_return *> (g);
2379 return &omp_return_stmt->val;
2380}
2381
2382
2383/* Return true if OMP section statement G has the GF_OMP_SECTION_LAST
2384 flag set. */
2385
2386static inline bool
2387gimple_omp_section_last_p (const gimple *g)
2388{
2389 GIMPLE_CHECK (g, GIMPLE_OMP_SECTION)do { const gimple *__gs = (g); if (gimple_code (__gs) != (GIMPLE_OMP_SECTION
)) gimple_check_failed (__gs, "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.h"
, 2389, __FUNCTION__, (GIMPLE_OMP_SECTION), ERROR_MARK); } while
(0)
;
2390 return (gimple_omp_subcode (g) & GF_OMP_SECTION_LAST) != 0;
2391}
2392
2393
2394/* Set the GF_OMP_SECTION_LAST flag on G. */
2395
2396static inline void
2397gimple_omp_section_set_last (gimple *g)
2398{
2399 GIMPLE_CHECK (g, GIMPLE_OMP_SECTION)do { const gimple *__gs = (g); if (gimple_code (__gs) != (GIMPLE_OMP_SECTION
)) gimple_check_failed (__gs, "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.h"
, 2399, __FUNCTION__, (GIMPLE_OMP_SECTION), ERROR_MARK); } while
(0)
;
2400 g->subcode |= GF_OMP_SECTION_LAST;
2401}
2402
2403
2404/* Return true if OMP parallel statement G has the
2405 GF_OMP_PARALLEL_COMBINED flag set. */
2406
2407static inline bool
2408gimple_omp_parallel_combined_p (const gimple *g)
2409{
2410 GIMPLE_CHECK (g, GIMPLE_OMP_PARALLEL)do { const gimple *__gs = (g); if (gimple_code (__gs) != (GIMPLE_OMP_PARALLEL
)) gimple_check_failed (__gs, "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.h"
, 2410, __FUNCTION__, (GIMPLE_OMP_PARALLEL), ERROR_MARK); } while
(0)
;
2411 return (gimple_omp_subcode (g) & GF_OMP_PARALLEL_COMBINED) != 0;
2412}
2413
2414
2415/* Set the GF_OMP_PARALLEL_COMBINED field in G depending on the boolean
2416 value of COMBINED_P. */
2417
2418static inline void
2419gimple_omp_parallel_set_combined_p (gimple *g, bool combined_p)
2420{
2421 GIMPLE_CHECK (g, GIMPLE_OMP_PARALLEL)do { const gimple *__gs = (g); if (gimple_code (__gs) != (GIMPLE_OMP_PARALLEL
)) gimple_check_failed (__gs, "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.h"
, 2421, __FUNCTION__, (GIMPLE_OMP_PARALLEL), ERROR_MARK); } while
(0)
;
2422 if (combined_p)
2423 g->subcode |= GF_OMP_PARALLEL_COMBINED;
2424 else
2425 g->subcode &= ~GF_OMP_PARALLEL_COMBINED;
2426}
2427
2428
2429/* Return true if OMP atomic load/store statement G has the
2430 GF_OMP_ATOMIC_NEED_VALUE flag set. */
2431
2432static inline bool
2433gimple_omp_atomic_need_value_p (const gimple *g)
2434{
2435 if (gimple_code (g) != GIMPLE_OMP_ATOMIC_LOAD)
2436 GIMPLE_CHECK (g, GIMPLE_OMP_ATOMIC_STORE)do { const gimple *__gs = (g); if (gimple_code (__gs) != (GIMPLE_OMP_ATOMIC_STORE
)) gimple_check_failed (__gs, "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.h"
, 2436, __FUNCTION__, (GIMPLE_OMP_ATOMIC_STORE), ERROR_MARK);
} while (0)
;
2437 return (gimple_omp_subcode (g) & GF_OMP_ATOMIC_NEED_VALUE) != 0;
2438}
2439
2440
2441/* Set the GF_OMP_ATOMIC_NEED_VALUE flag on G. */
2442
2443static inline void
2444gimple_omp_atomic_set_need_value (gimple *g)
2445{
2446 if (gimple_code (g) != GIMPLE_OMP_ATOMIC_LOAD)
2447 GIMPLE_CHECK (g, GIMPLE_OMP_ATOMIC_STORE)do { const gimple *__gs = (g); if (gimple_code (__gs) != (GIMPLE_OMP_ATOMIC_STORE
)) gimple_check_failed (__gs, "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.h"
, 2447, __FUNCTION__, (GIMPLE_OMP_ATOMIC_STORE), ERROR_MARK);
} while (0)
;
2448 g->subcode |= GF_OMP_ATOMIC_NEED_VALUE;
2449}
2450
2451
2452/* Return true if OMP atomic load/store statement G has the
2453 GF_OMP_ATOMIC_WEAK flag set. */
2454
2455static inline bool
2456gimple_omp_atomic_weak_p (const gimple *g)
2457{
2458 if (gimple_code (g) != GIMPLE_OMP_ATOMIC_LOAD)
2459 GIMPLE_CHECK (g, GIMPLE_OMP_ATOMIC_STORE)do { const gimple *__gs = (g); if (gimple_code (__gs) != (GIMPLE_OMP_ATOMIC_STORE
)) gimple_check_failed (__gs, "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.h"
, 2459, __FUNCTION__, (GIMPLE_OMP_ATOMIC_STORE), ERROR_MARK);
} while (0)
;
2460 return (gimple_omp_subcode (g) & GF_OMP_ATOMIC_WEAK) != 0;
2461}
2462
2463
2464/* Set the GF_OMP_ATOMIC_WEAK flag on G. */
2465
2466static inline void
2467gimple_omp_atomic_set_weak (gimple *g)
2468{
2469 if (gimple_code (g) != GIMPLE_OMP_ATOMIC_LOAD)
2470 GIMPLE_CHECK (g, GIMPLE_OMP_ATOMIC_STORE)do { const gimple *__gs = (g); if (gimple_code (__gs) != (GIMPLE_OMP_ATOMIC_STORE
)) gimple_check_failed (__gs, "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.h"
, 2470, __FUNCTION__, (GIMPLE_OMP_ATOMIC_STORE), ERROR_MARK);
} while (0)
;
2471 g->subcode |= GF_OMP_ATOMIC_WEAK;
2472}
2473
2474
2475/* Return the memory order of the OMP atomic load/store statement G. */
2476
2477static inline enum omp_memory_order
2478gimple_omp_atomic_memory_order (const gimple *g)
2479{
2480 if (gimple_code (g) != GIMPLE_OMP_ATOMIC_LOAD)
2481 GIMPLE_CHECK (g, GIMPLE_OMP_ATOMIC_STORE)do { const gimple *__gs = (g); if (gimple_code (__gs) != (GIMPLE_OMP_ATOMIC_STORE
)) gimple_check_failed (__gs, "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.h"
, 2481, __FUNCTION__, (GIMPLE_OMP_ATOMIC_STORE), ERROR_MARK);
} while (0)
;
2482 return (enum omp_memory_order)
2483 (gimple_omp_subcode (g) & GF_OMP_ATOMIC_MEMORY_ORDER);
2484}
2485
2486
2487/* Set the memory order on G. */
2488
2489static inline void
2490gimple_omp_atomic_set_memory_order (gimple *g, enum omp_memory_order mo)
2491{
2492 if (gimple_code (g) != GIMPLE_OMP_ATOMIC_LOAD)
2493 GIMPLE_CHECK (g, GIMPLE_OMP_ATOMIC_STORE)do { const gimple *__gs = (g); if (gimple_code (__gs) != (GIMPLE_OMP_ATOMIC_STORE
)) gimple_check_failed (__gs, "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.h"
, 2493, __FUNCTION__, (GIMPLE_OMP_ATOMIC_STORE), ERROR_MARK);
} while (0)
;
2494 g->subcode = ((g->subcode & ~GF_OMP_ATOMIC_MEMORY_ORDER)
2495 | (mo & GF_OMP_ATOMIC_MEMORY_ORDER));
2496}
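
The memory-order bits live inside the 16-bit subcode, so an update is a masked read-modify-write as shown above. A hypothetical transformation sketch (illustration only, not part of gimple.h):

/* Sketch: downgrade a seq_cst OMP atomic to relaxed (hypothetical).  */

static void
relax_omp_atomic (gimple *g)
{
  if (gimple_omp_atomic_memory_order (g) == OMP_MEMORY_ORDER_SEQ_CST)
    gimple_omp_atomic_set_memory_order (g, OMP_MEMORY_ORDER_RELAXED);
}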
2497
2498
2499/* Return the number of operands for statement GS. */
2500
2501static inline unsigned
2502gimple_num_ops (const gimple *gs)
2503{
2504 return gs->num_ops;
2505}
2506
2507
2508/* Set the number of operands for statement GS. */
2509
2510static inline void
2511gimple_set_num_ops (gimple *gs, unsigned num_ops)
2512{
2513 gs->num_ops = num_ops;
2514}
2515
2516
2517/* Return the array of operands for statement GS. */
2518
2519static inline tree *
2520gimple_ops (gimple *gs)
2521{
2522 size_t off;
2523
2524 /* All the tuples have their operand vector at the very bottom
2525 of the structure. Note that those structures that do not
2526 have an operand vector have a zero offset. */
2527 off = gimple_ops_offset_[gimple_statement_structure (gs)];
2528 gcc_gimple_checking_assert (off != 0)((void)(!(off != 0) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.h"
, 2528, __FUNCTION__), 0 : 0))
;
2529
2530 return (tree *) ((char *) gs + off);
2531}
2532
2533
2534/* Return operand I for statement GS. */
2535
2536static inline tree
2537gimple_op (const gimple *gs, unsigned i)
2538{
2539 if (gimple_has_ops (gs))
2540 {
2541 gcc_gimple_checking_assert (i < gimple_num_ops (gs))((void)(!(i < gimple_num_ops (gs)) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.h"
, 2541, __FUNCTION__), 0 : 0))
;
2542 return gimple_ops (CONST_CAST_GIMPLE (gs)(const_cast<gimple *> (((gs)))))[i];
2543 }
2544 else
2545 return NULL_TREE(tree) __null;
2546}
2547
2548/* Return a pointer to operand I for statement GS. */
2549
2550static inline tree *
2551gimple_op_ptr (gimple *gs, unsigned i)
2552{
2553 if (gimple_has_ops (gs))
2554 {
2555 gcc_gimple_checking_assert (i < gimple_num_ops (gs))((void)(!(i < gimple_num_ops (gs)) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.h"
, 2555, __FUNCTION__), 0 : 0))
;
2556 return gimple_ops (gs) + i;
2557 }
2558 else
2559 return NULL__null;
2560}
2561
2562/* Set operand I of statement GS to OP. */
2563
2564static inline void
2565gimple_set_op (gimple *gs, unsigned i, tree op)
2566{
2567 gcc_gimple_checking_assert (gimple_has_ops (gs) && i < gimple_num_ops (gs))((void)(!(gimple_has_ops (gs) && i < gimple_num_ops
(gs)) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.h"
, 2567, __FUNCTION__), 0 : 0))
;
2568
2569 /* Note. It may be tempting to assert that OP matches
2570 is_gimple_operand, but that would be wrong. Different tuples
2571 accept slightly different sets of tree operands. Each caller
2572 should perform its own validation. */
2573 gimple_ops (gs)[i] = op;
2574}
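
Together, gimple_num_ops, gimple_op and gimple_op_ptr expose the flat operand vector at the tail of each tuple. A minimal walking sketch (illustration only, not part of gimple.h):

/* Sketch: visit every operand of STMT.  gimple_op is safe to call on
   statements without operands; it simply returns NULL_TREE.  */

static void
walk_stmt_operands (const gimple *stmt)
{
  for (unsigned i = 0; i < gimple_num_ops (stmt); ++i)
    if (tree op = gimple_op (stmt, i))
      {
	/* Inspect OP here.  */
	(void) op;
      }
}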
2575
2576/* Return true if GS is a GIMPLE_ASSIGN. */
2577
2578static inline bool
2579is_gimple_assign (const gimple *gs)
2580{
2581 return gimple_code (gs) == GIMPLE_ASSIGN;
[47] Assuming the condition is true
[48] Returning the value 1, which participates in a condition later
2582}
2583
2584/* Determine if expression CODE is one of the valid expressions that can
2585 be used on the RHS of GIMPLE assignments. */
2586
2587static inline enum gimple_rhs_class
2588get_gimple_rhs_class (enum tree_code code)
2589{
2590 return (enum gimple_rhs_class) gimple_rhs_class_table[(int) code];
2591}
2592
2593/* Return the LHS of assignment statement GS. */
2594
2595static inline tree
2596gimple_assign_lhs (const gassign *gs)
2597{
2598 return gs->op[0];
2599}
2600
2601static inline tree
2602gimple_assign_lhs (const gimple *gs)
2603{
2604 const gassign *ass = GIMPLE_CHECK2<const gassign *> (gs);
2605 return gimple_assign_lhs (ass);
2606}
2607
2608
2609/* Return a pointer to the LHS of assignment statement GS. */
2610
2611static inline tree *
2612gimple_assign_lhs_ptr (gassign *gs)
2613{
2614 return &gs->op[0];
2615}
2616
2617static inline tree *
2618gimple_assign_lhs_ptr (gimple *gs)
2619{
2620 gassign *ass = GIMPLE_CHECK2<gassign *> (gs);
2621 return gimple_assign_lhs_ptr (ass);
2622}
2623
2624
2625/* Set LHS to be the LHS operand of assignment statement GS. */
2626
2627static inline void
2628gimple_assign_set_lhs (gassign *gs, tree lhs)
2629{
2630 gs->op[0] = lhs;
2631
2632 if (lhs && TREE_CODE (lhs)((enum tree_code) (lhs)->base.code) == SSA_NAME)
2633 SSA_NAME_DEF_STMT (lhs)(tree_check ((lhs), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.h"
, 2633, __FUNCTION__, (SSA_NAME)))->ssa_name.def_stmt
= gs;
2634}
2635
2636static inline void
2637gimple_assign_set_lhs (gimple *gs, tree lhs)
2638{
2639 gassign *ass = GIMPLE_CHECK2<gassign *> (gs);
2640 gimple_assign_set_lhs (ass, lhs);
2641}
2642
2643
2644/* Return the first operand on the RHS of assignment statement GS. */
2645
2646static inline tree
2647gimple_assign_rhs1 (const gassign *gs)
2648{
2649 return gs->op[1];
2650}
2651
2652static inline tree
2653gimple_assign_rhs1 (const gimple *gs)
2654{
2655 const gassign *ass = GIMPLE_CHECK2<const gassign *> (gs);
2656 return gimple_assign_rhs1 (ass);
2657}
2658
2659
2660/* Return a pointer to the first operand on the RHS of assignment
2661 statement GS. */
2662
2663static inline tree *
2664gimple_assign_rhs1_ptr (gassign *gs)
2665{
2666 return &gs->op[1];
2667}
2668
2669static inline tree *
2670gimple_assign_rhs1_ptr (gimple *gs)
2671{
2672 gassign *ass = GIMPLE_CHECK2<gassign *> (gs);
2673 return gimple_assign_rhs1_ptr (ass);
2674}
2675
2676/* Set RHS to be the first operand on the RHS of assignment statement GS. */
2677
2678static inline void
2679gimple_assign_set_rhs1 (gassign *gs, tree rhs)
2680{
2681 gs->op[1] = rhs;
2682}
2683
2684static inline void
2685gimple_assign_set_rhs1 (gimple *gs, tree rhs)
2686{
2687 gassign *ass = GIMPLE_CHECK2<gassign *> (gs);
2688 gimple_assign_set_rhs1 (ass, rhs);
2689}
2690
2691
2692/* Return the second operand on the RHS of assignment statement GS.
2693 If GS does not have two operands, NULL is returned instead. */
2694
2695static inline tree
2696gimple_assign_rhs2 (const gassign *gs)
2697{
2698 if (gimple_num_ops (gs) >= 3)
2699 return gs->op[2];
2700 else
2701 return NULL_TREE(tree) __null;
2702}
2703
2704static inline tree
2705gimple_assign_rhs2 (const gimple *gs)
2706{
2707 const gassign *ass = GIMPLE_CHECK2<const gassign *> (gs);
2708 return gimple_assign_rhs2 (ass);
2709}
2710
2711
2712/* Return a pointer to the second operand on the RHS of assignment
2713 statement GS. */
2714
2715static inline tree *
2716gimple_assign_rhs2_ptr (gassign *gs)
2717{
2718 gcc_gimple_checking_assert (gimple_num_ops (gs) >= 3)((void)(!(gimple_num_ops (gs) >= 3) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.h"
, 2718, __FUNCTION__), 0 : 0))
;
2719 return &gs->op[2];
2720}
2721
2722static inline tree *
2723gimple_assign_rhs2_ptr (gimple *gs)
2724{
2725 gassign *ass = GIMPLE_CHECK2<gassign *> (gs);
2726 return gimple_assign_rhs2_ptr (ass);
2727}
2728
2729
2730/* Set RHS to be the second operand on the RHS of assignment statement GS. */
2731
2732static inline void
2733gimple_assign_set_rhs2 (gassign *gs, tree rhs)
2734{
2735 gcc_gimple_checking_assert (gimple_num_ops (gs) >= 3)((void)(!(gimple_num_ops (gs) >= 3) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.h"
, 2735, __FUNCTION__), 0 : 0))
;
2736 gs->op[2] = rhs;
2737}
2738
2739static inline void
2740gimple_assign_set_rhs2 (gimple *gs, tree rhs)
2741{
2742 gassign *ass = GIMPLE_CHECK2<gassign *> (gs);
2743 return gimple_assign_set_rhs2 (ass, rhs);
2744}
2745
2746/* Return the third operand on the RHS of assignment statement GS.
2747 If GS does not have three operands, NULL is returned instead. */
2748
2749static inline tree
2750gimple_assign_rhs3 (const gassign *gs)
2751{
2752 if (gimple_num_ops (gs) >= 4)
2753 return gs->op[3];
2754 else
2755 return NULL_TREE(tree) __null;
2756}
2757
2758static inline tree
2759gimple_assign_rhs3 (const gimple *gs)
2760{
2761 const gassign *ass = GIMPLE_CHECK2<const gassign *> (gs);
2762 return gimple_assign_rhs3 (ass);
2763}
2764
2765/* Return a pointer to the third operand on the RHS of assignment
2766 statement GS. */
2767
2768static inline tree *
2769gimple_assign_rhs3_ptr (gimple *gs)
2770{
2771 gassign *ass = GIMPLE_CHECK2<gassign *> (gs);
2772 gcc_gimple_checking_assert (gimple_num_ops (gs) >= 4)((void)(!(gimple_num_ops (gs) >= 4) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.h"
, 2772, __FUNCTION__), 0 : 0))
;
2773 return &ass->op[3];
2774}
2775
2776
2777/* Set RHS to be the third operand on the RHS of assignment statement GS. */
2778
2779static inline void
2780gimple_assign_set_rhs3 (gassign *gs, tree rhs)
2781{
2782 gcc_gimple_checking_assert (gimple_num_ops (gs) >= 4)((void)(!(gimple_num_ops (gs) >= 4) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.h"
, 2782, __FUNCTION__), 0 : 0))
;
2783 gs->op[3] = rhs;
2784}
2785
2786static inline void
2787gimple_assign_set_rhs3 (gimple *gs, tree rhs)
2788{
2789 gassign *ass = GIMPLE_CHECK2<gassign *> (gs);
2790 gimple_assign_set_rhs3 (ass, rhs);
2791}
2792
2793
2794/* A wrapper around 3 operand gimple_assign_set_rhs_with_ops, for callers
2795 which expect to see only two operands. */
2796
2797static inline void
2798gimple_assign_set_rhs_with_ops (gimple_stmt_iterator *gsi, enum tree_code code,
2799 tree op1, tree op2)
2800{
2801 gimple_assign_set_rhs_with_ops (gsi, code, op1, op2, NULL__null);
2802}
2803
2804/* A wrapper around 3 operand gimple_assign_set_rhs_with_ops, for callers
2805 which expect to see only one operand. */
2806
2807static inline void
2808gimple_assign_set_rhs_with_ops (gimple_stmt_iterator *gsi, enum tree_code code,
2809 tree op1)
2810{
2811 gimple_assign_set_rhs_with_ops (gsi, code, op1, NULL__null, NULL__null);
2812}
2813
2814/* Returns true if GS is a nontemporal move. */
2815
2816static inline bool
2817gimple_assign_nontemporal_move_p (const gassign *gs)
2818{
2819 return gs->nontemporal_move;
2820}
2821
2822/* Sets nontemporal move flag of GS to NONTEMPORAL. */
2823
2824static inline void
2825gimple_assign_set_nontemporal_move (gimple *gs, bool nontemporal)
2826{
2827 GIMPLE_CHECK (gs, GIMPLE_ASSIGN)do { const gimple *__gs = (gs); if (gimple_code (__gs) != (GIMPLE_ASSIGN
)) gimple_check_failed (__gs, "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.h"
, 2827, __FUNCTION__, (GIMPLE_ASSIGN), ERROR_MARK); } while (
0)
;
2828 gs->nontemporal_move = nontemporal;
2829}
2830
2831
2832/* Return the code of the expression computed on the rhs of assignment
2833 statement GS. In case that the RHS is a single object, returns the
2834 tree code of the object. */
2835
2836static inline enum tree_code
2837gimple_assign_rhs_code (const gassign *gs)
2838{
2839 enum tree_code code = (enum tree_code) gs->subcode;
2840 /* While we initially set subcode to the TREE_CODE of the rhs for
2841 GIMPLE_SINGLE_RHS assigns, we do not update that subcode to stay
2842 in sync when we rewrite stmts into SSA form or do SSA propagations. */
2843 if (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS)
2844 code = TREE_CODE (gs->op[1])((enum tree_code) (gs->op[1])->base.code);
2845
2846 return code;
2847}
2848
2849static inline enum tree_code
2850gimple_assign_rhs_code (const gimple *gs)
2851{
2852 const gassign *ass = GIMPLE_CHECK2<const gassign *> (gs);
2853 return gimple_assign_rhs_code (ass);
2854}
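
A small dispatch sketch (illustration only, not part of gimple.h): guard with is_gimple_assign, then compare the rhs code, which for single-rhs assigns mirrors the TREE_CODE of op[1] as noted above.

/* Sketch: does STMT compute an addition?  */

static bool
assign_is_addition_p (const gimple *stmt)
{
  return is_gimple_assign (stmt)
	 && gimple_assign_rhs_code (stmt) == PLUS_EXPR;
}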
2855
2856
2857/* Set CODE to be the code for the expression computed on the RHS of
2858 assignment S. */
2859
2860static inline void
2861gimple_assign_set_rhs_code (gimple *s, enum tree_code code)
2862{
2863 GIMPLE_CHECK (s, GIMPLE_ASSIGN)do { const gimple *__gs = (s); if (gimple_code (__gs) != (GIMPLE_ASSIGN
)) gimple_check_failed (__gs, "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.h"
, 2863, __FUNCTION__, (GIMPLE_ASSIGN), ERROR_MARK); } while (
0)
;
2864 s->subcode = code;
2865}
2866
2867
2868/* Return the gimple rhs class of the code of the expression computed on
2869 the rhs of assignment statement GS.
2870 This will never return GIMPLE_INVALID_RHS. */
2871
2872static inline enum gimple_rhs_class
2873gimple_assign_rhs_class (const gimple *gs)
2874{
2875 return get_gimple_rhs_class (gimple_assign_rhs_code (gs));
2876}
2877
2878/* Return true if GS is an assignment with a singleton RHS, i.e.,
2879 there is no operator associated with the assignment itself.
2880 Unlike gimple_assign_copy_p, this predicate returns true for
2881 any RHS operand, including those that perform an operation
2882 and do not have the semantics of a copy, such as COND_EXPR. */
2883
2884static inline bool
2885gimple_assign_single_p (const gimple *gs)
2886{
2887 return (is_gimple_assign (gs)
2888 && gimple_assign_rhs_class (gs) == GIMPLE_SINGLE_RHS);
2889}
2890
2891/* Return true if GS performs a store to its lhs. */
2892
2893static inline bool
2894gimple_store_p (const gimple *gs)
2895{
2896 tree lhs = gimple_get_lhs (gs);
2897 return lhs && !is_gimple_reg (lhs);
2898}
2899
2900/* Return true if GS is an assignment that loads from its rhs1. */
2901
2902static inline bool
2903gimple_assign_load_p (const gimple *gs)
2904{
2905 tree rhs;
2906 if (!gimple_assign_single_p (gs))
2907 return false;
2908 rhs = gimple_assign_rhs1 (gs);
2909 if (TREE_CODE (rhs)((enum tree_code) (rhs)->base.code) == WITH_SIZE_EXPR)
2910 return true;
2911 rhs = get_base_address (rhs);
2912 return (DECL_P (rhs)(tree_code_type[(int) (((enum tree_code) (rhs)->base.code)
)] == tcc_declaration)
2913 || TREE_CODE (rhs)((enum tree_code) (rhs)->base.code) == MEM_REF || TREE_CODE (rhs)((enum tree_code) (rhs)->base.code) == TARGET_MEM_REF);
2914}
2915
2916
2917/* Return true if S is a type-cast assignment. */
2918
2919static inline bool
2920gimple_assign_cast_p (const gimple *s)
2921{
2922 if (is_gimple_assign (s))
2923 {
2924 enum tree_code sc = gimple_assign_rhs_code (s);
2925 return CONVERT_EXPR_CODE_P (sc)((sc) == NOP_EXPR || (sc) == CONVERT_EXPR)
2926 || sc == VIEW_CONVERT_EXPR
2927 || sc == FIX_TRUNC_EXPR;
2928 }
2929
2930 return false;
2931}
2932
2933/* Return true if S is a clobber statement. */
2934
2935static inline bool
2936gimple_clobber_p (const gimple *s)
2937{
2938 return gimple_assign_single_p (s)
2939 && TREE_CLOBBER_P (gimple_assign_rhs1 (s))(((enum tree_code) (gimple_assign_rhs1 (s))->base.code) ==
CONSTRUCTOR && ((gimple_assign_rhs1 (s))->base.volatile_flag
))
;
2940}
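
The predicates above are typically combined; clobbers are single-rhs assigns too, so passes that walk real memory accesses filter them out explicitly. An illustrative sketch (not part of gimple.h):

/* Sketch: true for plain load/store assignments, ignoring clobbers.  */

static bool
plain_memory_assign_p (const gimple *stmt)
{
  return gimple_assign_single_p (stmt)
	 && !gimple_clobber_p (stmt)
	 && (gimple_assign_load_p (stmt) || gimple_store_p (stmt));
}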
2941
2942/* Return true if GS is a GIMPLE_CALL. */
2943
2944static inline bool
2945is_gimple_call (const gimple *gs)
2946{
2947 return gimple_code (gs) == GIMPLE_CALL;
[35] Assuming the condition is false
[36] Returning zero, which participates in a condition later
2948}
2949
2950/* Return the LHS of call statement GS. */
2951
2952static inline tree
2953gimple_call_lhs (const gcall *gs)
2954{
2955 return gs->op[0];
2956}
2957
2958static inline tree
2959gimple_call_lhs (const gimple *gs)
2960{
2961 const gcall *gc = GIMPLE_CHECK2<const gcall *> (gs);
2962 return gimple_call_lhs (gc);
2963}
2964
2965
2966/* Return a pointer to the LHS of call statement GS. */
2967
2968static inline tree *
2969gimple_call_lhs_ptr (gcall *gs)
2970{
2971 return &gs->op[0];
2972}
2973
2974static inline tree *
2975gimple_call_lhs_ptr (gimple *gs)
2976{
2977 gcall *gc = GIMPLE_CHECK2<gcall *> (gs);
2978 return gimple_call_lhs_ptr (gc);
2979}
2980
2981
2982/* Set LHS to be the LHS operand of call statement GS. */
2983
2984static inline void
2985gimple_call_set_lhs (gcall *gs, tree lhs)
2986{
2987 gs->op[0] = lhs;
2988 if (lhs && TREE_CODE (lhs)((enum tree_code) (lhs)->base.code) == SSA_NAME)
2989 SSA_NAME_DEF_STMT (lhs)(tree_check ((lhs), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.h"
, 2989, __FUNCTION__, (SSA_NAME)))->ssa_name.def_stmt
= gs;
2990}
2991
2992static inline void
2993gimple_call_set_lhs (gimple *gs, tree lhs)
2994{
2995 gcall *gc = GIMPLE_CHECK2<gcall *> (gs);
2996 gimple_call_set_lhs (gc, lhs);
2997}
2998
2999
3000/* Return true if call GS calls an internal-only function, as enumerated
3001 by internal_fn. */
3002
3003static inline bool
3004gimple_call_internal_p (const gcall *gs)
3005{
3006 return (gs->subcode & GF_CALL_INTERNAL) != 0;
3007}
3008
3009static inline bool
3010gimple_call_internal_p (const gimple *gs)
3011{
3012 const gcall *gc = GIMPLE_CHECK2<const gcall *> (gs);
3013 return gimple_call_internal_p (gc);
3014}
3015
3016/* Return true if call GS is marked as nocf_check. */
3017
3018static inline bool
3019gimple_call_nocf_check_p (const gcall *gs)
3020{
3021 return (gs->subcode & GF_CALL_NOCF_CHECK) != 0;
3022}
3023
3024/* Mark statement GS as nocf_check call. */
3025
3026static inline void
3027gimple_call_set_nocf_check (gcall *gs, bool nocf_check)
3028{
3029 if (nocf_check)
3030 gs->subcode |= GF_CALL_NOCF_CHECK;
3031 else
3032 gs->subcode &= ~GF_CALL_NOCF_CHECK;
3033}
3034
3035/* Return the target of internal call GS. */
3036
3037static inline enum internal_fn
3038gimple_call_internal_fn (const gcall *gs)
3039{
3040 gcc_gimple_checking_assert (gimple_call_internal_p (gs))((void)(!(gimple_call_internal_p (gs)) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.h"
, 3040, __FUNCTION__), 0 : 0))
;
3041 return gs->u.internal_fn;
3042}
3043
3044static inline enum internal_fn
3045gimple_call_internal_fn (const gimple *gs)
3046{
3047 const gcall *gc = GIMPLE_CHECK2<const gcall *> (gs);
3048 return gimple_call_internal_fn (gc);
3049}
3050
3051/* Return true if this internal gimple call is unique. */
3052
3053static inline bool
3054gimple_call_internal_unique_p (const gcall *gs)
3055{
3056 return gimple_call_internal_fn (gs) == IFN_UNIQUE;
3057}
3058
3059static inline bool
3060gimple_call_internal_unique_p (const gimple *gs)
3061{
3062 const gcall *gc = GIMPLE_CHECK2<const gcall *> (gs);
3063 return gimple_call_internal_unique_p (gc);
3064}
3065
3066/* Return true if GS is an internal function FN. */
3067
3068static inline bool
3069gimple_call_internal_p (const gimple *gs, internal_fn fn)
3070{
3071 return (is_gimple_call (gs)
3072 && gimple_call_internal_p (gs)
3073 && gimple_call_internal_fn (gs) == fn);
3074}
3075
3076/* If CTRL_ALTERING_P is true, mark GIMPLE_CALL S as a stmt
3077 that could alter control flow. */
3078
3079static inline void
3080gimple_call_set_ctrl_altering (gcall *s, bool ctrl_altering_p)
3081{
3082 if (ctrl_altering_p)
3083 s->subcode |= GF_CALL_CTRL_ALTERING;
3084 else
3085 s->subcode &= ~GF_CALL_CTRL_ALTERING;
3086}
3087
3088static inline void
3089gimple_call_set_ctrl_altering (gimple *s, bool ctrl_altering_p)
3090{
3091 gcall *gc = GIMPLE_CHECK2<gcall *> (s);
3092 gimple_call_set_ctrl_altering (gc, ctrl_altering_p);
3093}
3094
3095/* Return true if call GS calls a function whose GF_CALL_CTRL_ALTERING
3096 flag is set. Such a call cannot be a stmt in the middle of a bb. */
3097
3098static inline bool
3099gimple_call_ctrl_altering_p (const gcall *gs)
3100{
3101 return (gs->subcode & GF_CALL_CTRL_ALTERING) != 0;
3102}
3103
3104static inline bool
3105gimple_call_ctrl_altering_p (const gimple *gs)
3106{
3107 const gcall *gc = GIMPLE_CHECK2<const gcall *> (gs);
3108 return gimple_call_ctrl_altering_p (gc);
3109}
3110
3111
3112/* Return the function type of the function called by GS. */
3113
3114static inline tree
3115gimple_call_fntype (const gcall *gs)
3116{
3117 if (gimple_call_internal_p (gs))
3118 return NULL_TREE(tree) __null;
3119 return gs->u.fntype;
3120}
3121
3122static inline tree
3123gimple_call_fntype (const gimple *gs)
3124{
3125 const gcall *call_stmt = GIMPLE_CHECK2<const gcall *> (gs);
3126 return gimple_call_fntype (call_stmt);
3127}
3128
3129/* Set the type of the function called by CALL_STMT to FNTYPE. */
3130
3131static inline void
3132gimple_call_set_fntype (gcall *call_stmt, tree fntype)
3133{
3134 gcc_gimple_checking_assert (!gimple_call_internal_p (call_stmt))((void)(!(!gimple_call_internal_p (call_stmt)) ? fancy_abort (
"/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.h"
, 3134, __FUNCTION__), 0 : 0))
;
3135 call_stmt->u.fntype = fntype;
3136}
3137
3138
3139/* Return the tree node representing the function called by call
3140 statement GS. */
3141
3142static inline tree
3143gimple_call_fn (const gcall *gs)
3144{
3145 return gs->op[1];
3146}
3147
3148static inline tree
3149gimple_call_fn (const gimple *gs)
3150{
3151 const gcall *gc = GIMPLE_CHECK2<const gcall *> (gs);
3152 return gimple_call_fn (gc);
3153}
3154
3155/* Return a pointer to the tree node representing the function called by call
3156 statement GS. */
3157
3158static inline tree *
3159gimple_call_fn_ptr (gcall *gs)
3160{
3161 return &gs->op[1];
3162}
3163
3164static inline tree *
3165gimple_call_fn_ptr (gimple *gs)
3166{
3167 gcall *gc = GIMPLE_CHECK2<gcall *> (gs);
3168 return gimple_call_fn_ptr (gc);
3169}
3170
3171
3172/* Set FN to be the function called by call statement GS. */
3173
3174static inline void
3175gimple_call_set_fn (gcall *gs, tree fn)
3176{
3177 gcc_gimple_checking_assert (!gimple_call_internal_p (gs))((void)(!(!gimple_call_internal_p (gs)) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.h"
, 3177, __FUNCTION__), 0 : 0))
;
3178 gs->op[1] = fn;
3179}
3180
3181
3182/* Set FNDECL to be the function called by call statement GS. */
3183
3184static inline void
3185gimple_call_set_fndecl (gcall *gs, tree decl)
3186{
3187 gcc_gimple_checking_assert (!gimple_call_internal_p (gs))((void)(!(!gimple_call_internal_p (gs)) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.h"
, 3187, __FUNCTION__), 0 : 0))
;
3188 gs->op[1] = build1_loc (gimple_location (gs), ADDR_EXPR,
3189 build_pointer_type (TREE_TYPE (decl)((contains_struct_check ((decl), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.h"
, 3189, __FUNCTION__))->typed.type)
), decl);
3190}
3191
3192static inline void
3193gimple_call_set_fndecl (gimple *gs, tree decl)
3194{
3195 gcall *gc = GIMPLE_CHECK2<gcall *> (gs);
3196 gimple_call_set_fndecl (gc, decl);
3197}
3198
3199
3200/* Set internal function FN to be the function called by call statement CALL_STMT. */
3201
3202static inline void
3203gimple_call_set_internal_fn (gcall *call_stmt, enum internal_fn fn)
3204{
3205 gcc_gimple_checking_assert (gimple_call_internal_p (call_stmt))((void)(!(gimple_call_internal_p (call_stmt)) ? fancy_abort (
"/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.h"
, 3205, __FUNCTION__), 0 : 0))
;
3206 call_stmt->u.internal_fn = fn;
3207}
3208
3209
3210/* If a given GIMPLE_CALL's callee is a FUNCTION_DECL, return it.
3211 Otherwise return NULL. This function is analogous to
3212 get_callee_fndecl in tree land. */
3213
3214static inline tree
3215gimple_call_fndecl (const gcall *gs)
3216{
3217 return gimple_call_addr_fndecl (gimple_call_fn (gs));
3218}
3219
3220static inline tree
3221gimple_call_fndecl (const gimple *gs)
3222{
3223 const gcall *gc = GIMPLE_CHECK2<const gcall *> (gs);
3224 return gimple_call_fndecl (gc);
3225}
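
gimple_call_fndecl returns NULL_TREE for indirect calls, so it doubles as a direct-call test. A minimal sketch (illustration only, not part of gimple.h):

/* Sketch: does CALL name its callee directly?  NULL_TREE means the
   call goes through a function pointer.  */

static bool
direct_call_p (const gcall *call)
{
  return gimple_call_fndecl (call) != NULL_TREE;
}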
3226
3227
3228/* Return the type returned by call statement GS. */
3229
3230static inline tree
3231gimple_call_return_type (const gcall *gs)
3232{
3233 tree type = gimple_call_fntype (gs);
3234
3235 if (type == NULL_TREE(tree) __null)
3236 return TREE_TYPE (gimple_call_lhs (gs))((contains_struct_check ((gimple_call_lhs (gs)), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.h"
, 3236, __FUNCTION__))->typed.type)
;
3237
3238 /* The type returned by a function is the type of its
3239 function type. */
3240 return TREE_TYPE (type)((contains_struct_check ((type), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.h"
, 3240, __FUNCTION__))->typed.type)
;
3241}
3242
3243
3244/* Return the static chain for call statement GS. */
3245
3246static inline tree
3247gimple_call_chain (const gcall *gs)
3248{
3249 return gs->op[2];
3250}
3251
3252static inline tree
3253gimple_call_chain (const gimple *gs)
3254{
3255 const gcall *gc = GIMPLE_CHECK2<const gcall *> (gs);
3256 return gimple_call_chain (gc);
3257}
3258
3259
3260/* Return a pointer to the static chain for call statement CALL_STMT. */
3261
3262static inline tree *
3263gimple_call_chain_ptr (gcall *call_stmt)
3264{
3265 return &call_stmt->op[2];
3266}
3267
3268/* Set CHAIN to be the static chain for call statement CALL_STMT. */
3269
3270static inline void
3271gimple_call_set_chain (gcall *call_stmt, tree chain)
3272{
3273 call_stmt->op[2] = chain;
3274}
3275
3276
3277/* Return the number of arguments used by call statement GS. */
3278
3279static inline unsigned
3280gimple_call_num_args (const gcall *gs)
3281{
3282 return gimple_num_ops (gs) - 3;
3283}
3284
3285static inline unsigned
3286gimple_call_num_args (const gimple *gs)
3287{
3288 const gcall *gc = GIMPLE_CHECK2<const gcall *> (gs);
3289 return gimple_call_num_args (gc);
3290}
3291
3292
3293/* Return the argument at position INDEX for call statement GS. */
3294
3295static inline tree
3296gimple_call_arg (const gcall *gs, unsigned index)
3297{
3298 gcc_gimple_checking_assert (gimple_num_ops (gs) > index + 3)((void)(!(gimple_num_ops (gs) > index + 3) ? fancy_abort (
"/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.h"
, 3298, __FUNCTION__), 0 : 0))
;
3299 return gs->op[index + 3];
3300}
3301
3302static inline tree
3303gimple_call_arg (const gimple *gs, unsigned index)
3304{
3305 const gcall *gc = GIMPLE_CHECK2<const gcall *> (gs);
3306 return gimple_call_arg (gc, index);
3307}
3308
3309
3310/* Return a pointer to the argument at position INDEX for call
3311 statement GS. */
3312
3313static inline tree *
3314gimple_call_arg_ptr (gcall *gs, unsigned index)
3315{
3316 gcc_gimple_checking_assert (gimple_num_ops (gs) > index + 3)((void)(!(gimple_num_ops (gs) > index + 3) ? fancy_abort (
"/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.h"
, 3316, __FUNCTION__), 0 : 0))
;
3317 return &gs->op[index + 3];
3318}
3319
3320static inline tree *
3321gimple_call_arg_ptr (gimple *gs, unsigned index)
3322{
3323 gcall *gc = GIMPLE_CHECK2<gcall *> (gs);
3324 return gimple_call_arg_ptr (gc, index);
3325}
3326
3327
3328/* Set ARG to be the argument at position INDEX for call statement GS. */
3329
3330static inline void
3331gimple_call_set_arg (gcall *gs, unsigned index, tree arg)
3332{
3333 gcc_gimple_checking_assert (gimple_num_ops (gs) > index + 3)((void)(!(gimple_num_ops (gs) > index + 3) ? fancy_abort (
"/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.h"
, 3333, __FUNCTION__), 0 : 0))
;
3334 gs->op[index + 3] = arg;
3335}
3336
3337static inline void
3338gimple_call_set_arg (gimple *gs, unsigned index, tree arg)
3339{
3340 gcall *gc = GIMPLE_CHECK2<gcall *> (gs);
3341 gimple_call_set_arg (gc, index, arg);
3342}
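
The operand vector of a gcall is laid out as op[0] = LHS, op[1] = callee, op[2] = static chain and op[3..] = arguments, which is why gimple_call_num_args subtracts 3. A walking sketch (illustration only, not part of gimple.h):

/* Sketch: visit each actual argument of CALL in order.  */

static void
walk_call_arguments (const gcall *call)
{
  for (unsigned i = 0; i < gimple_call_num_args (call); ++i)
    {
      tree arg = gimple_call_arg (call, i);
      /* Inspect ARG here.  */
      (void) arg;
    }
}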
3343
3344
3345/* If TAIL_P is true, mark call statement S as being a tail call
3346 (i.e., a call just before the exit of a function). These calls are
3347 candidate for tail call optimization. */
3348
3349static inline void
3350gimple_call_set_tail (gcall *s, bool tail_p)
3351{
3352 if (tail_p)
3353 s->subcode |= GF_CALL_TAILCALL;
3354 else
3355 s->subcode &= ~GF_CALL_TAILCALL;
3356}
3357
3358
3359/* Return true if GIMPLE_CALL S is marked as a tail call. */
3360
3361static inline bool
3362gimple_call_tail_p (const gcall *s)
3363{
3364 return (s->subcode & GF_CALL_TAILCALL) != 0;
3365}
3366
3367/* Mark (or clear) call statement S as requiring tail call optimization. */
3368
3369static inline void
3370gimple_call_set_must_tail (gcall *s, bool must_tail_p)
3371{
3372 if (must_tail_p)
3373 s->subcode |= GF_CALL_MUST_TAIL_CALL;
3374 else
3375 s->subcode &= ~GF_CALL_MUST_TAIL_CALL;
3376}
3377
3378/* Return true if call statement has been marked as requiring
3379 tail call optimization. */
3380
3381static inline bool
3382gimple_call_must_tail_p (const gcall *s)
3383{
3384 return (s->subcode & GF_CALL_MUST_TAIL_CALL) != 0;
3385}
3386
3387/* If RETURN_SLOT_OPT_P is true, mark GIMPLE_CALL S as valid for return
3388 slot optimization. This transformation uses the target of the call
3389 expansion as the return slot for calls that return in memory. */
3390
3391static inline void
3392gimple_call_set_return_slot_opt (gcall *s, bool return_slot_opt_p)
3393{
3394 if (return_slot_opt_p)
3395 s->subcode |= GF_CALL_RETURN_SLOT_OPT;
3396 else
3397 s->subcode &= ~GF_CALL_RETURN_SLOT_OPT;
3398}
3399
3400
3401/* Return true if S is marked for return slot optimization. */
3402
3403static inline bool
3404gimple_call_return_slot_opt_p (const gcall *s)
3405{
3406 return (s->subcode & GF_CALL_RETURN_SLOT_OPT) != 0;
3407}
3408
3409
3410/* If FROM_THUNK_P is true, mark GIMPLE_CALL S as being the jump from a
3411 thunk to the thunked-to function. */
3412
3413static inline void
3414gimple_call_set_from_thunk (gcall *s, bool from_thunk_p)
3415{
3416 if (from_thunk_p)
3417 s->subcode |= GF_CALL_FROM_THUNK;
3418 else
3419 s->subcode &= ~GF_CALL_FROM_THUNK;
3420}
3421
3422
3423/* Return true if GIMPLE_CALL S is a jump from a thunk. */
3424
3425static inline bool
3426gimple_call_from_thunk_p (gcall *s)
3427{
3428 return (s->subcode & GF_CALL_FROM_THUNK) != 0;
3429}
3430
3431
3432/* If FROM_NEW_OR_DELETE_P is true, mark GIMPLE_CALL S as being a call
3433 to operator new or delete created from a new or delete expression. */
3434
3435static inline void
3436gimple_call_set_from_new_or_delete (gcall *s, bool from_new_or_delete_p)
3437{
3438 if (from_new_or_delete_p)
3439 s->subcode |= GF_CALL_FROM_NEW_OR_DELETE;
3440 else
3441 s->subcode &= ~GF_CALL_FROM_NEW_OR_DELETE;
3442}
3443
3444
3445/* Return true if GIMPLE_CALL S is a call to operator new or delete
3446 created from a new or delete expression. */
3447
3448static inline bool
3449gimple_call_from_new_or_delete (const gcall *s)
3450{
3451 return (s->subcode & GF_CALL_FROM_NEW_OR_DELETE) != 0;
3452}
3453
3454
3455/* If PASS_ARG_PACK_P is true, GIMPLE_CALL S is a stdarg call that needs the
3456 argument pack in its argument list. */
3457
3458static inline void
3459gimple_call_set_va_arg_pack (gcall *s, bool pass_arg_pack_p)
3460{
3461 if (pass_arg_pack_p)
3462 s->subcode |= GF_CALL_VA_ARG_PACK;
3463 else
3464 s->subcode &= ~GF_CALL_VA_ARG_PACK;
3465}
3466
3467
3468/* Return true if GIMPLE_CALL S is a stdarg call that needs the
3469 argument pack in its argument list. */
3470
3471static inline bool
3472gimple_call_va_arg_pack_p (const gcall *s)
3473{
3474 return (s->subcode & GF_CALL_VA_ARG_PACK) != 0;
3475}
3476
3477
3478/* Return true if S is a noreturn call. */
3479
3480static inline bool
3481gimple_call_noreturn_p (const gcall *s)
3482{
3483 return (gimple_call_flags (s) & ECF_NORETURN(1 << 3)) != 0;
3484}
3485
3486static inline bool
3487gimple_call_noreturn_p (const gimple *s)
3488{
3489 const gcall *gc = GIMPLE_CHECK2<const gcall *> (s);
3490 return gimple_call_noreturn_p (gc);
3491}
3492
3493
3494/* If NOTHROW_P is true, GIMPLE_CALL S is a call that is known to not throw
3495 even if the called function can throw in other cases. */
3496
3497static inline void
3498gimple_call_set_nothrow (gcall *s, bool nothrow_p)
3499{
3500 if (nothrow_p)
3501 s->subcode |= GF_CALL_NOTHROW;
3502 else
3503 s->subcode &= ~GF_CALL_NOTHROW;
3504}
3505
3506/* Return true if S is a nothrow call. */
3507
3508static inline bool
3509gimple_call_nothrow_p (gcall *s)
3510{
3511 return (gimple_call_flags (s) & ECF_NOTHROW(1 << 6)) != 0;
3512}
3513
3514/* If FOR_VAR is true, GIMPLE_CALL S is a call to builtin_alloca that
3515 is known to be emitted for VLA objects. Those are wrapped by
3516 stack_save/stack_restore calls and hence can't lead to unbounded
3517 stack growth even when they occur in loops. */
3518
3519static inline void
3520gimple_call_set_alloca_for_var (gcall *s, bool for_var)
3521{
3522 if (for_var)
3523 s->subcode |= GF_CALL_ALLOCA_FOR_VAR;
3524 else
3525 s->subcode &= ~GF_CALL_ALLOCA_FOR_VAR;
3526}
3527
3528/* Return true if S is a call to builtin_alloca emitted for VLA objects. */
3529
3530static inline bool
3531gimple_call_alloca_for_var_p (gcall *s)
3532{
3533 return (s->subcode & GF_CALL_ALLOCA_FOR_VAR) != 0;
3534}
3535
3536static inline bool
3537gimple_call_alloca_for_var_p (gimple *s)
3538{
3539 const gcall *gc = GIMPLE_CHECK2<gcall *> (s);
3540 return (gc->subcode & GF_CALL_ALLOCA_FOR_VAR) != 0;
3541}
3542
3543/* If BY_DESCRIPTOR_P is true, GIMPLE_CALL S is an indirect call for which
3544 pointers to nested functions are descriptors instead of trampolines. */
3545
3546static inline void
3547gimple_call_set_by_descriptor (gcall *s, bool by_descriptor_p)
3548{
3549 if (by_descriptor_p)
3550 s->subcode |= GF_CALL_BY_DESCRIPTOR;
3551 else
3552 s->subcode &= ~GF_CALL_BY_DESCRIPTOR;
3553}
3554
3555/* Return true if S is a by-descriptor call. */
3556
3557static inline bool
3558gimple_call_by_descriptor_p (gcall *s)
3559{
3560 return (s->subcode & GF_CALL_BY_DESCRIPTOR) != 0;
3561}
3562
3563/* Copy all the GF_CALL_* flags from ORIG_CALL to DEST_CALL. */
3564
3565static inline void
3566gimple_call_copy_flags (gcall *dest_call, gcall *orig_call)
3567{
3568 dest_call->subcode = orig_call->subcode;
3569}
3570
3571
3572/* Return a pointer to the points-to solution for the set of call-used
3573 variables of the call CALL_STMT. */
3574
3575static inline struct pt_solution *
3576gimple_call_use_set (gcall *call_stmt)
3577{
3578 return &call_stmt->call_used;
3579}
3580
3581/* As above, but const. */
3582
3583static inline const pt_solution *
3584gimple_call_use_set (const gcall *call_stmt)
3585{
3586 return &call_stmt->call_used;
3587}
3588
3589/* Return a pointer to the points-to solution for the set of call-clobbered
3590 variables of the call CALL_STMT. */
3591
3592static inline struct pt_solution *
3593gimple_call_clobber_set (gcall *call_stmt)
3594{
3595 return &call_stmt->call_clobbered;
3596}
3597
3598/* As above, but const. */
3599
3600static inline const pt_solution *
3601gimple_call_clobber_set (const gcall *call_stmt)
3602{
3603 return &call_stmt->call_clobbered;
3604}
3605
3606
3607/* Returns true if this is a GIMPLE_ASSIGN or a GIMPLE_CALL with a
3608 non-NULL lhs. */
3609
3610static inline bool
3611gimple_has_lhs (const gimple *stmt)
3612{
3613 if (is_gimple_assign (stmt))
3614 return true;
3615 if (const gcall *call = dyn_cast <const gcall *> (stmt))
3616 return gimple_call_lhs (call) != NULL_TREE(tree) __null;
3617 return false;
3618}
3619
3620
3621/* Return the code of the predicate computed by conditional statement GS. */
3622
3623static inline enum tree_code
3624gimple_cond_code (const gcond *gs)
3625{
3626 return (enum tree_code) gs->subcode;
3627}
3628
3629static inline enum tree_code
3630gimple_cond_code (const gimple *gs)
3631{
3632 const gcond *gc = GIMPLE_CHECK2<const gcond *> (gs);
3633 return gimple_cond_code (gc);
3634}
3635
3636
3637/* Set CODE to be the predicate code for the conditional statement GS. */
3638
3639static inline void
3640gimple_cond_set_code (gcond *gs, enum tree_code code)
3641{
3642 gs->subcode = code;
3643}
3644
3645
3646/* Return the LHS of the predicate computed by conditional statement GS. */
3647
3648static inline tree
3649gimple_cond_lhs (const gcond *gs)
3650{
3651 return gs->op[0];
3652}
3653
3654static inline tree
3655gimple_cond_lhs (const gimple *gs)
3656{
3657 const gcond *gc = GIMPLE_CHECK2<const gcond *> (gs);
3658 return gimple_cond_lhs (gc);
3659}
3660
3661/* Return the pointer to the LHS of the predicate computed by conditional
3662 statement GS. */
3663
3664static inline tree *
3665gimple_cond_lhs_ptr (gcond *gs)
3666{
3667 return &gs->op[0];
3668}
3669
3670/* Set LHS to be the LHS operand of the predicate computed by
3671 conditional statement GS. */
3672
3673static inline void
3674gimple_cond_set_lhs (gcond *gs, tree lhs)
3675{
3676 gs->op[0] = lhs;
3677}
3678
3679
3680/* Return the RHS operand of the predicate computed by conditional GS. */
3681
3682static inline tree
3683gimple_cond_rhs (const gcond *gs)
3684{
3685 return gs->op[1];
3686}
3687
3688static inline tree
3689gimple_cond_rhs (const gimple *gs)
3690{
3691 const gcond *gc = GIMPLE_CHECK2<const gcond *> (gs);
3692 return gimple_cond_rhs (gc);
3693}
3694
3695/* Return the pointer to the RHS operand of the predicate computed by
3696 conditional GS. */
3697
3698static inline tree *
3699gimple_cond_rhs_ptr (gcond *gs)
3700{
3701 return &gs->op[1];
3702}
3703
3704
3705/* Set RHS to be the RHS operand of the predicate computed by
3706 conditional statement GS. */
3707
3708static inline void
3709gimple_cond_set_rhs (gcond *gs, tree rhs)
3710{
3711 gs->op[1] = rhs;
3712}
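
A GIMPLE_COND packs the comparison code into the subcode and its two operands into op[0] and op[1]. A decomposition sketch (illustration only, not part of gimple.h):

/* Sketch: pull apart `if (lhs <code> rhs)' for inspection.  */

static void
decompose_cond (const gcond *cond, enum tree_code *code, tree *lhs, tree *rhs)
{
  *code = gimple_cond_code (cond);	/* e.g. LT_EXPR for `<'.  */
  *lhs = gimple_cond_lhs (cond);
  *rhs = gimple_cond_rhs (cond);
}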
3713
3714
3715/* Return the label used by conditional statement GS when its
3716 predicate evaluates to true. */
3717
3718static inline tree
3719gimple_cond_true_label (const gcond *gs)
3720{
3721 return gs->op[2];
3722}
3723
3724
3725/* Set LABEL to be the label used by conditional statement GS when its
3726 predicate evaluates to true. */
3727
3728static inline void
3729gimple_cond_set_true_label (gcond *gs, tree label)
3730{
3731 gs->op[2] = label;
3732}
3733