Bug Summary

File:build/gcc/builtins.c
Warning:line 3968, column 22
Although the value stored to 'val' is used in the enclosing expression, the value is never actually read from 'val'

Annotated Source Code

Press '?' to see keyboard shortcuts

clang -cc1 -cc1 -triple x86_64-unknown-linux-gnu -analyze -disable-free -disable-llvm-verifier -discard-value-names -main-file-name builtins.c -analyzer-store=region -analyzer-opt-analyze-nested-blocks -analyzer-checker=core -analyzer-checker=apiModeling -analyzer-checker=unix -analyzer-checker=deadcode -analyzer-checker=cplusplus -analyzer-checker=security.insecureAPI.UncheckedReturn -analyzer-checker=security.insecureAPI.getpw -analyzer-checker=security.insecureAPI.gets -analyzer-checker=security.insecureAPI.mktemp -analyzer-checker=security.insecureAPI.mkstemp -analyzer-checker=security.insecureAPI.vfork -analyzer-checker=nullability.NullPassedToNonnull -analyzer-checker=nullability.NullReturnedFromNonnull -analyzer-output plist -w -setup-static-analyzer -analyzer-config-compatibility-mode=true -mrelocation-model static -mframe-pointer=none -fmath-errno -fno-rounding-math -mconstructor-aliases -munwind-tables -target-cpu x86-64 -tune-cpu generic -debugger-tuning=gdb -fcoverage-compilation-dir=/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/objdir/gcc -resource-dir /usr/lib64/clang/13.0.0 -D IN_GCC -D HAVE_CONFIG_H -I . -I . -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/. -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../include -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libcpp/include -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libcody -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libdecnumber -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libdecnumber/bid -I ../libdecnumber -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libbacktrace -internal-isystem /usr/bin/../lib64/gcc/x86_64-suse-linux/11/../../../../include/c++/11 -internal-isystem /usr/bin/../lib64/gcc/x86_64-suse-linux/11/../../../../include/c++/11/x86_64-suse-linux -internal-isystem /usr/bin/../lib64/gcc/x86_64-suse-linux/11/../../../../include/c++/11/backward -internal-isystem /usr/lib64/clang/13.0.0/include -internal-isystem /usr/local/include -internal-isystem /usr/bin/../lib64/gcc/x86_64-suse-linux/11/../../../../x86_64-suse-linux/include -internal-externc-isystem /include -internal-externc-isystem /usr/include -O2 -Wno-narrowing -Wwrite-strings -Wno-error=format-diag -Wno-long-long -Wno-variadic-macros -Wno-overlength-strings -fdeprecated-macro -fdebug-compilation-dir=/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/objdir/gcc -ferror-limit 19 -fno-rtti -fgnuc-version=4.2.1 -vectorize-loops -vectorize-slp -analyzer-output=plist-html -analyzer-config silence-checkers=core.NullDereference -faddrsig -D__GCC_HAVE_DWARF2_CFI_ASM=1 -o /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/objdir/clang-static-analyzer/2021-11-20-133755-20252-1/report-Uc2TOL.plist -x c++ /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c
1/* Expand builtin functions.
2 Copyright (C) 1988-2021 Free Software Foundation, Inc.
3
4This file is part of GCC.
5
6GCC is free software; you can redistribute it and/or modify it under
7the terms of the GNU General Public License as published by the Free
8Software Foundation; either version 3, or (at your option) any later
9version.
10
11GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12WARRANTY; without even the implied warranty of MERCHANTABILITY or
13FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14for more details.
15
16You should have received a copy of the GNU General Public License
17along with GCC; see the file COPYING3. If not see
18<http://www.gnu.org/licenses/>. */
19
20/* Legacy warning! Please add no further builtin simplifications here
21 (apart from pure constant folding) - builtin simplifications should go
22 to match.pd or gimple-fold.c instead. */
23
24#include "config.h"
25#include "system.h"
26#include "coretypes.h"
27#include "backend.h"
28#include "target.h"
29#include "rtl.h"
30#include "tree.h"
31#include "memmodel.h"
32#include "gimple.h"
33#include "predict.h"
34#include "tm_p.h"
35#include "stringpool.h"
36#include "tree-vrp.h"
37#include "tree-ssanames.h"
38#include "expmed.h"
39#include "optabs.h"
40#include "emit-rtl.h"
41#include "recog.h"
42#include "diagnostic-core.h"
43#include "alias.h"
44#include "fold-const.h"
45#include "fold-const-call.h"
46#include "gimple-ssa-warn-access.h"
47#include "stor-layout.h"
48#include "calls.h"
49#include "varasm.h"
50#include "tree-object-size.h"
51#include "tree-ssa-strlen.h"
52#include "realmpfr.h"
53#include "cfgrtl.h"
54#include "except.h"
55#include "dojump.h"
56#include "explow.h"
57#include "stmt.h"
58#include "expr.h"
59#include "libfuncs.h"
60#include "output.h"
61#include "typeclass.h"
62#include "langhooks.h"
63#include "value-prof.h"
64#include "builtins.h"
65#include "stringpool.h"
66#include "attribs.h"
67#include "asan.h"
68#include "internal-fn.h"
69#include "case-cfn-macros.h"
70#include "gimple-fold.h"
71#include "intl.h"
72#include "file-prefix-map.h" /* remap_macro_filename() */
73#include "gomp-constants.h"
74#include "omp-general.h"
75#include "tree-dfa.h"
76#include "gimple-iterator.h"
77#include "gimple-ssa.h"
78#include "tree-ssa-live.h"
79#include "tree-outof-ssa.h"
80#include "attr-fnspec.h"
81#include "demangle.h"
82#include "gimple-range.h"
83#include "pointer-query.h"
84
85struct target_builtins default_target_builtins;
86#if SWITCHABLE_TARGET1
87struct target_builtins *this_target_builtins = &default_target_builtins;
88#endif
89
90/* Define the names of the builtin function types and codes. */
91const char *const built_in_class_names[BUILT_IN_LAST(BUILT_IN_NORMAL + 1)]
92 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
93
94#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
95const char * built_in_names[(int) END_BUILTINS] =
96{
97#include "builtins.def"
98};
99
100/* Setup an array of builtin_info_type, make sure each element decl is
101 initialized to NULL_TREE. */
102builtin_info_type builtin_info[(int)END_BUILTINS];
103
104/* Non-zero if __builtin_constant_p should be folded right away. */
105bool force_folding_builtin_constant_p;
106
107static int target_char_cast (tree, char *);
108static int apply_args_size (void);
109static int apply_result_size (void);
110static rtx result_vector (int, rtx);
111static void expand_builtin_prefetch (tree);
112static rtx expand_builtin_apply_args (void);
113static rtx expand_builtin_apply_args_1 (void);
114static rtx expand_builtin_apply (rtx, rtx, rtx);
115static void expand_builtin_return (rtx);
116static enum type_class type_to_class (tree);
117static rtx expand_builtin_classify_type (tree);
118static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
119static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
120static rtx expand_builtin_interclass_mathfn (tree, rtx);
121static rtx expand_builtin_sincos (tree);
122static rtx expand_builtin_cexpi (tree, rtx);
123static rtx expand_builtin_int_roundingfn (tree, rtx);
124static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
125static rtx expand_builtin_next_arg (void);
126static rtx expand_builtin_va_start (tree);
127static rtx expand_builtin_va_end (tree);
128static rtx expand_builtin_va_copy (tree);
129static rtx inline_expand_builtin_bytecmp (tree, rtx);
130static rtx expand_builtin_strcmp (tree, rtx);
131static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
132static rtx expand_builtin_memcpy (tree, rtx);
133static rtx expand_builtin_memory_copy_args (tree dest, tree src, tree len,
134 rtx target, tree exp,
135 memop_ret retmode,
136 bool might_overlap);
137static rtx expand_builtin_memmove (tree, rtx);
138static rtx expand_builtin_mempcpy (tree, rtx);
139static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx, tree, memop_ret);
140static rtx expand_builtin_strcpy (tree, rtx);
141static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx);
142static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
143static rtx expand_builtin_strncpy (tree, rtx);
144static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
145static rtx expand_builtin_bzero (tree);
146static rtx expand_builtin_strlen (tree, rtx, machine_mode);
147static rtx expand_builtin_strnlen (tree, rtx, machine_mode);
148static rtx expand_builtin_alloca (tree);
149static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
150static rtx expand_builtin_frame_address (tree, tree);
151static tree stabilize_va_list_loc (location_t, tree, int);
152static rtx expand_builtin_expect (tree, rtx);
153static rtx expand_builtin_expect_with_probability (tree, rtx);
154static tree fold_builtin_constant_p (tree);
155static tree fold_builtin_classify_type (tree);
156static tree fold_builtin_strlen (location_t, tree, tree, tree);
157static tree fold_builtin_inf (location_t, tree, int);
158static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
159static bool validate_arg (const_tree, enum tree_code code);
160static rtx expand_builtin_fabs (tree, rtx, rtx);
161static rtx expand_builtin_signbit (tree, rtx);
162static tree fold_builtin_memcmp (location_t, tree, tree, tree);
163static tree fold_builtin_isascii (location_t, tree);
164static tree fold_builtin_toascii (location_t, tree);
165static tree fold_builtin_isdigit (location_t, tree);
166static tree fold_builtin_fabs (location_t, tree, tree);
167static tree fold_builtin_abs (location_t, tree, tree);
168static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
169 enum tree_code);
170static tree fold_builtin_varargs (location_t, tree, tree*, int);
171
172static tree fold_builtin_strpbrk (location_t, tree, tree, tree, tree);
173static tree fold_builtin_strspn (location_t, tree, tree, tree);
174static tree fold_builtin_strcspn (location_t, tree, tree, tree);
175
176static rtx expand_builtin_object_size (tree);
177static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
178 enum built_in_function);
179static void maybe_emit_chk_warning (tree, enum built_in_function);
180static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
181static tree fold_builtin_object_size (tree, tree);
182
183unsigned HOST_WIDE_INTlong target_newline;
184unsigned HOST_WIDE_INTlong target_percent;
185static unsigned HOST_WIDE_INTlong target_c;
186static unsigned HOST_WIDE_INTlong target_s;
187char target_percent_c[3];
188char target_percent_s[3];
189char target_percent_s_newline[4];
190static tree do_mpfr_remquo (tree, tree, tree);
191static tree do_mpfr_lgamma_r (tree, tree, tree);
192static void expand_builtin_sync_synchronize (void);
193
194/* Return true if NAME starts with __builtin_ or __sync_. */
195
196static bool
197is_builtin_name (const char *name)
198{
199 return (startswith (name, "__builtin_")
200 || startswith (name, "__sync_")
201 || startswith (name, "__atomic_"));
202}
203
204/* Return true if NODE should be considered for inline expansion regardless
205 of the optimization level. This means whenever a function is invoked with
206 its "internal" name, which normally contains the prefix "__builtin". */
207
208bool
209called_as_built_in (tree node)
210{
211 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
212 we want the name used to call the function, not the name it
213 will have. */
214 const char *name = IDENTIFIER_POINTER (DECL_NAME (node))((const char *) (tree_check ((((contains_struct_check ((node)
, (TS_DECL_MINIMAL), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 214, __FUNCTION__))->decl_minimal.name)), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 214, __FUNCTION__, (IDENTIFIER_NODE)))->identifier.id.str
)
;
215 return is_builtin_name (name);
216}
217
218/* Compute values M and N such that M divides (address of EXP - N) and such
219 that N < M. If these numbers can be determined, store M in alignp and N in
220 *BITPOSP and return true. Otherwise return false and store BITS_PER_UNIT to
221 *alignp and any bit-offset to *bitposp.
222
223 Note that the address (and thus the alignment) computed here is based
224 on the address to which a symbol resolves, whereas DECL_ALIGN is based
225 on the address at which an object is actually located. These two
226 addresses are not always the same. For example, on ARM targets,
227 the address &foo of a Thumb function foo() has the lowest bit set,
228 whereas foo() itself starts on an even address.
229
230 If ADDR_P is true we are taking the address of the memory reference EXP
231 and thus cannot rely on the access taking place. */
232
233static bool
234get_object_alignment_2 (tree exp, unsigned int *alignp,
235 unsigned HOST_WIDE_INTlong *bitposp, bool addr_p)
236{
237 poly_int64 bitsize, bitpos;
238 tree offset;
239 machine_mode mode;
240 int unsignedp, reversep, volatilep;
241 unsigned int align = BITS_PER_UNIT(8);
242 bool known_alignment = false;
243
244 /* Get the innermost object and the constant (bitpos) and possibly
245 variable (offset) offset of the access. */
246 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
247 &unsignedp, &reversep, &volatilep);
248
249 /* Extract alignment information from the innermost object and
250 possibly adjust bitpos and offset. */
251 if (TREE_CODE (exp)((enum tree_code) (exp)->base.code) == FUNCTION_DECL)
252 {
253 /* Function addresses can encode extra information besides their
254 alignment. However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
255 allows the low bit to be used as a virtual bit, we know
256 that the address itself must be at least 2-byte aligned. */
257 if (TARGET_PTRMEMFUNC_VBIT_LOCATIONptrmemfunc_vbit_in_pfn == ptrmemfunc_vbit_in_pfn)
258 align = 2 * BITS_PER_UNIT(8);
259 }
260 else if (TREE_CODE (exp)((enum tree_code) (exp)->base.code) == LABEL_DECL)
261 ;
262 else if (TREE_CODE (exp)((enum tree_code) (exp)->base.code) == CONST_DECL)
263 {
264 /* The alignment of a CONST_DECL is determined by its initializer. */
265 exp = DECL_INITIAL (exp)((contains_struct_check ((exp), (TS_DECL_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 265, __FUNCTION__))->decl_common.initial)
;
266 align = TYPE_ALIGN (TREE_TYPE (exp))(((tree_class_check ((((contains_struct_check ((exp), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 266, __FUNCTION__))->typed.type)), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 266, __FUNCTION__))->type_common.align) ? ((unsigned)1) <<
(((tree_class_check ((((contains_struct_check ((exp), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 266, __FUNCTION__))->typed.type)), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 266, __FUNCTION__))->type_common.align) - 1) : 0)
;
267 if (CONSTANT_CLASS_P (exp)(tree_code_type[(int) (((enum tree_code) (exp)->base.code)
)] == tcc_constant)
)
268 align = targetm.constant_alignment (exp, align);
269
270 known_alignment = true;
271 }
272 else if (DECL_P (exp)(tree_code_type[(int) (((enum tree_code) (exp)->base.code)
)] == tcc_declaration)
)
273 {
274 align = DECL_ALIGN (exp)(((contains_struct_check ((exp), (TS_DECL_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 274, __FUNCTION__))->decl_common.align) ? ((unsigned)1) <<
(((contains_struct_check ((exp), (TS_DECL_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 274, __FUNCTION__))->decl_common.align) - 1) : 0)
;
275 known_alignment = true;
276 }
277 else if (TREE_CODE (exp)((enum tree_code) (exp)->base.code) == INDIRECT_REF
278 || TREE_CODE (exp)((enum tree_code) (exp)->base.code) == MEM_REF
279 || TREE_CODE (exp)((enum tree_code) (exp)->base.code) == TARGET_MEM_REF)
280 {
281 tree addr = TREE_OPERAND (exp, 0)(*((const_cast<tree*> (tree_operand_check ((exp), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 281, __FUNCTION__)))))
;
282 unsigned ptr_align;
283 unsigned HOST_WIDE_INTlong ptr_bitpos;
284 unsigned HOST_WIDE_INTlong ptr_bitmask = ~0;
285
286 /* If the address is explicitely aligned, handle that. */
287 if (TREE_CODE (addr)((enum tree_code) (addr)->base.code) == BIT_AND_EXPR
288 && TREE_CODE (TREE_OPERAND (addr, 1))((enum tree_code) ((*((const_cast<tree*> (tree_operand_check
((addr), (1), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 288, __FUNCTION__))))))->base.code)
== INTEGER_CST)
289 {
290 ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1))((unsigned long) (*tree_int_cst_elt_check (((*((const_cast<
tree*> (tree_operand_check ((addr), (1), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 290, __FUNCTION__)))))), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 290, __FUNCTION__)))
;
291 ptr_bitmask *= BITS_PER_UNIT(8);
292 align = least_bit_hwi (ptr_bitmask);
293 addr = TREE_OPERAND (addr, 0)(*((const_cast<tree*> (tree_operand_check ((addr), (0),
"/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 293, __FUNCTION__)))))
;
294 }
295
296 known_alignment
297 = get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
298 align = MAX (ptr_align, align)((ptr_align) > (align) ? (ptr_align) : (align));
299
300 /* Re-apply explicit alignment to the bitpos. */
301 ptr_bitpos &= ptr_bitmask;
302
303 /* The alignment of the pointer operand in a TARGET_MEM_REF
304 has to take the variable offset parts into account. */
305 if (TREE_CODE (exp)((enum tree_code) (exp)->base.code) == TARGET_MEM_REF)
306 {
307 if (TMR_INDEX (exp)((*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 307, __FUNCTION__, (TARGET_MEM_REF)))), (2), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 307, __FUNCTION__))))))
)
308 {
309 unsigned HOST_WIDE_INTlong step = 1;
310 if (TMR_STEP (exp)((*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 310, __FUNCTION__, (TARGET_MEM_REF)))), (3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 310, __FUNCTION__))))))
)
311 step = TREE_INT_CST_LOW (TMR_STEP (exp))((unsigned long) (*tree_int_cst_elt_check ((((*((const_cast<
tree*> (tree_operand_check (((tree_check ((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 311, __FUNCTION__, (TARGET_MEM_REF)))), (3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 311, __FUNCTION__))))))), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 311, __FUNCTION__)))
;
312 align = MIN (align, least_bit_hwi (step) * BITS_PER_UNIT)((align) < (least_bit_hwi (step) * (8)) ? (align) : (least_bit_hwi
(step) * (8)))
;
313 }
314 if (TMR_INDEX2 (exp)((*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 314, __FUNCTION__, (TARGET_MEM_REF)))), (4), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 314, __FUNCTION__))))))
)
315 align = BITS_PER_UNIT(8);
316 known_alignment = false;
317 }
318
319 /* When EXP is an actual memory reference then we can use
320 TYPE_ALIGN of a pointer indirection to derive alignment.
321 Do so only if get_pointer_alignment_1 did not reveal absolute
322 alignment knowledge and if using that alignment would
323 improve the situation. */
324 unsigned int talign;
325 if (!addr_p && !known_alignment
326 && (talign = min_align_of_type (TREE_TYPE (exp)((contains_struct_check ((exp), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 326, __FUNCTION__))->typed.type)
) * BITS_PER_UNIT(8))
327 && talign > align)
328 align = talign;
329 else
330 {
331 /* Else adjust bitpos accordingly. */
332 bitpos += ptr_bitpos;
333 if (TREE_CODE (exp)((enum tree_code) (exp)->base.code) == MEM_REF
334 || TREE_CODE (exp)((enum tree_code) (exp)->base.code) == TARGET_MEM_REF)
335 bitpos += mem_ref_offset (exp).force_shwi () * BITS_PER_UNIT(8);
336 }
337 }
338 else if (TREE_CODE (exp)((enum tree_code) (exp)->base.code) == STRING_CST)
339 {
340 /* STRING_CST are the only constant objects we allow to be not
341 wrapped inside a CONST_DECL. */
342 align = TYPE_ALIGN (TREE_TYPE (exp))(((tree_class_check ((((contains_struct_check ((exp), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 342, __FUNCTION__))->typed.type)), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 342, __FUNCTION__))->type_common.align) ? ((unsigned)1) <<
(((tree_class_check ((((contains_struct_check ((exp), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 342, __FUNCTION__))->typed.type)), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 342, __FUNCTION__))->type_common.align) - 1) : 0)
;
343 if (CONSTANT_CLASS_P (exp)(tree_code_type[(int) (((enum tree_code) (exp)->base.code)
)] == tcc_constant)
)
344 align = targetm.constant_alignment (exp, align);
345
346 known_alignment = true;
347 }
348
349 /* If there is a non-constant offset part extract the maximum
350 alignment that can prevail. */
351 if (offset)
352 {
353 unsigned int trailing_zeros = tree_ctz (offset);
354 if (trailing_zeros < HOST_BITS_PER_INT(8 * 4))
355 {
356 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT(8);
357 if (inner)
358 align = MIN (align, inner)((align) < (inner) ? (align) : (inner));
359 }
360 }
361
362 /* Account for the alignment of runtime coefficients, so that the constant
363 bitpos is guaranteed to be accurate. */
364 unsigned int alt_align = ::known_alignment (bitpos - bitpos.coeffs[0]);
365 if (alt_align != 0 && alt_align < align)
366 {
367 align = alt_align;
368 known_alignment = false;
369 }
370
371 *alignp = align;
372 *bitposp = bitpos.coeffs[0] & (align - 1);
373 return known_alignment;
374}
375
376/* For a memory reference expression EXP compute values M and N such that M
377 divides (&EXP - N) and such that N < M. If these numbers can be determined,
378 store M in alignp and N in *BITPOSP and return true. Otherwise return false
379 and store BITS_PER_UNIT to *alignp and any bit-offset to *bitposp. */
380
381bool
382get_object_alignment_1 (tree exp, unsigned int *alignp,
383 unsigned HOST_WIDE_INTlong *bitposp)
384{
385 /* Strip a WITH_SIZE_EXPR, get_inner_reference doesn't know how to deal
386 with it. */
387 if (TREE_CODE (exp)((enum tree_code) (exp)->base.code) == WITH_SIZE_EXPR)
388 exp = TREE_OPERAND (exp, 0)(*((const_cast<tree*> (tree_operand_check ((exp), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 388, __FUNCTION__)))))
;
389 return get_object_alignment_2 (exp, alignp, bitposp, false);
390}
391
392/* Return the alignment in bits of EXP, an object. */
393
394unsigned int
395get_object_alignment (tree exp)
396{
397 unsigned HOST_WIDE_INTlong bitpos = 0;
398 unsigned int align;
399
400 get_object_alignment_1 (exp, &align, &bitpos);
401
402 /* align and bitpos now specify known low bits of the pointer.
403 ptr & (align - 1) == bitpos. */
404
405 if (bitpos != 0)
406 align = least_bit_hwi (bitpos);
407 return align;
408}
409
410/* For a pointer valued expression EXP compute values M and N such that M
411 divides (EXP - N) and such that N < M. If these numbers can be determined,
412 store M in alignp and N in *BITPOSP and return true. Return false if
413 the results are just a conservative approximation.
414
415 If EXP is not a pointer, false is returned too. */
416
417bool
418get_pointer_alignment_1 (tree exp, unsigned int *alignp,
419 unsigned HOST_WIDE_INTlong *bitposp)
420{
421 STRIP_NOPS (exp)(exp) = tree_strip_nop_conversions ((const_cast<union tree_node
*> (((exp)))))
;
422
423 if (TREE_CODE (exp)((enum tree_code) (exp)->base.code) == ADDR_EXPR)
424 return get_object_alignment_2 (TREE_OPERAND (exp, 0)(*((const_cast<tree*> (tree_operand_check ((exp), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 424, __FUNCTION__)))))
,
425 alignp, bitposp, true);
426 else if (TREE_CODE (exp)((enum tree_code) (exp)->base.code) == POINTER_PLUS_EXPR)
427 {
428 unsigned int align;
429 unsigned HOST_WIDE_INTlong bitpos;
430 bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0)(*((const_cast<tree*> (tree_operand_check ((exp), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 430, __FUNCTION__)))))
,
431 &align, &bitpos);
432 if (TREE_CODE (TREE_OPERAND (exp, 1))((enum tree_code) ((*((const_cast<tree*> (tree_operand_check
((exp), (1), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 432, __FUNCTION__))))))->base.code)
== INTEGER_CST)
433 bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))((unsigned long) (*tree_int_cst_elt_check (((*((const_cast<
tree*> (tree_operand_check ((exp), (1), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 433, __FUNCTION__)))))), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 433, __FUNCTION__)))
* BITS_PER_UNIT(8);
434 else
435 {
436 unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1)(*((const_cast<tree*> (tree_operand_check ((exp), (1), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 436, __FUNCTION__)))))
);
437 if (trailing_zeros < HOST_BITS_PER_INT(8 * 4))
438 {
439 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT(8);
440 if (inner)
441 align = MIN (align, inner)((align) < (inner) ? (align) : (inner));
442 }
443 }
444 *alignp = align;
445 *bitposp = bitpos & (align - 1);
446 return res;
447 }
448 else if (TREE_CODE (exp)((enum tree_code) (exp)->base.code) == SSA_NAME
449 && POINTER_TYPE_P (TREE_TYPE (exp))(((enum tree_code) (((contains_struct_check ((exp), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 449, __FUNCTION__))->typed.type))->base.code) == POINTER_TYPE
|| ((enum tree_code) (((contains_struct_check ((exp), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 449, __FUNCTION__))->typed.type))->base.code) == REFERENCE_TYPE
)
)
450 {
451 unsigned int ptr_align, ptr_misalign;
452 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp)(tree_check ((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 452, __FUNCTION__, (SSA_NAME)))->ssa_name.info.ptr_info
;
453
454 if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
455 {
456 *bitposp = ptr_misalign * BITS_PER_UNIT(8);
457 *alignp = ptr_align * BITS_PER_UNIT(8);
458 /* Make sure to return a sensible alignment when the multiplication
459 by BITS_PER_UNIT overflowed. */
460 if (*alignp == 0)
461 *alignp = 1u << (HOST_BITS_PER_INT(8 * 4) - 1);
462 /* We cannot really tell whether this result is an approximation. */
463 return false;
464 }
465 else
466 {
467 *bitposp = 0;
468 *alignp = BITS_PER_UNIT(8);
469 return false;
470 }
471 }
472 else if (TREE_CODE (exp)((enum tree_code) (exp)->base.code) == INTEGER_CST)
473 {
474 *alignp = BIGGEST_ALIGNMENT(((global_options.x_target_flags & (1U << 12)) != 0
) ? 32 : (((global_options.x_ix86_isa_flags & (1UL <<
15)) != 0) ? 512 : (((global_options.x_ix86_isa_flags & (
1UL << 8)) != 0) ? 256 : 128)))
;
475 *bitposp = ((TREE_INT_CST_LOW (exp)((unsigned long) (*tree_int_cst_elt_check ((exp), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 475, __FUNCTION__)))
* BITS_PER_UNIT(8))
476 & (BIGGEST_ALIGNMENT(((global_options.x_target_flags & (1U << 12)) != 0
) ? 32 : (((global_options.x_ix86_isa_flags & (1UL <<
15)) != 0) ? 512 : (((global_options.x_ix86_isa_flags & (
1UL << 8)) != 0) ? 256 : 128)))
- 1));
477 return true;
478 }
479
480 *bitposp = 0;
481 *alignp = BITS_PER_UNIT(8);
482 return false;
483}
484
485/* Return the alignment in bits of EXP, a pointer valued expression.
486 The alignment returned is, by default, the alignment of the thing that
487 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
488
489 Otherwise, look at the expression to see if we can do better, i.e., if the
490 expression is actually pointing at an object whose alignment is tighter. */
491
492unsigned int
493get_pointer_alignment (tree exp)
494{
495 unsigned HOST_WIDE_INTlong bitpos = 0;
496 unsigned int align;
497
498 get_pointer_alignment_1 (exp, &align, &bitpos);
499
500 /* align and bitpos now specify known low bits of the pointer.
501 ptr & (align - 1) == bitpos. */
502
503 if (bitpos != 0)
504 align = least_bit_hwi (bitpos);
505
506 return align;
507}
508
509/* Return the number of leading non-zero elements in the sequence
510 [ PTR, PTR + MAXELTS ) where each element's size is ELTSIZE bytes.
511 ELTSIZE must be a power of 2 less than 8. Used by c_strlen. */
512
513unsigned
514string_length (const void *ptr, unsigned eltsize, unsigned maxelts)
515{
516 gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4)((void)(!(eltsize == 1 || eltsize == 2 || eltsize == 4) ? fancy_abort
("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 516, __FUNCTION__), 0 : 0))
;
517
518 unsigned n;
519
520 if (eltsize == 1)
521 {
522 /* Optimize the common case of plain char. */
523 for (n = 0; n < maxelts; n++)
524 {
525 const char *elt = (const char*) ptr + n;
526 if (!*elt)
527 break;
528 }
529 }
530 else
531 {
532 for (n = 0; n < maxelts; n++)
533 {
534 const char *elt = (const char*) ptr + n * eltsize;
535 if (!memcmp (elt, "\0\0\0\0", eltsize))
536 break;
537 }
538 }
539 return n;
540}
541
542/* Compute the length of a null-terminated character string or wide
543 character string handling character sizes of 1, 2, and 4 bytes.
544 TREE_STRING_LENGTH is not the right way because it evaluates to
545 the size of the character array in bytes (as opposed to characters)
546 and because it can contain a zero byte in the middle.
547
548 ONLY_VALUE should be nonzero if the result is not going to be emitted
549 into the instruction stream and zero if it is going to be expanded.
550 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
551 is returned, otherwise NULL, since
552 len = c_strlen (ARG, 1); if (len) expand_expr (len, ...); would not
553 evaluate the side-effects.
554
555 If ONLY_VALUE is two then we do not emit warnings about out-of-bound
556 accesses. Note that this implies the result is not going to be emitted
557 into the instruction stream.
558
559 Additional information about the string accessed may be recorded
560 in DATA. For example, if ARG references an unterminated string,
561 then the declaration will be stored in the DECL field. If the
562 length of the unterminated string can be determined, it'll be
563 stored in the LEN field. Note this length could well be different
564 than what a C strlen call would return.
565
566 ELTSIZE is 1 for normal single byte character strings, and 2 or
567 4 for wide characer strings. ELTSIZE is by default 1.
568
569 The value returned is of type `ssizetype'. */
570
571tree
572c_strlen (tree arg, int only_value, c_strlen_data *data, unsigned eltsize)
573{
574 /* If we were not passed a DATA pointer, then get one to a local
575 structure. That avoids having to check DATA for NULL before
576 each time we want to use it. */
577 c_strlen_data local_strlen_data = { };
578 if (!data)
579 data = &local_strlen_data;
580
581 gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4)((void)(!(eltsize == 1 || eltsize == 2 || eltsize == 4) ? fancy_abort
("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 581, __FUNCTION__), 0 : 0))
;
582
583 tree src = STRIP_NOPS (arg)(arg) = tree_strip_nop_conversions ((const_cast<union tree_node
*> (((arg)))))
;
584 if (TREE_CODE (src)((enum tree_code) (src)->base.code) == COND_EXPR
585 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))((non_type_check (((*((const_cast<tree*> (tree_operand_check
((src), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 585, __FUNCTION__)))))), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 585, __FUNCTION__))->base.side_effects_flag)
))
586 {
587 tree len1, len2;
588
589 len1 = c_strlen (TREE_OPERAND (src, 1)(*((const_cast<tree*> (tree_operand_check ((src), (1), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 589, __FUNCTION__)))))
, only_value, data, eltsize);
590 len2 = c_strlen (TREE_OPERAND (src, 2)(*((const_cast<tree*> (tree_operand_check ((src), (2), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 590, __FUNCTION__)))))
, only_value, data, eltsize);
591 if (tree_int_cst_equal (len1, len2))
592 return len1;
593 }
594
595 if (TREE_CODE (src)((enum tree_code) (src)->base.code) == COMPOUND_EXPR
596 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))((non_type_check (((*((const_cast<tree*> (tree_operand_check
((src), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 596, __FUNCTION__)))))), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 596, __FUNCTION__))->base.side_effects_flag)
))
597 return c_strlen (TREE_OPERAND (src, 1)(*((const_cast<tree*> (tree_operand_check ((src), (1), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 597, __FUNCTION__)))))
, only_value, data, eltsize);
598
599 location_t loc = EXPR_LOC_OR_LOC (src, input_location)((((IS_ADHOC_LOC (((((src)) && ((tree_code_type[(int)
(((enum tree_code) ((src))->base.code))]) >= tcc_reference
&& (tree_code_type[(int) (((enum tree_code) ((src))->
base.code))]) <= tcc_expression)) ? (src)->exp.locus : (
(location_t) 0)))) ? get_location_from_adhoc_loc (line_table,
((((src)) && ((tree_code_type[(int) (((enum tree_code
) ((src))->base.code))]) >= tcc_reference && (tree_code_type
[(int) (((enum tree_code) ((src))->base.code))]) <= tcc_expression
)) ? (src)->exp.locus : ((location_t) 0))) : (((((src)) &&
((tree_code_type[(int) (((enum tree_code) ((src))->base.code
))]) >= tcc_reference && (tree_code_type[(int) (((
enum tree_code) ((src))->base.code))]) <= tcc_expression
)) ? (src)->exp.locus : ((location_t) 0)))) != ((location_t
) 0)) ? (src)->exp.locus : (input_location))
;
600
601 /* Offset from the beginning of the string in bytes. */
602 tree byteoff;
603 tree memsize;
604 tree decl;
605 src = string_constant (src, &byteoff, &memsize, &decl);
606 if (src == 0)
607 return NULL_TREE(tree) __null;
608
609 /* Determine the size of the string element. */
610 if (eltsize != tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (src)))((tree_class_check ((((contains_struct_check ((((contains_struct_check
((src), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 610, __FUNCTION__))->typed.type)), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 610, __FUNCTION__))->typed.type)), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 610, __FUNCTION__))->type_common.size_unit)
))
611 return NULL_TREE(tree) __null;
612
613 /* Set MAXELTS to sizeof (SRC) / sizeof (*SRC) - 1, the maximum possible
614 length of SRC. Prefer TYPE_SIZE() to TREE_STRING_LENGTH() if possible
615 in case the latter is less than the size of the array, such as when
616 SRC refers to a short string literal used to initialize a large array.
617 In that case, the elements of the array after the terminating NUL are
618 all NUL. */
619 HOST_WIDE_INTlong strelts = TREE_STRING_LENGTH (src)((tree_check ((src), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 619, __FUNCTION__, (STRING_CST)))->string.length)
;
620 strelts = strelts / eltsize;
621
622 if (!tree_fits_uhwi_p (memsize))
623 return NULL_TREE(tree) __null;
624
625 HOST_WIDE_INTlong maxelts = tree_to_uhwi (memsize) / eltsize;
626
627 /* PTR can point to the byte representation of any string type, including
628 char* and wchar_t*. */
629 const char *ptr = TREE_STRING_POINTER (src)((const char *)((tree_check ((src), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 629, __FUNCTION__, (STRING_CST)))->string.str))
;
630
631 if (byteoff && TREE_CODE (byteoff)((enum tree_code) (byteoff)->base.code) != INTEGER_CST)
632 {
633 /* The code below works only for single byte character types. */
634 if (eltsize != 1)
635 return NULL_TREE(tree) __null;
636
637 /* If the string has an internal NUL character followed by any
638 non-NUL characters (e.g., "foo\0bar"), we can't compute
639 the offset to the following NUL if we don't know where to
640 start searching for it. */
641 unsigned len = string_length (ptr, eltsize, strelts);
642
643 /* Return when an embedded null character is found or none at all.
644 In the latter case, set the DECL/LEN field in the DATA structure
645 so that callers may examine them. */
646 if (len + 1 < strelts)
647 return NULL_TREE(tree) __null;
648 else if (len >= maxelts)
649 {
650 data->decl = decl;
651 data->off = byteoff;
652 data->minlen = ssize_int (len)size_int_kind (len, stk_ssizetype);
653 return NULL_TREE(tree) __null;
654 }
655
656 /* For empty strings the result should be zero. */
657 if (len == 0)
658 return ssize_int (0)size_int_kind (0, stk_ssizetype);
659
660 /* We don't know the starting offset, but we do know that the string
661 has no internal zero bytes. If the offset falls within the bounds
662 of the string subtract the offset from the length of the string,
663 and return that. Otherwise the length is zero. Take care to
664 use SAVE_EXPR in case the OFFSET has side-effects. */
665 tree offsave = TREE_SIDE_EFFECTS (byteoff)((non_type_check ((byteoff), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 665, __FUNCTION__))->base.side_effects_flag)
? save_expr (byteoff)
666 : byteoff;
667 offsave = fold_convert_loc (loc, sizetypesizetype_tab[(int) stk_sizetype], offsave);
668 tree condexp = fold_build2_loc (loc, LE_EXPR, boolean_type_nodeglobal_trees[TI_BOOLEAN_TYPE], offsave,
669 size_int (len)size_int_kind (len, stk_sizetype));
670 tree lenexp = fold_build2_loc (loc, MINUS_EXPR, sizetypesizetype_tab[(int) stk_sizetype], size_int (len)size_int_kind (len, stk_sizetype),
671 offsave);
672 lenexp = fold_convert_loc (loc, ssizetypesizetype_tab[(int) stk_ssizetype], lenexp);
673 return fold_build3_loc (loc, COND_EXPR, ssizetypesizetype_tab[(int) stk_ssizetype], condexp, lenexp,
674 build_zero_cst (ssizetypesizetype_tab[(int) stk_ssizetype]));
675 }
676
677 /* Offset from the beginning of the string in elements. */
678 HOST_WIDE_INTlong eltoff;
679
680 /* We have a known offset into the string. Start searching there for
681 a null character if we can represent it as a single HOST_WIDE_INT. */
682 if (byteoff == 0)
683 eltoff = 0;
684 else if (! tree_fits_uhwi_p (byteoff) || tree_to_uhwi (byteoff) % eltsize)
685 eltoff = -1;
686 else
687 eltoff = tree_to_uhwi (byteoff) / eltsize;
688
689 /* If the offset is known to be out of bounds, warn, and call strlen at
690 runtime. */
691 if (eltoff < 0 || eltoff >= maxelts)
692 {
693 /* Suppress multiple warnings for propagated constant strings. */
694 if (only_value != 2
695 && !warning_suppressed_p (arg, OPT_Warray_bounds)
696 && warning_at (loc, OPT_Warray_bounds,
697 "offset %qwi outside bounds of constant string",
698 eltoff))
699 {
700 if (decl)
701 inform (DECL_SOURCE_LOCATION (decl)((contains_struct_check ((decl), (TS_DECL_MINIMAL), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 701, __FUNCTION__))->decl_minimal.locus)
, "%qE declared here", decl);
702 suppress_warning (arg, OPT_Warray_bounds);
703 }
704 return NULL_TREE(tree) __null;
705 }
706
707 /* If eltoff is larger than strelts but less than maxelts the
708 string length is zero, since the excess memory will be zero. */
709 if (eltoff > strelts)
710 return ssize_int (0)size_int_kind (0, stk_ssizetype);
711
712 /* Use strlen to search for the first zero byte. Since any strings
713 constructed with build_string will have nulls appended, we win even
714 if we get handed something like (char[4])"abcd".
715
716 Since ELTOFF is our starting index into the string, no further
717 calculation is needed. */
718 unsigned len = string_length (ptr + eltoff * eltsize, eltsize,
719 strelts - eltoff);
720
721 /* Don't know what to return if there was no zero termination.
722 Ideally this would turn into a gcc_checking_assert over time.
723 Set DECL/LEN so callers can examine them. */
724 if (len >= maxelts - eltoff)
725 {
726 data->decl = decl;
727 data->off = byteoff;
728 data->minlen = ssize_int (len)size_int_kind (len, stk_ssizetype);
729 return NULL_TREE(tree) __null;
730 }
731
732 return ssize_int (len)size_int_kind (len, stk_ssizetype);
733}
734
735/* Return a constant integer corresponding to target reading
736 GET_MODE_BITSIZE (MODE) bits from string constant STR. If
737 NULL_TERMINATED_P, reading stops after '\0' character, all further ones
738 are assumed to be zero, otherwise it reads as many characters
739 as needed. */
740
741rtx
742c_readstr (const char *str, scalar_int_mode mode,
743 bool null_terminated_p/*=true*/)
744{
745 HOST_WIDE_INTlong ch;
746 unsigned int i, j;
747 HOST_WIDE_INTlong tmp[MAX_BITSIZE_MODE_ANY_INT(64*(8)) / HOST_BITS_PER_WIDE_INT64];
748
749 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT)((void)(!(((enum mode_class) mode_class[mode]) == MODE_INT) ?
fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 749, __FUNCTION__), 0 : 0))
;
750 unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT64 - 1)
751 / HOST_BITS_PER_WIDE_INT64;
752
753 gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT)((void)(!(len <= (64*(8)) / 64) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 753, __FUNCTION__), 0 : 0))
;
754 for (i = 0; i < len; i++)
755 tmp[i] = 0;
756
757 ch = 1;
758 for (i = 0; i < GET_MODE_SIZE (mode); i++)
759 {
760 j = i;
761 if (WORDS_BIG_ENDIAN0)
762 j = GET_MODE_SIZE (mode) - i - 1;
763 if (BYTES_BIG_ENDIAN0 != WORDS_BIG_ENDIAN0
764 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD(((global_options.x_ix86_isa_flags & (1UL << 1)) !=
0) ? 8 : 4)
)
765 j = j + UNITS_PER_WORD(((global_options.x_ix86_isa_flags & (1UL << 1)) !=
0) ? 8 : 4)
- 2 * (j % UNITS_PER_WORD(((global_options.x_ix86_isa_flags & (1UL << 1)) !=
0) ? 8 : 4)
) - 1;
766 j *= BITS_PER_UNIT(8);
767
768 if (ch || !null_terminated_p)
769 ch = (unsigned char) str[i];
770 tmp[j / HOST_BITS_PER_WIDE_INT64] |= ch << (j % HOST_BITS_PER_WIDE_INT64);
771 }
772
773 wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
774 return immed_wide_int_const (c, mode);
775}
776
777/* Cast a target constant CST to target CHAR and if that value fits into
778 host char type, return zero and put that value into variable pointed to by
779 P. */
780
781static int
782target_char_cast (tree cst, char *p)
783{
784 unsigned HOST_WIDE_INTlong val, hostval;
785
786 if (TREE_CODE (cst)((enum tree_code) (cst)->base.code) != INTEGER_CST
787 || CHAR_TYPE_SIZE(8) > HOST_BITS_PER_WIDE_INT64)
788 return 1;
789
790 /* Do not care if it fits or not right here. */
791 val = TREE_INT_CST_LOW (cst)((unsigned long) (*tree_int_cst_elt_check ((cst), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 791, __FUNCTION__)))
;
792
793 if (CHAR_TYPE_SIZE(8) < HOST_BITS_PER_WIDE_INT64)
794 val &= (HOST_WIDE_INT_1U1UL << CHAR_TYPE_SIZE(8)) - 1;
795
796 hostval = val;
797 if (HOST_BITS_PER_CHAR8 < HOST_BITS_PER_WIDE_INT64)
798 hostval &= (HOST_WIDE_INT_1U1UL << HOST_BITS_PER_CHAR8) - 1;
799
800 if (val != hostval)
801 return 1;
802
803 *p = hostval;
804 return 0;
805}
806
807/* Similar to save_expr, but assumes that arbitrary code is not executed
808 in between the multiple evaluations. In particular, we assume that a
809 non-addressable local variable will not be modified. */
810
811static tree
812builtin_save_expr (tree exp)
813{
814 if (TREE_CODE (exp)((enum tree_code) (exp)->base.code) == SSA_NAME
815 || (TREE_ADDRESSABLE (exp)((exp)->base.addressable_flag) == 0
816 && (TREE_CODE (exp)((enum tree_code) (exp)->base.code) == PARM_DECL
817 || (VAR_P (exp)(((enum tree_code) (exp)->base.code) == VAR_DECL) && !TREE_STATIC (exp)((exp)->base.static_flag)))))
818 return exp;
819
820 return save_expr (exp);
821}
822
823/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
824 times to get the address of either a higher stack frame, or a return
825 address located within it (depending on FNDECL_CODE). */
826
827static rtx
828expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
829{
830 int i;
831 rtx tem = INITIAL_FRAME_ADDRESS_RTX__null;
832 if (tem == NULL_RTX(rtx) 0)
833 {
834 /* For a zero count with __builtin_return_address, we don't care what
835 frame address we return, because target-specific definitions will
836 override us. Therefore frame pointer elimination is OK, and using
837 the soft frame pointer is OK.
838
839 For a nonzero count, or a zero count with __builtin_frame_address,
840 we require a stable offset from the current frame pointer to the
841 previous one, so we must use the hard frame pointer, and
842 we must disable frame pointer elimination. */
843 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
844 tem = frame_pointer_rtx((this_target_rtl->x_global_rtl)[GR_FRAME_POINTER]);
845 else
846 {
847 tem = hard_frame_pointer_rtx((this_target_rtl->x_global_rtl)[GR_HARD_FRAME_POINTER]);
848
849 /* Tell reload not to eliminate the frame pointer. */
850 crtl(&x_rtl)->accesses_prior_frames = 1;
851 }
852 }
853
854 if (count > 0)
855 SETUP_FRAME_ADDRESSES ()ix86_setup_frame_addresses ();
856
857 /* On the SPARC, the return address is not in the frame, it is in a
858 register. There is no way to access it off of the current frame
859 pointer, but it can be accessed off the previous frame pointer by
860 reading the value from the register window save area. */
861 if (RETURN_ADDR_IN_PREVIOUS_FRAME0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
862 count--;
863
864 /* Scan back COUNT frames to the specified frame. */
865 for (i = 0; i < count; i++)
866 {
867 /* Assume the dynamic chain pointer is in the word that the
868 frame address points to, unless otherwise specified. */
869 tem = DYNAMIC_CHAIN_ADDRESS (tem)(tem);
870 tem = memory_address (Pmode, tem)memory_address_addr_space (((global_options.x_ix86_pmode == PMODE_DI
? (scalar_int_mode ((scalar_int_mode::from_int) E_DImode)) :
(scalar_int_mode ((scalar_int_mode::from_int) E_SImode)))), (
tem), 0)
;
871 tem = gen_frame_mem (Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode (
(scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode
::from_int) E_SImode)))
, tem);
872 tem = copy_to_reg (tem);
873 }
874
875 /* For __builtin_frame_address, return what we've got. But, on
876 the SPARC for example, we may have to add a bias. */
877 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
878 return FRAME_ADDR_RTX (tem)(tem);
879
880 /* For __builtin_return_address, get the return address from that frame. */
881#ifdef RETURN_ADDR_RTX
882 tem = RETURN_ADDR_RTX (count, tem)((count) == 0 ? gen_rtx_MEM ((global_options.x_ix86_pmode == PMODE_DI
? (scalar_int_mode ((scalar_int_mode::from_int) E_DImode)) :
(scalar_int_mode ((scalar_int_mode::from_int) E_SImode))), plus_constant
((global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode
((scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode (
(scalar_int_mode::from_int) E_SImode))), ((this_target_rtl->
x_global_rtl)[GR_ARG_POINTER]), -(((global_options.x_ix86_isa_flags
& (1UL << 1)) != 0) ? 8 : 4))) : gen_rtx_MEM ((global_options
.x_ix86_pmode == PMODE_DI ? (scalar_int_mode ((scalar_int_mode
::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode::
from_int) E_SImode))), plus_constant ((global_options.x_ix86_pmode
== PMODE_DI ? (scalar_int_mode ((scalar_int_mode::from_int) E_DImode
)) : (scalar_int_mode ((scalar_int_mode::from_int) E_SImode))
), (tem), (((global_options.x_ix86_isa_flags & (1UL <<
1)) != 0) ? 8 : 4))))
;
883#else
884 tem = memory_address (Pmode,memory_address_addr_space (((global_options.x_ix86_pmode == PMODE_DI
? (scalar_int_mode ((scalar_int_mode::from_int) E_DImode)) :
(scalar_int_mode ((scalar_int_mode::from_int) E_SImode)))), (
plus_constant ((global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode
((scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode (
(scalar_int_mode::from_int) E_SImode))), tem, GET_MODE_SIZE (
(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode (
(scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode
::from_int) E_SImode)))))), 0)
885 plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)))memory_address_addr_space (((global_options.x_ix86_pmode == PMODE_DI
? (scalar_int_mode ((scalar_int_mode::from_int) E_DImode)) :
(scalar_int_mode ((scalar_int_mode::from_int) E_SImode)))), (
plus_constant ((global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode
((scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode (
(scalar_int_mode::from_int) E_SImode))), tem, GET_MODE_SIZE (
(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode (
(scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode
::from_int) E_SImode)))))), 0)
;
886 tem = gen_frame_mem (Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode (
(scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode
::from_int) E_SImode)))
, tem);
887#endif
888 return tem;
889}
890
891/* Alias set used for setjmp buffer. */
892static alias_set_type setjmp_alias_set = -1;
893
894/* Construct the leading half of a __builtin_setjmp call. Control will
895 return to RECEIVER_LABEL. This is also called directly by the SJLJ
896 exception handling code. */
897
898void
899expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
900{
901 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL)((SAVE_NONLOCAL) == SAVE_NONLOCAL ? (((global_options.x_ix86_isa_flags
& (1UL << 1)) != 0) ? (scalar_int_mode ((scalar_int_mode
::from_int) E_TImode)) : (scalar_int_mode ((scalar_int_mode::
from_int) E_DImode))) : (global_options.x_ix86_pmode == PMODE_DI
? (scalar_int_mode ((scalar_int_mode::from_int) E_DImode)) :
(scalar_int_mode ((scalar_int_mode::from_int) E_SImode))))
;
902 rtx stack_save;
903 rtx mem;
904
905 if (setjmp_alias_set == -1)
906 setjmp_alias_set = new_alias_set ();
907
908 buf_addr = convert_memory_address (Pmode, buf_addr)convert_memory_address_addr_space (((global_options.x_ix86_pmode
== PMODE_DI ? (scalar_int_mode ((scalar_int_mode::from_int) E_DImode
)) : (scalar_int_mode ((scalar_int_mode::from_int) E_SImode))
)), (buf_addr), 0)
;
909
910 buf_addr = force_reg (Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode (
(scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode
::from_int) E_SImode)))
, force_operand (buf_addr, NULL_RTX(rtx) 0));
911
912 /* We store the frame pointer and the address of receiver_label in
913 the buffer and use the rest of it for the stack save area, which
914 is machine-dependent. */
915
916 mem = gen_rtx_MEM (Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode (
(scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode
::from_int) E_SImode)))
, buf_addr);
917 set_mem_alias_set (mem, setjmp_alias_set);
918 emit_move_insn (mem, hard_frame_pointer_rtx((this_target_rtl->x_global_rtl)[GR_HARD_FRAME_POINTER]));
919
920 mem = gen_rtx_MEM (Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode (
(scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode
::from_int) E_SImode)))
, plus_constant (Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode (
(scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode
::from_int) E_SImode)))
, buf_addr,
921 GET_MODE_SIZE (Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode (
(scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode
::from_int) E_SImode)))
))),
922 set_mem_alias_set (mem, setjmp_alias_set);
923
924 emit_move_insn (validize_mem (mem),
925 force_reg (Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode (
(scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode
::from_int) E_SImode)))
, gen_rtx_LABEL_REF (Pmode, receiver_label)gen_rtx_fmt_u_stat ((LABEL_REF), (((global_options.x_ix86_pmode
== PMODE_DI ? (scalar_int_mode ((scalar_int_mode::from_int) E_DImode
)) : (scalar_int_mode ((scalar_int_mode::from_int) E_SImode))
))), ((receiver_label)) )
));
926
927 stack_save = gen_rtx_MEM (sa_mode,
928 plus_constant (Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode (
(scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode
::from_int) E_SImode)))
, buf_addr,
929 2 * GET_MODE_SIZE (Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode (
(scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode
::from_int) E_SImode)))
)));
930 set_mem_alias_set (stack_save, setjmp_alias_set);
931 emit_stack_save (SAVE_NONLOCAL, &stack_save);
932
933 /* If there is further processing to do, do it. */
934 if (targetm.have_builtin_setjmp_setup ())
935 emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));
936
937 /* We have a nonlocal label. */
938 cfun(cfun + 0)->has_nonlocal_label = 1;
939}
940
941/* Construct the trailing part of a __builtin_setjmp call. This is
942 also called directly by the SJLJ exception handling code.
943 If RECEIVER_LABEL is NULL, instead contruct a nonlocal goto handler. */
944
945void
946expand_builtin_setjmp_receiver (rtx receiver_label)
947{
948 rtx chain;
949
950 /* Mark the FP as used when we get here, so we have to make sure it's
951 marked as used by this function. */
952 emit_use (hard_frame_pointer_rtx((this_target_rtl->x_global_rtl)[GR_HARD_FRAME_POINTER]));
953
954 /* Mark the static chain as clobbered here so life information
955 doesn't get messed up for it. */
956 chain = rtx_for_static_chain (current_function_decl, true);
957 if (chain && REG_P (chain)(((enum rtx_code) (chain)->code) == REG))
958 emit_clobber (chain);
959
960 if (!HARD_FRAME_POINTER_IS_ARG_POINTER(6 == 16) && fixed_regs(this_target_hard_regs->x_fixed_regs)[ARG_POINTER_REGNUM16])
961 {
962 /* If the argument pointer can be eliminated in favor of the
963 frame pointer, we don't need to restore it. We assume here
964 that if such an elimination is present, it can always be used.
965 This is the case on all known machines; if we don't make this
966 assumption, we do unnecessary saving on many machines. */
967 size_t i;
968 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS{{ 16, 7}, { 16, 6}, { 19, 7}, { 19, 6}};
969
970 for (i = 0; i < ARRAY_SIZE (elim_regs)(sizeof (elim_regs) / sizeof ((elim_regs)[0])); i++)
971 if (elim_regs[i].from == ARG_POINTER_REGNUM16
972 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM6)
973 break;
974
975 if (i == ARRAY_SIZE (elim_regs)(sizeof (elim_regs) / sizeof ((elim_regs)[0])))
976 {
977 /* Now restore our arg pointer from the address at which it
978 was saved in our stack frame. */
979 emit_move_insn (crtl(&x_rtl)->args.internal_arg_pointer,
980 copy_to_reg (get_arg_pointer_save_area ()));
981 }
982 }
983
984 if (receiver_label != NULL__null && targetm.have_builtin_setjmp_receiver ())
985 emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
986 else if (targetm.have_nonlocal_goto_receiver ())
987 emit_insn (targetm.gen_nonlocal_goto_receiver ());
988 else
989 { /* Nothing */ }
990
991 /* We must not allow the code we just generated to be reordered by
992 scheduling. Specifically, the update of the frame pointer must
993 happen immediately, not later. */
994 emit_insn (gen_blockage ());
995}
996
997/* __builtin_longjmp is passed a pointer to an array of five words (not
998 all will be used on all machines). It operates similarly to the C
999 library function of the same name, but is more efficient. Much of
1000 the code below is copied from the handling of non-local gotos. */
1001
1002static void
1003expand_builtin_longjmp (rtx buf_addr, rtx value)
1004{
1005 rtx fp, lab, stack;
1006 rtx_insn *insn, *last;
1007 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL)((SAVE_NONLOCAL) == SAVE_NONLOCAL ? (((global_options.x_ix86_isa_flags
& (1UL << 1)) != 0) ? (scalar_int_mode ((scalar_int_mode
::from_int) E_TImode)) : (scalar_int_mode ((scalar_int_mode::
from_int) E_DImode))) : (global_options.x_ix86_pmode == PMODE_DI
? (scalar_int_mode ((scalar_int_mode::from_int) E_DImode)) :
(scalar_int_mode ((scalar_int_mode::from_int) E_SImode))))
;
1008
1009 /* DRAP is needed for stack realign if longjmp is expanded to current
1010 function */
1011 if (SUPPORTS_STACK_ALIGNMENT((((unsigned int) 1 << 28) * 8) > ((((global_options
.x_ix86_isa_flags & (1UL << 1)) != 0) && ix86_cfun_abi
() == MS_ABI) ? 128 : ((8) * (((global_options.x_ix86_isa_flags
& (1UL << 1)) != 0) ? 8 : 4))))
)
1012 crtl(&x_rtl)->need_drap = true;
1013
1014 if (setjmp_alias_set == -1)
1015 setjmp_alias_set = new_alias_set ();
1016
1017 buf_addr = convert_memory_address (Pmode, buf_addr)convert_memory_address_addr_space (((global_options.x_ix86_pmode
== PMODE_DI ? (scalar_int_mode ((scalar_int_mode::from_int) E_DImode
)) : (scalar_int_mode ((scalar_int_mode::from_int) E_SImode))
)), (buf_addr), 0)
;
1018
1019 buf_addr = force_reg (Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode (
(scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode
::from_int) E_SImode)))
, buf_addr);
1020
1021 /* We require that the user must pass a second argument of 1, because
1022 that is what builtin_setjmp will return. */
1023 gcc_assert (value == const1_rtx)((void)(!(value == (const_int_rtx[64 +1])) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 1023, __FUNCTION__), 0 : 0))
;
1024
1025 last = get_last_insn ();
1026 if (targetm.have_builtin_longjmp ())
1027 emit_insn (targetm.gen_builtin_longjmp (buf_addr));
1028 else
1029 {
1030 fp = gen_rtx_MEM (Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode (
(scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode
::from_int) E_SImode)))
, buf_addr);
1031 lab = gen_rtx_MEM (Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode (
(scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode
::from_int) E_SImode)))
, plus_constant (Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode (
(scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode
::from_int) E_SImode)))
, buf_addr,
1032 GET_MODE_SIZE (Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode (
(scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode
::from_int) E_SImode)))
)));
1033
1034 stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode (
(scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode
::from_int) E_SImode)))
, buf_addr,
1035 2 * GET_MODE_SIZE (Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode (
(scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode
::from_int) E_SImode)))
)));
1036 set_mem_alias_set (fp, setjmp_alias_set);
1037 set_mem_alias_set (lab, setjmp_alias_set);
1038 set_mem_alias_set (stack, setjmp_alias_set);
1039
1040 /* Pick up FP, label, and SP from the block and jump. This code is
1041 from expand_goto in stmt.c; see there for detailed comments. */
1042 if (targetm.have_nonlocal_goto ())
1043 /* We have to pass a value to the nonlocal_goto pattern that will
1044 get copied into the static_chain pointer, but it does not matter
1045 what that value is, because builtin_setjmp does not use it. */
1046 emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
1047 else
1048 {
1049 emit_clobber (gen_rtx_MEM (BLKmode((void) 0, E_BLKmode), gen_rtx_SCRATCH (VOIDmode)gen_rtx_fmt__stat ((SCRATCH), ((((void) 0, E_VOIDmode))) )));
1050 emit_clobber (gen_rtx_MEM (BLKmode((void) 0, E_BLKmode), hard_frame_pointer_rtx((this_target_rtl->x_global_rtl)[GR_HARD_FRAME_POINTER])));
1051
1052 lab = copy_to_reg (lab);
1053
1054 /* Restore the frame pointer and stack pointer. We must use a
1055 temporary since the setjmp buffer may be a local. */
1056 fp = copy_to_reg (fp);
1057 emit_stack_restore (SAVE_NONLOCAL, stack);
1058
1059 /* Ensure the frame pointer move is not optimized. */
1060 emit_insn (gen_blockage ());
1061 emit_clobber (hard_frame_pointer_rtx((this_target_rtl->x_global_rtl)[GR_HARD_FRAME_POINTER]));
1062 emit_clobber (frame_pointer_rtx((this_target_rtl->x_global_rtl)[GR_FRAME_POINTER]));
1063 emit_move_insn (hard_frame_pointer_rtx((this_target_rtl->x_global_rtl)[GR_HARD_FRAME_POINTER]), fp);
1064
1065 emit_use (hard_frame_pointer_rtx((this_target_rtl->x_global_rtl)[GR_HARD_FRAME_POINTER]));
1066 emit_use (stack_pointer_rtx((this_target_rtl->x_global_rtl)[GR_STACK_POINTER]));
1067 emit_indirect_jump (lab);
1068 }
1069 }
1070
1071 /* Search backwards and mark the jump insn as a non-local goto.
1072 Note that this precludes the use of __builtin_longjmp to a
1073 __builtin_setjmp target in the same function. However, we've
1074 already cautioned the user that these functions are for
1075 internal exception handling use only. */
1076 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1077 {
1078 gcc_assert (insn != last)((void)(!(insn != last) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 1078, __FUNCTION__), 0 : 0))
;
1079
1080 if (JUMP_P (insn)(((enum rtx_code) (insn)->code) == JUMP_INSN))
1081 {
1082 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx(const_int_rtx[64]));
1083 break;
1084 }
1085 else if (CALL_P (insn)(((enum rtx_code) (insn)->code) == CALL_INSN))
1086 break;
1087 }
1088}
1089
1090static inline bool
1091more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
1092{
1093 return (iter->i < iter->n);
1094}
1095
1096/* This function validates the types of a function call argument list
1097 against a specified list of tree_codes. If the last specifier is a 0,
1098 that represents an ellipsis, otherwise the last specifier must be a
1099 VOID_TYPE. */
1100
1101static bool
1102validate_arglist (const_tree callexpr, ...)
1103{
1104 enum tree_code code;
1105 bool res = 0;
1106 va_list ap;
1107 const_call_expr_arg_iterator iter;
1108 const_tree arg;
1109
1110 va_start (ap, callexpr)__builtin_va_start(ap, callexpr);
1111 init_const_call_expr_arg_iterator (callexpr, &iter);
1112
1113 /* Get a bitmap of pointer argument numbers declared attribute nonnull. */
1114 tree fn = CALL_EXPR_FN (callexpr)(*((const_cast<tree*> (tree_operand_check (((tree_check
((callexpr), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 1114, __FUNCTION__, (CALL_EXPR)))), (1), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 1114, __FUNCTION__)))))
;
1115 bitmap argmap = get_nonnull_args (TREE_TYPE (TREE_TYPE (fn))((contains_struct_check ((((contains_struct_check ((fn), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 1115, __FUNCTION__))->typed.type)), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 1115, __FUNCTION__))->typed.type)
);
1116
1117 for (unsigned argno = 1; ; ++argno)
1118 {
1119 code = (enum tree_code) va_arg (ap, int)__builtin_va_arg(ap, int);
1120
1121 switch (code)
1122 {
1123 case 0:
1124 /* This signifies an ellipses, any further arguments are all ok. */
1125 res = true;
1126 goto end;
1127 case VOID_TYPE:
1128 /* This signifies an endlink, if no arguments remain, return
1129 true, otherwise return false. */
1130 res = !more_const_call_expr_args_p (&iter);
1131 goto end;
1132 case POINTER_TYPE:
1133 /* The actual argument must be nonnull when either the whole
1134 called function has been declared nonnull, or when the formal
1135 argument corresponding to the actual argument has been. */
1136 if (argmap
1137 && (bitmap_empty_p (argmap) || bitmap_bit_p (argmap, argno)))
1138 {
1139 arg = next_const_call_expr_arg (&iter);
1140 if (!validate_arg (arg, code) || integer_zerop (arg))
1141 goto end;
1142 break;
1143 }
1144 /* FALLTHRU */
1145 default:
1146 /* If no parameters remain or the parameter's code does not
1147 match the specified code, return false. Otherwise continue
1148 checking any remaining arguments. */
1149 arg = next_const_call_expr_arg (&iter);
1150 if (!validate_arg (arg, code))
1151 goto end;
1152 break;
1153 }
1154 }
1155
1156 /* We need gotos here since we can only have one VA_CLOSE in a
1157 function. */
1158 end: ;
1159 va_end (ap)__builtin_va_end(ap);
1160
1161 BITMAP_FREE (argmap)((void) (bitmap_obstack_free ((bitmap) argmap), (argmap) = (bitmap
) __null))
;
1162
1163 return res;
1164}
1165
1166/* Expand a call to __builtin_nonlocal_goto. We're passed the target label
1167 and the address of the save area. */
1168
1169static rtx
1170expand_builtin_nonlocal_goto (tree exp)
1171{
1172 tree t_label, t_save_area;
1173 rtx r_label, r_save_area, r_fp, r_sp;
1174 rtx_insn *insn;
1175
1176 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
1177 return NULL_RTX(rtx) 0;
1178
1179 t_label = CALL_EXPR_ARG (exp, 0)(*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 1179, __FUNCTION__, (CALL_EXPR)))), ((0) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 1179, __FUNCTION__)))))
;
1180 t_save_area = CALL_EXPR_ARG (exp, 1)(*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 1180, __FUNCTION__, (CALL_EXPR)))), ((1) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 1180, __FUNCTION__)))))
;
1181
1182 r_label = expand_normal (t_label);
1183 r_label = convert_memory_address (Pmode, r_label)convert_memory_address_addr_space (((global_options.x_ix86_pmode
== PMODE_DI ? (scalar_int_mode ((scalar_int_mode::from_int) E_DImode
)) : (scalar_int_mode ((scalar_int_mode::from_int) E_SImode))
)), (r_label), 0)
;
1184 r_save_area = expand_normal (t_save_area);
1185 r_save_area = convert_memory_address (Pmode, r_save_area)convert_memory_address_addr_space (((global_options.x_ix86_pmode
== PMODE_DI ? (scalar_int_mode ((scalar_int_mode::from_int) E_DImode
)) : (scalar_int_mode ((scalar_int_mode::from_int) E_SImode))
)), (r_save_area), 0)
;
1186 /* Copy the address of the save location to a register just in case it was
1187 based on the frame pointer. */
1188 r_save_area = copy_to_reg (r_save_area);
1189 r_fp = gen_rtx_MEM (Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode (
(scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode
::from_int) E_SImode)))
, r_save_area);
1190 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL)((SAVE_NONLOCAL) == SAVE_NONLOCAL ? (((global_options.x_ix86_isa_flags
& (1UL << 1)) != 0) ? (scalar_int_mode ((scalar_int_mode
::from_int) E_TImode)) : (scalar_int_mode ((scalar_int_mode::
from_int) E_DImode))) : (global_options.x_ix86_pmode == PMODE_DI
? (scalar_int_mode ((scalar_int_mode::from_int) E_DImode)) :
(scalar_int_mode ((scalar_int_mode::from_int) E_SImode))))
,
1191 plus_constant (Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode (
(scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode
::from_int) E_SImode)))
, r_save_area,
1192 GET_MODE_SIZE (Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode (
(scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode
::from_int) E_SImode)))
)));
1193
1194 crtl(&x_rtl)->has_nonlocal_goto = 1;
1195
1196 /* ??? We no longer need to pass the static chain value, afaik. */
1197 if (targetm.have_nonlocal_goto ())
1198 emit_insn (targetm.gen_nonlocal_goto (const0_rtx(const_int_rtx[64]), r_label, r_sp, r_fp));
1199 else
1200 {
1201 emit_clobber (gen_rtx_MEM (BLKmode((void) 0, E_BLKmode), gen_rtx_SCRATCH (VOIDmode)gen_rtx_fmt__stat ((SCRATCH), ((((void) 0, E_VOIDmode))) )));
1202 emit_clobber (gen_rtx_MEM (BLKmode((void) 0, E_BLKmode), hard_frame_pointer_rtx((this_target_rtl->x_global_rtl)[GR_HARD_FRAME_POINTER])));
1203
1204 r_label = copy_to_reg (r_label);
1205
1206 /* Restore the frame pointer and stack pointer. We must use a
1207 temporary since the setjmp buffer may be a local. */
1208 r_fp = copy_to_reg (r_fp);
1209 emit_stack_restore (SAVE_NONLOCAL, r_sp);
1210
1211 /* Ensure the frame pointer move is not optimized. */
1212 emit_insn (gen_blockage ());
1213 emit_clobber (hard_frame_pointer_rtx((this_target_rtl->x_global_rtl)[GR_HARD_FRAME_POINTER]));
1214 emit_clobber (frame_pointer_rtx((this_target_rtl->x_global_rtl)[GR_FRAME_POINTER]));
1215 emit_move_insn (hard_frame_pointer_rtx((this_target_rtl->x_global_rtl)[GR_HARD_FRAME_POINTER]), r_fp);
1216
1217 /* USE of hard_frame_pointer_rtx added for consistency;
1218 not clear if really needed. */
1219 emit_use (hard_frame_pointer_rtx((this_target_rtl->x_global_rtl)[GR_HARD_FRAME_POINTER]));
1220 emit_use (stack_pointer_rtx((this_target_rtl->x_global_rtl)[GR_STACK_POINTER]));
1221
1222 /* If the architecture is using a GP register, we must
1223 conservatively assume that the target function makes use of it.
1224 The prologue of functions with nonlocal gotos must therefore
1225 initialize the GP register to the appropriate value, and we
1226 must then make sure that this value is live at the point
1227 of the jump. (Note that this doesn't necessarily apply
1228 to targets with a nonlocal_goto pattern; they are free
1229 to implement it in their own way. Note also that this is
1230 a no-op if the GP register is a global invariant.) */
1231 unsigned regnum = PIC_OFFSET_TABLE_REGNUM(ix86_use_pseudo_pic_reg () ? ((this_target_rtl->x_pic_offset_table_rtx
) ? (~(unsigned int) 0) : (((global_options.x_ix86_isa_flags &
(1UL << 1)) != 0) ? 43 : 3)) : (~(unsigned int) 0))
;
1232 if (regnum != INVALID_REGNUM(~(unsigned int) 0) && fixed_regs(this_target_hard_regs->x_fixed_regs)[regnum])
1233 emit_use (pic_offset_table_rtx(this_target_rtl->x_pic_offset_table_rtx));
1234
1235 emit_indirect_jump (r_label);
1236 }
1237
1238 /* Search backwards to the jump insn and mark it as a
1239 non-local goto. */
1240 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1241 {
1242 if (JUMP_P (insn)(((enum rtx_code) (insn)->code) == JUMP_INSN))
1243 {
1244 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx(const_int_rtx[64]));
1245 break;
1246 }
1247 else if (CALL_P (insn)(((enum rtx_code) (insn)->code) == CALL_INSN))
1248 break;
1249 }
1250
1251 return const0_rtx(const_int_rtx[64]);
1252}
1253
1254/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1255 (not all will be used on all machines) that was passed to __builtin_setjmp.
1256 It updates the stack pointer in that block to the current value. This is
1257 also called directly by the SJLJ exception handling code. */
1258
1259void
1260expand_builtin_update_setjmp_buf (rtx buf_addr)
1261{
1262 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL)((SAVE_NONLOCAL) == SAVE_NONLOCAL ? (((global_options.x_ix86_isa_flags
& (1UL << 1)) != 0) ? (scalar_int_mode ((scalar_int_mode
::from_int) E_TImode)) : (scalar_int_mode ((scalar_int_mode::
from_int) E_DImode))) : (global_options.x_ix86_pmode == PMODE_DI
? (scalar_int_mode ((scalar_int_mode::from_int) E_DImode)) :
(scalar_int_mode ((scalar_int_mode::from_int) E_SImode))))
;
1263 buf_addr = convert_memory_address (Pmode, buf_addr)convert_memory_address_addr_space (((global_options.x_ix86_pmode
== PMODE_DI ? (scalar_int_mode ((scalar_int_mode::from_int) E_DImode
)) : (scalar_int_mode ((scalar_int_mode::from_int) E_SImode))
)), (buf_addr), 0)
;
1264 rtx stack_save
1265 = gen_rtx_MEM (sa_mode,
1266 memory_addressmemory_address_addr_space ((sa_mode), (plus_constant ((global_options
.x_ix86_pmode == PMODE_DI ? (scalar_int_mode ((scalar_int_mode
::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode::
from_int) E_SImode))), buf_addr, 2 * GET_MODE_SIZE ((global_options
.x_ix86_pmode == PMODE_DI ? (scalar_int_mode ((scalar_int_mode
::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode::
from_int) E_SImode)))))), 0)
1267 (sa_mode,memory_address_addr_space ((sa_mode), (plus_constant ((global_options
.x_ix86_pmode == PMODE_DI ? (scalar_int_mode ((scalar_int_mode
::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode::
from_int) E_SImode))), buf_addr, 2 * GET_MODE_SIZE ((global_options
.x_ix86_pmode == PMODE_DI ? (scalar_int_mode ((scalar_int_mode
::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode::
from_int) E_SImode)))))), 0)
1268 plus_constant (Pmode, buf_addr,memory_address_addr_space ((sa_mode), (plus_constant ((global_options
.x_ix86_pmode == PMODE_DI ? (scalar_int_mode ((scalar_int_mode
::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode::
from_int) E_SImode))), buf_addr, 2 * GET_MODE_SIZE ((global_options
.x_ix86_pmode == PMODE_DI ? (scalar_int_mode ((scalar_int_mode
::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode::
from_int) E_SImode)))))), 0)
1269 2 * GET_MODE_SIZE (Pmode)))memory_address_addr_space ((sa_mode), (plus_constant ((global_options
.x_ix86_pmode == PMODE_DI ? (scalar_int_mode ((scalar_int_mode
::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode::
from_int) E_SImode))), buf_addr, 2 * GET_MODE_SIZE ((global_options
.x_ix86_pmode == PMODE_DI ? (scalar_int_mode ((scalar_int_mode
::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode::
from_int) E_SImode)))))), 0)
);
1270
1271 emit_stack_save (SAVE_NONLOCAL, &stack_save);
1272}
1273
1274/* Expand a call to __builtin_prefetch. For a target that does not support
1275 data prefetch, evaluate the memory address argument in case it has side
1276 effects. */
1277
1278static void
1279expand_builtin_prefetch (tree exp)
1280{
1281 tree arg0, arg1, arg2;
1282 int nargs;
1283 rtx op0, op1, op2;
1284
1285 if (!validate_arglist (exp, POINTER_TYPE, 0))
1286 return;
1287
1288 arg0 = CALL_EXPR_ARG (exp, 0)(*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 1288, __FUNCTION__, (CALL_EXPR)))), ((0) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 1288, __FUNCTION__)))))
;
1289
1290 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1291 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1292 locality). */
1293 nargs = call_expr_nargs (exp)(((int)((unsigned long) (*tree_int_cst_elt_check (((tree_class_check
((exp), (tcc_vl_exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 1293, __FUNCTION__))->exp.operands[0]), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 1293, __FUNCTION__)))) - 3)
;
1294 if (nargs > 1)
1295 arg1 = CALL_EXPR_ARG (exp, 1)(*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 1295, __FUNCTION__, (CALL_EXPR)))), ((1) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 1295, __FUNCTION__)))))
;
1296 else
1297 arg1 = integer_zero_nodeglobal_trees[TI_INTEGER_ZERO];
1298 if (nargs > 2)
1299 arg2 = CALL_EXPR_ARG (exp, 2)(*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 1299, __FUNCTION__, (CALL_EXPR)))), ((2) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 1299, __FUNCTION__)))))
;
1300 else
1301 arg2 = integer_three_nodeglobal_trees[TI_INTEGER_THREE];
1302
1303 /* Argument 0 is an address. */
1304 op0 = expand_expr (arg0, NULL_RTX(rtx) 0, Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode (
(scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode
::from_int) E_SImode)))
, EXPAND_NORMAL);
1305
1306 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1307 if (TREE_CODE (arg1)((enum tree_code) (arg1)->base.code) != INTEGER_CST)
1308 {
1309 error ("second argument to %<__builtin_prefetch%> must be a constant");
1310 arg1 = integer_zero_nodeglobal_trees[TI_INTEGER_ZERO];
1311 }
1312 op1 = expand_normal (arg1);
1313 /* Argument 1 must be either zero or one. */
1314 if (INTVAL (op1)((op1)->u.hwint[0]) != 0 && INTVAL (op1)((op1)->u.hwint[0]) != 1)
1315 {
1316 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1317 " using zero");
1318 op1 = const0_rtx(const_int_rtx[64]);
1319 }
1320
1321 /* Argument 2 (locality) must be a compile-time constant int. */
1322 if (TREE_CODE (arg2)((enum tree_code) (arg2)->base.code) != INTEGER_CST)
1323 {
1324 error ("third argument to %<__builtin_prefetch%> must be a constant");
1325 arg2 = integer_zero_nodeglobal_trees[TI_INTEGER_ZERO];
1326 }
1327 op2 = expand_normal (arg2);
1328 /* Argument 2 must be 0, 1, 2, or 3. */
1329 if (INTVAL (op2)((op2)->u.hwint[0]) < 0 || INTVAL (op2)((op2)->u.hwint[0]) > 3)
1330 {
1331 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1332 op2 = const0_rtx(const_int_rtx[64]);
1333 }
1334
1335 if (targetm.have_prefetch ())
1336 {
1337 class expand_operand ops[3];
1338
1339 create_address_operand (&ops[0], op0);
1340 create_integer_operand (&ops[1], INTVAL (op1)((op1)->u.hwint[0]));
1341 create_integer_operand (&ops[2], INTVAL (op2)((op2)->u.hwint[0]));
1342 if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
1343 return;
1344 }
1345
1346 /* Don't do anything with direct references to volatile memory, but
1347 generate code to handle other side effects. */
1348 if (!MEM_P (op0)(((enum rtx_code) (op0)->code) == MEM) && side_effects_p (op0))
1349 emit_insn (op0);
1350}
1351
1352/* Get a MEM rtx for expression EXP which is the address of an operand
1353 to be used in a string instruction (cmpstrsi, cpymemsi, ..). LEN is
1354 the maximum length of the block of memory that might be accessed or
1355 NULL if unknown. */
1356
1357rtx
1358get_memory_rtx (tree exp, tree len)
1359{
1360 tree orig_exp = exp;
1361 rtx addr, mem;
1362
1363 /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
1364 from its expression, for expr->a.b only <variable>.a.b is recorded. */
1365 if (TREE_CODE (exp)((enum tree_code) (exp)->base.code) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp)((tree_check ((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 1365, __FUNCTION__, (SAVE_EXPR)))->base.public_flag)
)
1366 exp = TREE_OPERAND (exp, 0)(*((const_cast<tree*> (tree_operand_check ((exp), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 1366, __FUNCTION__)))))
;
1367
1368 addr = expand_expr (orig_exp, NULL_RTX(rtx) 0, ptr_mode, EXPAND_NORMAL);
1369 mem = gen_rtx_MEM (BLKmode((void) 0, E_BLKmode), memory_address (BLKmode, addr)memory_address_addr_space ((((void) 0, E_BLKmode)), (addr), 0
)
);
1370
1371 /* Get an expression we can use to find the attributes to assign to MEM.
1372 First remove any nops. */
1373 while (CONVERT_EXPR_P (exp)((((enum tree_code) (exp)->base.code)) == NOP_EXPR || (((enum
tree_code) (exp)->base.code)) == CONVERT_EXPR)
1374 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0)))(((enum tree_code) (((contains_struct_check (((*((const_cast<
tree*> (tree_operand_check ((exp), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 1374, __FUNCTION__)))))), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 1374, __FUNCTION__))->typed.type))->base.code) == POINTER_TYPE
|| ((enum tree_code) (((contains_struct_check (((*((const_cast
<tree*> (tree_operand_check ((exp), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 1374, __FUNCTION__)))))), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 1374, __FUNCTION__))->typed.type))->base.code) == REFERENCE_TYPE
)
)
1375 exp = TREE_OPERAND (exp, 0)(*((const_cast<tree*> (tree_operand_check ((exp), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 1375, __FUNCTION__)))))
;
1376
1377 /* Build a MEM_REF representing the whole accessed area as a byte blob,
1378 (as builtin stringops may alias with anything). */
1379 exp = fold_build2 (MEM_REF,fold_build2_loc (((location_t) 0), MEM_REF, build_array_type (
integer_types[itk_char], build_range_type (sizetype_tab[(int)
stk_sizetype], global_trees[TI_SIZE_ONE], len)), exp, build_int_cst
(global_trees[TI_PTR_TYPE], 0) )
1380 build_array_type (char_type_node,fold_build2_loc (((location_t) 0), MEM_REF, build_array_type (
integer_types[itk_char], build_range_type (sizetype_tab[(int)
stk_sizetype], global_trees[TI_SIZE_ONE], len)), exp, build_int_cst
(global_trees[TI_PTR_TYPE], 0) )
1381 build_range_type (sizetype,fold_build2_loc (((location_t) 0), MEM_REF, build_array_type (
integer_types[itk_char], build_range_type (sizetype_tab[(int)
stk_sizetype], global_trees[TI_SIZE_ONE], len)), exp, build_int_cst
(global_trees[TI_PTR_TYPE], 0) )
1382 size_one_node, len)),fold_build2_loc (((location_t) 0), MEM_REF, build_array_type (
integer_types[itk_char], build_range_type (sizetype_tab[(int)
stk_sizetype], global_trees[TI_SIZE_ONE], len)), exp, build_int_cst
(global_trees[TI_PTR_TYPE], 0) )
1383 exp, build_int_cst (ptr_type_node, 0))fold_build2_loc (((location_t) 0), MEM_REF, build_array_type (
integer_types[itk_char], build_range_type (sizetype_tab[(int)
stk_sizetype], global_trees[TI_SIZE_ONE], len)), exp, build_int_cst
(global_trees[TI_PTR_TYPE], 0) )
;
1384
1385 /* If the MEM_REF has no acceptable address, try to get the base object
1386 from the original address we got, and build an all-aliasing
1387 unknown-sized access to that one. */
1388 if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)(*((const_cast<tree*> (tree_operand_check ((exp), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 1388, __FUNCTION__)))))
))
1389 set_mem_attributes (mem, exp, 0);
1390 else if (TREE_CODE (TREE_OPERAND (exp, 0))((enum tree_code) ((*((const_cast<tree*> (tree_operand_check
((exp), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 1390, __FUNCTION__))))))->base.code)
== ADDR_EXPR
1391 && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),(*((const_cast<tree*> (tree_operand_check (((*((const_cast
<tree*> (tree_operand_check ((exp), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 1391, __FUNCTION__)))))), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 1392, __FUNCTION__)))))
1392 0)(*((const_cast<tree*> (tree_operand_check (((*((const_cast
<tree*> (tree_operand_check ((exp), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 1391, __FUNCTION__)))))), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 1392, __FUNCTION__)))))
)))
1393 {
1394 exp = build_fold_addr_expr (exp)build_fold_addr_expr_loc (((location_t) 0), (exp));
1395 exp = fold_build2 (MEM_REF,fold_build2_loc (((location_t) 0), MEM_REF, build_array_type (
integer_types[itk_char], build_range_type (sizetype_tab[(int)
stk_sizetype], global_trees[TI_SIZE_ZERO], __null)), exp, build_int_cst
(global_trees[TI_PTR_TYPE], 0) )
1396 build_array_type (char_type_node,fold_build2_loc (((location_t) 0), MEM_REF, build_array_type (
integer_types[itk_char], build_range_type (sizetype_tab[(int)
stk_sizetype], global_trees[TI_SIZE_ZERO], __null)), exp, build_int_cst
(global_trees[TI_PTR_TYPE], 0) )
1397 build_range_type (sizetype,fold_build2_loc (((location_t) 0), MEM_REF, build_array_type (
integer_types[itk_char], build_range_type (sizetype_tab[(int)
stk_sizetype], global_trees[TI_SIZE_ZERO], __null)), exp, build_int_cst
(global_trees[TI_PTR_TYPE], 0) )
1398 size_zero_node,fold_build2_loc (((location_t) 0), MEM_REF, build_array_type (
integer_types[itk_char], build_range_type (sizetype_tab[(int)
stk_sizetype], global_trees[TI_SIZE_ZERO], __null)), exp, build_int_cst
(global_trees[TI_PTR_TYPE], 0) )
1399 NULL)),fold_build2_loc (((location_t) 0), MEM_REF, build_array_type (
integer_types[itk_char], build_range_type (sizetype_tab[(int)
stk_sizetype], global_trees[TI_SIZE_ZERO], __null)), exp, build_int_cst
(global_trees[TI_PTR_TYPE], 0) )
1400 exp, build_int_cst (ptr_type_node, 0))fold_build2_loc (((location_t) 0), MEM_REF, build_array_type (
integer_types[itk_char], build_range_type (sizetype_tab[(int)
stk_sizetype], global_trees[TI_SIZE_ZERO], __null)), exp, build_int_cst
(global_trees[TI_PTR_TYPE], 0) )
;
1401 set_mem_attributes (mem, exp, 0);
1402 }
1403 set_mem_alias_set (mem, 0);
1404 return mem;
1405}
1406
1407/* Built-in functions to perform an untyped call and return. */
1408
1409#define apply_args_mode(this_target_builtins->x_apply_args_mode) \
1410 (this_target_builtins->x_apply_args_mode)
1411#define apply_result_mode(this_target_builtins->x_apply_result_mode) \
1412 (this_target_builtins->x_apply_result_mode)
1413
1414/* Return the size required for the block returned by __builtin_apply_args,
1415 and initialize apply_args_mode. */
1416
1417static int
1418apply_args_size (void)
1419{
1420 static int size = -1;
1421 int align;
1422 unsigned int regno;
1423
1424 /* The values computed by this function never change. */
1425 if (size < 0)
1426 {
1427 /* The first value is the incoming arg-pointer. */
1428 size = GET_MODE_SIZE (Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode (
(scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode
::from_int) E_SImode)))
);
1429
1430 /* The second value is the structure value address unless this is
1431 passed as an "invisible" first argument. */
1432 if (targetm.calls.struct_value_rtx (cfun(cfun + 0) ? TREE_TYPE (cfun->decl)((contains_struct_check (((cfun + 0)->decl), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 1432, __FUNCTION__))->typed.type)
: 0, 0))
1433 size += GET_MODE_SIZE (Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode (
(scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode
::from_int) E_SImode)))
);
1434
1435 for (regno = 0; regno < FIRST_PSEUDO_REGISTER76; regno++)
1436 if (FUNCTION_ARG_REGNO_P (regno)ix86_function_arg_regno_p (regno))
1437 {
1438 fixed_size_mode mode = targetm.calls.get_raw_arg_mode (regno);
1439
1440 gcc_assert (mode != VOIDmode)((void)(!(mode != ((void) 0, E_VOIDmode)) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 1440, __FUNCTION__), 0 : 0))
;
1441
1442 align = GET_MODE_ALIGNMENT (mode)get_mode_alignment (mode) / BITS_PER_UNIT(8);
1443 if (size % align != 0)
1444 size = CEIL (size, align)(((size) + (align) - 1) / (align)) * align;
1445 size += GET_MODE_SIZE (mode);
1446 apply_args_mode(this_target_builtins->x_apply_args_mode)[regno] = mode;
1447 }
1448 else
1449 {
1450 apply_args_mode(this_target_builtins->x_apply_args_mode)[regno] = as_a <fixed_size_mode> (VOIDmode((void) 0, E_VOIDmode));
1451 }
1452 }
1453 return size;
1454}
1455
1456/* Return the size required for the block returned by __builtin_apply,
1457 and initialize apply_result_mode. */
1458
1459static int
1460apply_result_size (void)
1461{
1462 static int size = -1;
1463 int align, regno;
1464
1465 /* The values computed by this function never change. */
1466 if (size < 0)
1467 {
1468 size = 0;
1469
1470 for (regno = 0; regno < FIRST_PSEUDO_REGISTER76; regno++)
1471 if (targetm.calls.function_value_regno_p (regno))
1472 {
1473 fixed_size_mode mode = targetm.calls.get_raw_result_mode (regno);
1474
1475 gcc_assert (mode != VOIDmode)((void)(!(mode != ((void) 0, E_VOIDmode)) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 1475, __FUNCTION__), 0 : 0))
;
1476
1477 align = GET_MODE_ALIGNMENT (mode)get_mode_alignment (mode) / BITS_PER_UNIT(8);
1478 if (size % align != 0)
1479 size = CEIL (size, align)(((size) + (align) - 1) / (align)) * align;
1480 size += GET_MODE_SIZE (mode);
1481 apply_result_mode(this_target_builtins->x_apply_result_mode)[regno] = mode;
1482 }
1483 else
1484 apply_result_mode(this_target_builtins->x_apply_result_mode)[regno] = as_a <fixed_size_mode> (VOIDmode((void) 0, E_VOIDmode));
1485
1486 /* Allow targets that use untyped_call and untyped_return to override
1487 the size so that machine-specific information can be stored here. */
1488#ifdef APPLY_RESULT_SIZE(8+108)
1489 size = APPLY_RESULT_SIZE(8+108);
1490#endif
1491 }
1492 return size;
1493}
1494
1495/* Create a vector describing the result block RESULT. If SAVEP is true,
1496 the result block is used to save the values; otherwise it is used to
1497 restore the values. */
1498
1499static rtx
1500result_vector (int savep, rtx result)
1501{
1502 int regno, size, align, nelts;
1503 fixed_size_mode mode;
1504 rtx reg, mem;
1505 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER)((rtx *) __builtin_alloca(sizeof (rtx) * (76)));
1506
1507 size = nelts = 0;
1508 for (regno = 0; regno < FIRST_PSEUDO_REGISTER76; regno++)
1509 if ((mode = apply_result_mode(this_target_builtins->x_apply_result_mode)[regno]) != VOIDmode((void) 0, E_VOIDmode))
1510 {
1511 align = GET_MODE_ALIGNMENT (mode)get_mode_alignment (mode) / BITS_PER_UNIT(8);
1512 if (size % align != 0)
1513 size = CEIL (size, align)(((size) + (align) - 1) / (align)) * align;
1514 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno)(regno));
1515 mem = adjust_address (result, mode, size)adjust_address_1 (result, mode, size, 1, 1, 0, 0);
1516 savevec[nelts++] = (savep
1517 ? gen_rtx_SET (mem, reg)gen_rtx_fmt_ee_stat ((SET), (((void) 0, E_VOIDmode)), ((mem))
, ((reg)) )
1518 : gen_rtx_SET (reg, mem)gen_rtx_fmt_ee_stat ((SET), (((void) 0, E_VOIDmode)), ((reg))
, ((mem)) )
);
1519 size += GET_MODE_SIZE (mode);
1520 }
1521 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec))gen_rtx_fmt_E_stat ((PARALLEL), ((((void) 0, E_VOIDmode))), (
(gen_rtvec_v (nelts, savevec))) )
;
1522}
1523
1524/* Save the state required to perform an untyped call with the same
1525 arguments as were passed to the current function. */
1526
1527static rtx
1528expand_builtin_apply_args_1 (void)
1529{
1530 rtx registers, tem;
1531 int size, align, regno;
1532 fixed_size_mode mode;
1533 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun(cfun + 0) ? TREE_TYPE (cfun->decl)((contains_struct_check (((cfun + 0)->decl), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 1533, __FUNCTION__))->typed.type)
: 0, 1);
1534
1535 /* Create a block where the arg-pointer, structure value address,
1536 and argument registers can be saved. */
1537 registers = assign_stack_local (BLKmode((void) 0, E_BLKmode), apply_args_size (), -1);
1538
1539 /* Walk past the arg-pointer and structure value address. */
1540 size = GET_MODE_SIZE (Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode (
(scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode
::from_int) E_SImode)))
);
1541 if (targetm.calls.struct_value_rtx (cfun(cfun + 0) ? TREE_TYPE (cfun->decl)((contains_struct_check (((cfun + 0)->decl), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 1541, __FUNCTION__))->typed.type)
: 0, 0))
1542 size += GET_MODE_SIZE (Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode (
(scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode
::from_int) E_SImode)))
);
1543
1544 /* Save each register used in calling a function to the block. */
1545 for (regno = 0; regno < FIRST_PSEUDO_REGISTER76; regno++)
1546 if ((mode = apply_args_mode(this_target_builtins->x_apply_args_mode)[regno]) != VOIDmode((void) 0, E_VOIDmode))
1547 {
1548 align = GET_MODE_ALIGNMENT (mode)get_mode_alignment (mode) / BITS_PER_UNIT(8);
1549 if (size % align != 0)
1550 size = CEIL (size, align)(((size) + (align) - 1) / (align)) * align;
1551
1552 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno)(regno));
1553
1554 emit_move_insn (adjust_address (registers, mode, size)adjust_address_1 (registers, mode, size, 1, 1, 0, 0), tem);
1555 size += GET_MODE_SIZE (mode);
1556 }
1557
1558 /* Save the arg pointer to the block. */
1559 tem = copy_to_reg (crtl(&x_rtl)->args.internal_arg_pointer);
1560 /* We need the pointer as the caller actually passed them to us, not
1561 as we might have pretended they were passed. Make sure it's a valid
1562 operand, as emit_move_insn isn't expected to handle a PLUS. */
1563 if (STACK_GROWS_DOWNWARD1)
1564 tem
1565 = force_operand (plus_constant (Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode (
(scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode
::from_int) E_SImode)))
, tem,
1566 crtl(&x_rtl)->args.pretend_args_size),
1567 NULL_RTX(rtx) 0);
1568 emit_move_insn (adjust_address (registers, Pmode, 0)adjust_address_1 (registers, (global_options.x_ix86_pmode == PMODE_DI
? (scalar_int_mode ((scalar_int_mode::from_int) E_DImode)) :
(scalar_int_mode ((scalar_int_mode::from_int) E_SImode))), 0
, 1, 1, 0, 0)
, tem);
1569
1570 size = GET_MODE_SIZE (Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode (
(scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode
::from_int) E_SImode)))
);
1571
1572 /* Save the structure value address unless this is passed as an
1573 "invisible" first argument. */
1574 if (struct_incoming_value)
1575 emit_move_insn (adjust_address (registers, Pmode, size)adjust_address_1 (registers, (global_options.x_ix86_pmode == PMODE_DI
? (scalar_int_mode ((scalar_int_mode::from_int) E_DImode)) :
(scalar_int_mode ((scalar_int_mode::from_int) E_SImode))), size
, 1, 1, 0, 0)
,
1576 copy_to_reg (struct_incoming_value));
1577
1578 /* Return the address of the block. */
1579 return copy_addr_to_reg (XEXP (registers, 0)(((registers)->u.fld[0]).rt_rtx));
1580}
1581
1582/* __builtin_apply_args returns block of memory allocated on
1583 the stack into which is stored the arg pointer, structure
1584 value address, static chain, and all the registers that might
1585 possibly be used in performing a function call. The code is
1586 moved to the start of the function so the incoming values are
1587 saved. */
1588
1589static rtx
1590expand_builtin_apply_args (void)
1591{
1592 /* Don't do __builtin_apply_args more than once in a function.
1593 Save the result of the first call and reuse it. */
1594 if (apply_args_value((&x_rtl)->expr.x_apply_args_value) != 0)
1595 return apply_args_value((&x_rtl)->expr.x_apply_args_value);
1596 {
1597 /* When this function is called, it means that registers must be
1598 saved on entry to this function. So we migrate the
1599 call to the first insn of this function. */
1600 rtx temp;
1601
1602 start_sequence ();
1603 temp = expand_builtin_apply_args_1 ();
1604 rtx_insn *seq = get_insns ();
1605 end_sequence ();
1606
1607 apply_args_value((&x_rtl)->expr.x_apply_args_value) = temp;
1608
1609 /* Put the insns after the NOTE that starts the function.
1610 If this is inside a start_sequence, make the outer-level insn
1611 chain current, so the code is placed at the start of the
1612 function. If internal_arg_pointer is a non-virtual pseudo,
1613 it needs to be placed after the function that initializes
1614 that pseudo. */
1615 push_topmost_sequence ();
1616 if (REG_P (crtl->args.internal_arg_pointer)(((enum rtx_code) ((&x_rtl)->args.internal_arg_pointer
)->code) == REG)
1617 && REGNO (crtl->args.internal_arg_pointer)(rhs_regno((&x_rtl)->args.internal_arg_pointer)) > LAST_VIRTUAL_REGISTER(((76)) + 5))
1618 emit_insn_before (seq, parm_birth_insn((&x_rtl)->x_parm_birth_insn));
1619 else
1620 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1621 pop_topmost_sequence ();
1622 return temp;
1623 }
1624}
1625
1626/* Perform an untyped call and save the state required to perform an
1627 untyped return of whatever value was returned by the given function. */
1628
1629static rtx
1630expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1631{
1632 int size, align, regno;
1633 fixed_size_mode mode;
1634 rtx incoming_args, result, reg, dest, src;
1635 rtx_call_insn *call_insn;
1636 rtx old_stack_level = 0;
1637 rtx call_fusage = 0;
1638 rtx struct_value = targetm.calls.struct_value_rtx (cfun(cfun + 0) ? TREE_TYPE (cfun->decl)((contains_struct_check (((cfun + 0)->decl), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 1638, __FUNCTION__))->typed.type)
: 0, 0);
1639
1640 arguments = convert_memory_address (Pmode, arguments)convert_memory_address_addr_space (((global_options.x_ix86_pmode
== PMODE_DI ? (scalar_int_mode ((scalar_int_mode::from_int) E_DImode
)) : (scalar_int_mode ((scalar_int_mode::from_int) E_SImode))
)), (arguments), 0)
;
1641
1642 /* Create a block where the return registers can be saved. */
1643 result = assign_stack_local (BLKmode((void) 0, E_BLKmode), apply_result_size (), -1);
1644
1645 /* Fetch the arg pointer from the ARGUMENTS block. */
1646 incoming_args = gen_reg_rtx (Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode (
(scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode
::from_int) E_SImode)))
);
1647 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode (
(scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode
::from_int) E_SImode)))
, arguments));
1648 if (!STACK_GROWS_DOWNWARD1)
1649 incoming_args = expand_simple_binop (Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode (
(scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode
::from_int) E_SImode)))
, MINUS, incoming_args, argsize,
1650 incoming_args, 0, OPTAB_LIB_WIDEN);
1651
1652 /* Push a new argument block and copy the arguments. Do not allow
1653 the (potential) memcpy call below to interfere with our stack
1654 manipulations. */
1655 do_pending_stack_adjust ();
1656 NO_DEFER_POP(((&x_rtl)->expr.x_inhibit_defer_pop) += 1);
1657
1658 /* Save the stack with nonlocal if available. */
1659 if (targetm.have_save_stack_nonlocal ())
1660 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1661 else
1662 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1663
1664 /* Allocate a block of memory onto the stack and copy the memory
1665 arguments to the outgoing arguments address. We can pass TRUE
1666 as the 4th argument because we just saved the stack pointer
1667 and will restore it right after the call. */
1668 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT(((global_options.x_target_flags & (1U << 12)) != 0
) ? 32 : (((global_options.x_ix86_isa_flags & (1UL <<
15)) != 0) ? 512 : (((global_options.x_ix86_isa_flags & (
1UL << 8)) != 0) ? 256 : 128)))
, -1, true);
1669
1670 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1671 may have already set current_function_calls_alloca to true.
1672 current_function_calls_alloca won't be set if argsize is zero,
1673 so we have to guarantee need_drap is true here. */
1674 if (SUPPORTS_STACK_ALIGNMENT((((unsigned int) 1 << 28) * 8) > ((((global_options
.x_ix86_isa_flags & (1UL << 1)) != 0) && ix86_cfun_abi
() == MS_ABI) ? 128 : ((8) * (((global_options.x_ix86_isa_flags
& (1UL << 1)) != 0) ? 8 : 4))))
)
1675 crtl(&x_rtl)->need_drap = true;
1676
1677 dest = virtual_outgoing_args_rtx((this_target_rtl->x_global_rtl)[GR_VIRTUAL_OUTGOING_ARGS]
)
;
1678 if (!STACK_GROWS_DOWNWARD1)
1679 {
1680 if (CONST_INT_P (argsize)(((enum rtx_code) (argsize)->code) == CONST_INT))
1681 dest = plus_constant (Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode (
(scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode
::from_int) E_SImode)))
, dest, -INTVAL (argsize)((argsize)->u.hwint[0]));
1682 else
1683 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize))gen_rtx_fmt_ee_stat ((PLUS), (((global_options.x_ix86_pmode ==
PMODE_DI ? (scalar_int_mode ((scalar_int_mode::from_int) E_DImode
)) : (scalar_int_mode ((scalar_int_mode::from_int) E_SImode))
))), ((dest)), ((negate_rtx ((global_options.x_ix86_pmode == PMODE_DI
? (scalar_int_mode ((scalar_int_mode::from_int) E_DImode)) :
(scalar_int_mode ((scalar_int_mode::from_int) E_SImode))), argsize
))) )
;
1684 }
1685 dest = gen_rtx_MEM (BLKmode((void) 0, E_BLKmode), dest);
1686 set_mem_align (dest, PARM_BOUNDARY((8) * (((global_options.x_ix86_isa_flags & (1UL <<
1)) != 0) ? 8 : 4))
);
1687 src = gen_rtx_MEM (BLKmode((void) 0, E_BLKmode), incoming_args);
1688 set_mem_align (src, PARM_BOUNDARY((8) * (((global_options.x_ix86_isa_flags & (1UL <<
1)) != 0) ? 8 : 4))
);
1689 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1690
1691 /* Refer to the argument block. */
1692 apply_args_size ();
1693 arguments = gen_rtx_MEM (BLKmode((void) 0, E_BLKmode), arguments);
1694 set_mem_align (arguments, PARM_BOUNDARY((8) * (((global_options.x_ix86_isa_flags & (1UL <<
1)) != 0) ? 8 : 4))
);
1695
1696 /* Walk past the arg-pointer and structure value address. */
1697 size = GET_MODE_SIZE (Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode (
(scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode
::from_int) E_SImode)))
);
1698 if (struct_value)
1699 size += GET_MODE_SIZE (Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode (
(scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode
::from_int) E_SImode)))
);
1700
1701 /* Restore each of the registers previously saved. Make USE insns
1702 for each of these registers for use in making the call. */
1703 for (regno = 0; regno < FIRST_PSEUDO_REGISTER76; regno++)
1704 if ((mode = apply_args_mode(this_target_builtins->x_apply_args_mode)[regno]) != VOIDmode((void) 0, E_VOIDmode))
1705 {
1706 align = GET_MODE_ALIGNMENT (mode)get_mode_alignment (mode) / BITS_PER_UNIT(8);
1707 if (size % align != 0)
1708 size = CEIL (size, align)(((size) + (align) - 1) / (align)) * align;
1709 reg = gen_rtx_REG (mode, regno);
1710 emit_move_insn (reg, adjust_address (arguments, mode, size)adjust_address_1 (arguments, mode, size, 1, 1, 0, 0));
1711 use_reg (&call_fusage, reg);
1712 size += GET_MODE_SIZE (mode);
1713 }
1714
1715 /* Restore the structure value address unless this is passed as an
1716 "invisible" first argument. */
1717 size = GET_MODE_SIZE (Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode (
(scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode
::from_int) E_SImode)))
);
1718 if (struct_value)
1719 {
1720 rtx value = gen_reg_rtx (Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode (
(scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode
::from_int) E_SImode)))
);
1721 emit_move_insn (value, adjust_address (arguments, Pmode, size)adjust_address_1 (arguments, (global_options.x_ix86_pmode == PMODE_DI
? (scalar_int_mode ((scalar_int_mode::from_int) E_DImode)) :
(scalar_int_mode ((scalar_int_mode::from_int) E_SImode))), size
, 1, 1, 0, 0)
);
1722 emit_move_insn (struct_value, value);
1723 if (REG_P (struct_value)(((enum rtx_code) (struct_value)->code) == REG))
1724 use_reg (&call_fusage, struct_value);
1725 }
1726
1727 /* All arguments and registers used for the call are set up by now! */
1728 function = prepare_call_address (NULL__null, function, NULL__null, &call_fusage, 0, 0);
1729
1730 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1731 and we don't want to load it into a register as an optimization,
1732 because prepare_call_address already did it if it should be done. */
1733 if (GET_CODE (function)((enum rtx_code) (function)->code) != SYMBOL_REF)
1734 function = memory_address (FUNCTION_MODE, function)memory_address_addr_space (((scalar_int_mode ((scalar_int_mode
::from_int) E_QImode))), (function), 0)
;
1735
1736 /* Generate the actual call instruction and save the return value. */
1737 if (targetm.have_untyped_call ())
1738 {
1739 rtx mem = gen_rtx_MEM (FUNCTION_MODE(scalar_int_mode ((scalar_int_mode::from_int) E_QImode)), function);
1740 rtx_insn *seq = targetm.gen_untyped_call (mem, result,
1741 result_vector (1, result));
1742 for (rtx_insn *insn = seq; insn; insn = NEXT_INSN (insn))
1743 if (CALL_P (insn)(((enum rtx_code) (insn)->code) == CALL_INSN))
1744 add_reg_note (insn, REG_UNTYPED_CALL, NULL_RTX(rtx) 0);
1745 emit_insn (seq);
1746 }
1747 else if (targetm.have_call_value ())
1748 {
1749 rtx valreg = 0;
1750
1751 /* Locate the unique return register. It is not possible to
1752 express a call that sets more than one return register using
1753 call_value; use untyped_call for that. In fact, untyped_call
1754 only needs to save the return registers in the given block. */
1755 for (regno = 0; regno < FIRST_PSEUDO_REGISTER76; regno++)
1756 if ((mode = apply_result_mode(this_target_builtins->x_apply_result_mode)[regno]) != VOIDmode((void) 0, E_VOIDmode))
1757 {
1758 gcc_assert (!valreg)((void)(!(!valreg) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 1758, __FUNCTION__), 0 : 0))
; /* have_untyped_call required. */
1759
1760 valreg = gen_rtx_REG (mode, regno);
1761 }
1762
1763 emit_insn (targetm.gen_call_value (valreg,
1764 gen_rtx_MEM (FUNCTION_MODE(scalar_int_mode ((scalar_int_mode::from_int) E_QImode)), function),
1765 const0_rtx(const_int_rtx[64]), NULL_RTX(rtx) 0, const0_rtx(const_int_rtx[64])));
1766
1767 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0)adjust_address_1 (result, ((machine_mode) (valreg)->mode),
0, 1, 1, 0, 0)
, valreg);
1768 }
1769 else
1770 gcc_unreachable ()(fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 1770, __FUNCTION__))
;
1771
1772 /* Find the CALL insn we just emitted, and attach the register usage
1773 information. */
1774 call_insn = last_call_insn ();
1775 add_function_usage_to (call_insn, call_fusage);
1776
1777 /* Restore the stack. */
1778 if (targetm.have_save_stack_nonlocal ())
1779 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1780 else
1781 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1782 fixup_args_size_notes (call_insn, get_last_insn (), 0);
1783
1784 OK_DEFER_POP(((&x_rtl)->expr.x_inhibit_defer_pop) -= 1);
1785
1786 /* Return the address of the result block. */
1787 result = copy_addr_to_reg (XEXP (result, 0)(((result)->u.fld[0]).rt_rtx));
1788 return convert_memory_address (ptr_mode, result)convert_memory_address_addr_space ((ptr_mode), (result), 0);
1789}
1790
1791/* Perform an untyped return. */
1792
1793static void
1794expand_builtin_return (rtx result)
1795{
1796 int size, align, regno;
1797 fixed_size_mode mode;
1798 rtx reg;
1799 rtx_insn *call_fusage = 0;
1800
1801 result = convert_memory_address (Pmode, result)convert_memory_address_addr_space (((global_options.x_ix86_pmode
== PMODE_DI ? (scalar_int_mode ((scalar_int_mode::from_int) E_DImode
)) : (scalar_int_mode ((scalar_int_mode::from_int) E_SImode))
)), (result), 0)
;
1802
1803 apply_result_size ();
1804 result = gen_rtx_MEM (BLKmode((void) 0, E_BLKmode), result);
1805
1806 if (targetm.have_untyped_return ())
1807 {
1808 rtx vector = result_vector (0, result);
1809 emit_jump_insn (targetm.gen_untyped_return (result, vector));
1810 emit_barrier ();
1811 return;
1812 }
1813
1814 /* Restore the return value and note that each value is used. */
1815 size = 0;
1816 for (regno = 0; regno < FIRST_PSEUDO_REGISTER76; regno++)
1817 if ((mode = apply_result_mode(this_target_builtins->x_apply_result_mode)[regno]) != VOIDmode((void) 0, E_VOIDmode))
1818 {
1819 align = GET_MODE_ALIGNMENT (mode)get_mode_alignment (mode) / BITS_PER_UNIT(8);
1820 if (size % align != 0)
1821 size = CEIL (size, align)(((size) + (align) - 1) / (align)) * align;
1822 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno)(regno));
1823 emit_move_insn (reg, adjust_address (result, mode, size)adjust_address_1 (result, mode, size, 1, 1, 0, 0));
1824
1825 push_to_sequence (call_fusage);
1826 emit_use (reg);
1827 call_fusage = get_insns ();
1828 end_sequence ();
1829 size += GET_MODE_SIZE (mode);
1830 }
1831
1832 /* Put the USE insns before the return. */
1833 emit_insn (call_fusage);
1834
1835 /* Return whatever values was restored by jumping directly to the end
1836 of the function. */
1837 expand_naked_return ();
1838}
1839
1840/* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1841
1842static enum type_class
1843type_to_class (tree type)
1844{
1845 switch (TREE_CODE (type)((enum tree_code) (type)->base.code))
1846 {
1847 case VOID_TYPE: return void_type_class;
1848 case INTEGER_TYPE: return integer_type_class;
1849 case ENUMERAL_TYPE: return enumeral_type_class;
1850 case BOOLEAN_TYPE: return boolean_type_class;
1851 case POINTER_TYPE: return pointer_type_class;
1852 case REFERENCE_TYPE: return reference_type_class;
1853 case OFFSET_TYPE: return offset_type_class;
1854 case REAL_TYPE: return real_type_class;
1855 case COMPLEX_TYPE: return complex_type_class;
1856 case FUNCTION_TYPE: return function_type_class;
1857 case METHOD_TYPE: return method_type_class;
1858 case RECORD_TYPE: return record_type_class;
1859 case UNION_TYPE:
1860 case QUAL_UNION_TYPE: return union_type_class;
1861 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)((tree_check2 ((type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 1861, __FUNCTION__, (ARRAY_TYPE), (INTEGER_TYPE)))->type_common
.string_flag)
1862 ? string_type_class : array_type_class);
1863 case LANG_TYPE: return lang_type_class;
1864 case OPAQUE_TYPE: return opaque_type_class;
1865 default: return no_type_class;
1866 }
1867}
1868
1869/* Expand a call EXP to __builtin_classify_type. */
1870
1871static rtx
1872expand_builtin_classify_type (tree exp)
1873{
1874 if (call_expr_nargs (exp)(((int)((unsigned long) (*tree_int_cst_elt_check (((tree_class_check
((exp), (tcc_vl_exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 1874, __FUNCTION__))->exp.operands[0]), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 1874, __FUNCTION__)))) - 3)
)
1875 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))))gen_rtx_CONST_INT (((void) 0, E_VOIDmode), (type_to_class (((
contains_struct_check (((*((const_cast<tree*> (tree_operand_check
(((tree_check ((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 1875, __FUNCTION__, (CALL_EXPR)))), ((0) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 1875, __FUNCTION__)))))), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 1875, __FUNCTION__))->typed.type))))
;
1876 return GEN_INT (no_type_class)gen_rtx_CONST_INT (((void) 0, E_VOIDmode), (no_type_class));
1877}
1878
1879/* This helper macro, meant to be used in mathfn_built_in below, determines
1880 which among a set of builtin math functions is appropriate for a given type
1881 mode. The `F' (float) and `L' (long double) are automatically generated
1882 from the 'double' case. If a function supports the _Float<N> and _Float<N>X
1883 types, there are additional types that are considered with 'F32', 'F64',
1884 'F128', etc. suffixes. */
1885#define CASE_MATHFN(MATHFN) \
1886 CASE_CFN_##MATHFN: \
1887 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1888 fcodel = BUILT_IN_##MATHFN##L ; break;
1889/* Similar to the above, but also add support for the _Float<N> and _Float<N>X
1890 types. */
1891#define CASE_MATHFN_FLOATN(MATHFN) \
1892 CASE_CFN_##MATHFN: \
1893 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1894 fcodel = BUILT_IN_##MATHFN##L ; fcodef16 = BUILT_IN_##MATHFN##F16 ; \
1895 fcodef32 = BUILT_IN_##MATHFN##F32; fcodef64 = BUILT_IN_##MATHFN##F64 ; \
1896 fcodef128 = BUILT_IN_##MATHFN##F128 ; fcodef32x = BUILT_IN_##MATHFN##F32X ; \
1897 fcodef64x = BUILT_IN_##MATHFN##F64X ; fcodef128x = BUILT_IN_##MATHFN##F128X ;\
1898 break;
1899/* Similar to above, but appends _R after any F/L suffix. */
1900#define CASE_MATHFN_REENT(MATHFN) \
1901 case CFN_BUILT_IN_##MATHFN##_R: \
1902 case CFN_BUILT_IN_##MATHFN##F_R: \
1903 case CFN_BUILT_IN_##MATHFN##L_R: \
1904 fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
1905 fcodel = BUILT_IN_##MATHFN##L_R ; break;
1906
1907/* Return a function equivalent to FN but operating on floating-point
1908 values of type TYPE, or END_BUILTINS if no such function exists.
1909 This is purely an operation on function codes; it does not guarantee
1910 that the target actually has an implementation of the function. */
1911
1912static built_in_function
1913mathfn_built_in_2 (tree type, combined_fn fn)
1914{
1915 tree mtype;
1916 built_in_function fcode, fcodef, fcodel;
1917 built_in_function fcodef16 = END_BUILTINS;
1918 built_in_function fcodef32 = END_BUILTINS;
1919 built_in_function fcodef64 = END_BUILTINS;
1920 built_in_function fcodef128 = END_BUILTINS;
1921 built_in_function fcodef32x = END_BUILTINS;
1922 built_in_function fcodef64x = END_BUILTINS;
1923 built_in_function fcodef128x = END_BUILTINS;
1924
1925 switch (fn)
1926 {
1927#define SEQ_OF_CASE_MATHFN \
1928 CASE_MATHFN (ACOS) \
1929 CASE_MATHFN (ACOSH) \
1930 CASE_MATHFN (ASIN) \
1931 CASE_MATHFN (ASINH) \
1932 CASE_MATHFN (ATAN) \
1933 CASE_MATHFN (ATAN2) \
1934 CASE_MATHFN (ATANH) \
1935 CASE_MATHFN (CBRT) \
1936 CASE_MATHFN_FLOATN (CEIL) \
1937 CASE_MATHFN (CEXPI) \
1938 CASE_MATHFN_FLOATN (COPYSIGN) \
1939 CASE_MATHFN (COS) \
1940 CASE_MATHFN (COSH) \
1941 CASE_MATHFN (DREM) \
1942 CASE_MATHFN (ERF) \
1943 CASE_MATHFN (ERFC) \
1944 CASE_MATHFN (EXP) \
1945 CASE_MATHFN (EXP10) \
1946 CASE_MATHFN (EXP2) \
1947 CASE_MATHFN (EXPM1) \
1948 CASE_MATHFN (FABS) \
1949 CASE_MATHFN (FDIM) \
1950 CASE_MATHFN_FLOATN (FLOOR) \
1951 CASE_MATHFN_FLOATN (FMA) \
1952 CASE_MATHFN_FLOATN (FMAX) \
1953 CASE_MATHFN_FLOATN (FMIN) \
1954 CASE_MATHFN (FMOD) \
1955 CASE_MATHFN (FREXP) \
1956 CASE_MATHFN (GAMMA) \
1957 CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */ \
1958 CASE_MATHFN (HUGE_VAL) \
1959 CASE_MATHFN (HYPOT) \
1960 CASE_MATHFN (ILOGB) \
1961 CASE_MATHFN (ICEIL) \
1962 CASE_MATHFN (IFLOOR) \
1963 CASE_MATHFN (INF) \
1964 CASE_MATHFN (IRINT) \
1965 CASE_MATHFN (IROUND) \
1966 CASE_MATHFN (ISINF) \
1967 CASE_MATHFN (J0) \
1968 CASE_MATHFN (J1) \
1969 CASE_MATHFN (JN) \
1970 CASE_MATHFN (LCEIL) \
1971 CASE_MATHFN (LDEXP) \
1972 CASE_MATHFN (LFLOOR) \
1973 CASE_MATHFN (LGAMMA) \
1974 CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */ \
1975 CASE_MATHFN (LLCEIL) \
1976 CASE_MATHFN (LLFLOOR) \
1977 CASE_MATHFN (LLRINT) \
1978 CASE_MATHFN (LLROUND) \
1979 CASE_MATHFN (LOG) \
1980 CASE_MATHFN (LOG10) \
1981 CASE_MATHFN (LOG1P) \
1982 CASE_MATHFN (LOG2) \
1983 CASE_MATHFN (LOGB) \
1984 CASE_MATHFN (LRINT) \
1985 CASE_MATHFN (LROUND) \
1986 CASE_MATHFN (MODF) \
1987 CASE_MATHFN (NAN) \
1988 CASE_MATHFN (NANS) \
1989 CASE_MATHFN_FLOATN (NEARBYINT) \
1990 CASE_MATHFN (NEXTAFTER) \
1991 CASE_MATHFN (NEXTTOWARD) \
1992 CASE_MATHFN (POW) \
1993 CASE_MATHFN (POWI) \
1994 CASE_MATHFN (POW10) \
1995 CASE_MATHFN (REMAINDER) \
1996 CASE_MATHFN (REMQUO) \
1997 CASE_MATHFN_FLOATN (RINT) \
1998 CASE_MATHFN_FLOATN (ROUND) \
1999 CASE_MATHFN_FLOATN (ROUNDEVEN) \
2000 CASE_MATHFN (SCALB) \
2001 CASE_MATHFN (SCALBLN) \
2002 CASE_MATHFN (SCALBN) \
2003 CASE_MATHFN (SIGNBIT) \
2004 CASE_MATHFN (SIGNIFICAND) \
2005 CASE_MATHFN (SIN) \
2006 CASE_MATHFN (SINCOS) \
2007 CASE_MATHFN (SINH) \
2008 CASE_MATHFN_FLOATN (SQRT) \
2009 CASE_MATHFN (TAN) \
2010 CASE_MATHFN (TANH) \
2011 CASE_MATHFN (TGAMMA) \
2012 CASE_MATHFN_FLOATN (TRUNC) \
2013 CASE_MATHFN (Y0) \
2014 CASE_MATHFN (Y1) \
2015 CASE_MATHFN (YN)
2016
2017 SEQ_OF_CASE_MATHFN
2018
2019 default:
2020 return END_BUILTINS;
2021 }
2022
2023 mtype = TYPE_MAIN_VARIANT (type)((tree_class_check ((type), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2023, __FUNCTION__))->type_common.main_variant)
;
2024 if (mtype == double_type_nodeglobal_trees[TI_DOUBLE_TYPE])
2025 return fcode;
2026 else if (mtype == float_type_nodeglobal_trees[TI_FLOAT_TYPE])
2027 return fcodef;
2028 else if (mtype == long_double_type_nodeglobal_trees[TI_LONG_DOUBLE_TYPE])
2029 return fcodel;
2030 else if (mtype == float16_type_nodeglobal_trees[TI_FLOAT16_TYPE])
2031 return fcodef16;
2032 else if (mtype == float32_type_nodeglobal_trees[TI_FLOAT32_TYPE])
2033 return fcodef32;
2034 else if (mtype == float64_type_nodeglobal_trees[TI_FLOAT64_TYPE])
2035 return fcodef64;
2036 else if (mtype == float128_type_nodeglobal_trees[TI_FLOAT128_TYPE])
2037 return fcodef128;
2038 else if (mtype == float32x_type_nodeglobal_trees[TI_FLOAT32X_TYPE])
2039 return fcodef32x;
2040 else if (mtype == float64x_type_nodeglobal_trees[TI_FLOAT64X_TYPE])
2041 return fcodef64x;
2042 else if (mtype == float128x_type_nodeglobal_trees[TI_FLOAT128X_TYPE])
2043 return fcodef128x;
2044 else
2045 return END_BUILTINS;
2046}
2047
2048#undef CASE_MATHFN
2049#undef CASE_MATHFN_FLOATN
2050#undef CASE_MATHFN_REENT
2051
2052/* Return mathematic function equivalent to FN but operating directly on TYPE,
2053 if available. If IMPLICIT_P is true use the implicit builtin declaration,
2054 otherwise use the explicit declaration. If we can't do the conversion,
2055 return null. */
2056
2057static tree
2058mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
2059{
2060 built_in_function fcode2 = mathfn_built_in_2 (type, fn);
2061 if (fcode2 == END_BUILTINS)
2062 return NULL_TREE(tree) __null;
2063
2064 if (implicit_p && !builtin_decl_implicit_p (fcode2))
2065 return NULL_TREE(tree) __null;
2066
2067 return builtin_decl_explicit (fcode2);
2068}
2069
2070/* Like mathfn_built_in_1, but always use the implicit array. */
2071
2072tree
2073mathfn_built_in (tree type, combined_fn fn)
2074{
2075 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
2076}
2077
2078/* Like mathfn_built_in_1, but take a built_in_function and
2079 always use the implicit array. */
2080
2081tree
2082mathfn_built_in (tree type, enum built_in_function fn)
2083{
2084 return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
2085}
2086
2087/* Return the type associated with a built in function, i.e., the one
2088 to be passed to mathfn_built_in to get the type-specific
2089 function. */
2090
2091tree
2092mathfn_built_in_type (combined_fn fn)
2093{
2094#define CASE_MATHFN(MATHFN) \
2095 case CFN_BUILT_IN_##MATHFN: \
2096 return double_type_nodeglobal_trees[TI_DOUBLE_TYPE]; \
2097 case CFN_BUILT_IN_##MATHFN##F: \
2098 return float_type_nodeglobal_trees[TI_FLOAT_TYPE]; \
2099 case CFN_BUILT_IN_##MATHFN##L: \
2100 return long_double_type_nodeglobal_trees[TI_LONG_DOUBLE_TYPE];
2101
2102#define CASE_MATHFN_FLOATN(MATHFN) \
2103 CASE_MATHFN(MATHFN) \
2104 case CFN_BUILT_IN_##MATHFN##F16: \
2105 return float16_type_nodeglobal_trees[TI_FLOAT16_TYPE]; \
2106 case CFN_BUILT_IN_##MATHFN##F32: \
2107 return float32_type_nodeglobal_trees[TI_FLOAT32_TYPE]; \
2108 case CFN_BUILT_IN_##MATHFN##F64: \
2109 return float64_type_nodeglobal_trees[TI_FLOAT64_TYPE]; \
2110 case CFN_BUILT_IN_##MATHFN##F128: \
2111 return float128_type_nodeglobal_trees[TI_FLOAT128_TYPE]; \
2112 case CFN_BUILT_IN_##MATHFN##F32X: \
2113 return float32x_type_nodeglobal_trees[TI_FLOAT32X_TYPE]; \
2114 case CFN_BUILT_IN_##MATHFN##F64X: \
2115 return float64x_type_nodeglobal_trees[TI_FLOAT64X_TYPE]; \
2116 case CFN_BUILT_IN_##MATHFN##F128X: \
2117 return float128x_type_nodeglobal_trees[TI_FLOAT128X_TYPE];
2118
2119/* Similar to above, but appends _R after any F/L suffix. */
2120#define CASE_MATHFN_REENT(MATHFN) \
2121 case CFN_BUILT_IN_##MATHFN##_R: \
2122 return double_type_nodeglobal_trees[TI_DOUBLE_TYPE]; \
2123 case CFN_BUILT_IN_##MATHFN##F_R: \
2124 return float_type_nodeglobal_trees[TI_FLOAT_TYPE]; \
2125 case CFN_BUILT_IN_##MATHFN##L_R: \
2126 return long_double_type_nodeglobal_trees[TI_LONG_DOUBLE_TYPE];
2127
2128 switch (fn)
2129 {
2130 SEQ_OF_CASE_MATHFN
2131
2132 default:
2133 return NULL_TREE(tree) __null;
2134 }
2135
2136#undef CASE_MATHFN
2137#undef CASE_MATHFN_FLOATN
2138#undef CASE_MATHFN_REENT
2139#undef SEQ_OF_CASE_MATHFN
2140}
2141
2142/* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
2143 return its code, otherwise return IFN_LAST. Note that this function
2144 only tests whether the function is defined in internals.def, not whether
2145 it is actually available on the target. */
2146
2147internal_fn
2148associated_internal_fn (tree fndecl)
2149{
2150 gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)((void)(!(((built_in_class) (tree_check ((fndecl), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2150, __FUNCTION__, (FUNCTION_DECL)))->function_decl.built_in_class
) == BUILT_IN_NORMAL) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2150, __FUNCTION__), 0 : 0))
;
2151 tree return_type = TREE_TYPE (TREE_TYPE (fndecl))((contains_struct_check ((((contains_struct_check ((fndecl), (
TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2151, __FUNCTION__))->typed.type)), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2151, __FUNCTION__))->typed.type)
;
2152 switch (DECL_FUNCTION_CODE (fndecl))
2153 {
2154#define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
2155 CASE_FLT_FN (BUILT_IN_##NAME)case BUILT_IN_##NAME: case BUILT_IN_##NAMEF: case BUILT_IN_##
NAMEL
: return IFN_##NAME;
2156#define DEF_INTERNAL_FLT_FLOATN_FN(NAME, FLAGS, OPTAB, TYPE) \
2157 CASE_FLT_FN (BUILT_IN_##NAME)case BUILT_IN_##NAME: case BUILT_IN_##NAMEF: case BUILT_IN_##
NAMEL
: return IFN_##NAME; \
2158 CASE_FLT_FN_FLOATN_NX (BUILT_IN_##NAME)case BUILT_IN_##NAMEF16: case BUILT_IN_##NAMEF32: case BUILT_IN_
##NAMEF64: case BUILT_IN_##NAMEF128: case BUILT_IN_##NAMEF32X
: case BUILT_IN_##NAMEF64X: case BUILT_IN_##NAMEF128X
: return IFN_##NAME;
2159#define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
2160 CASE_INT_FN (BUILT_IN_##NAME)case BUILT_IN_##NAME: case BUILT_IN_##NAMEL: case BUILT_IN_##
NAMELL: case BUILT_IN_##NAMEIMAX
: return IFN_##NAME;
2161#include "internal-fn.def"
2162
2163 CASE_FLT_FN (BUILT_IN_POW10)case BUILT_IN_POW10: case BUILT_IN_POW10F: case BUILT_IN_POW10L:
2164 return IFN_EXP10;
2165
2166 CASE_FLT_FN (BUILT_IN_DREM)case BUILT_IN_DREM: case BUILT_IN_DREMF: case BUILT_IN_DREML:
2167 return IFN_REMAINDER;
2168
2169 CASE_FLT_FN (BUILT_IN_SCALBN)case BUILT_IN_SCALBN: case BUILT_IN_SCALBNF: case BUILT_IN_SCALBNL:
2170 CASE_FLT_FN (BUILT_IN_SCALBLN)case BUILT_IN_SCALBLN: case BUILT_IN_SCALBLNF: case BUILT_IN_SCALBLNL:
2171 if (REAL_MODE_FORMAT (TYPE_MODE (return_type))(real_format_for_mode[(((enum mode_class) mode_class[((((enum
tree_code) ((tree_class_check ((return_type), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2171, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(return_type) : (return_type)->type_common.mode)]) == MODE_DECIMAL_FLOAT
) ? (((((((enum tree_code) ((tree_class_check ((return_type),
(tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2171, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(return_type) : (return_type)->type_common.mode)) - MIN_MODE_DECIMAL_FLOAT
) + (MAX_MODE_FLOAT - MIN_MODE_FLOAT + 1)) : ((enum mode_class
) mode_class[((((enum tree_code) ((tree_class_check ((return_type
), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2171, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(return_type) : (return_type)->type_common.mode)]) == MODE_FLOAT
? ((((((enum tree_code) ((tree_class_check ((return_type), (
tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2171, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(return_type) : (return_type)->type_common.mode)) - MIN_MODE_FLOAT
) : ((fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2171, __FUNCTION__)), 0)])
->b == 2)
2172 return IFN_LDEXP;
2173 return IFN_LAST;
2174
2175 default:
2176 return IFN_LAST;
2177 }
2178}
2179
2180/* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
2181 on the current target by a call to an internal function, return the
2182 code of that internal function, otherwise return IFN_LAST. The caller
2183 is responsible for ensuring that any side-effects of the built-in
2184 call are dealt with correctly. E.g. if CALL sets errno, the caller
2185 must decide that the errno result isn't needed or make it available
2186 in some other way. */
2187
2188internal_fn
2189replacement_internal_fn (gcall *call)
2190{
2191 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2192 {
2193 internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
2194 if (ifn != IFN_LAST)
2195 {
2196 tree_pair types = direct_internal_fn_types (ifn, call);
2197 optimization_type opt_type = bb_optimization_type (gimple_bb (call));
2198 if (direct_internal_fn_supported_p (ifn, types, opt_type))
2199 return ifn;
2200 }
2201 }
2202 return IFN_LAST;
2203}
2204
2205/* Expand a call to the builtin trinary math functions (fma).
2206 Return NULL_RTX if a normal call should be emitted rather than expanding the
2207 function in-line. EXP is the expression that is a call to the builtin
2208 function; if convenient, the result should be placed in TARGET.
2209 SUBTARGET may be used as the target for computing one of EXP's
2210 operands. */
2211
2212static rtx
2213expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2214{
2215 optab builtin_optab;
2216 rtx op0, op1, op2, result;
2217 rtx_insn *insns;
2218 tree fndecl = get_callee_fndecl (exp);
2219 tree arg0, arg1, arg2;
2220 machine_mode mode;
2221
2222 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2223 return NULL_RTX(rtx) 0;
2224
2225 arg0 = CALL_EXPR_ARG (exp, 0)(*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2225, __FUNCTION__, (CALL_EXPR)))), ((0) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2225, __FUNCTION__)))))
;
2226 arg1 = CALL_EXPR_ARG (exp, 1)(*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2226, __FUNCTION__, (CALL_EXPR)))), ((1) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2226, __FUNCTION__)))))
;
2227 arg2 = CALL_EXPR_ARG (exp, 2)(*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2227, __FUNCTION__, (CALL_EXPR)))), ((2) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2227, __FUNCTION__)))))
;
2228
2229 switch (DECL_FUNCTION_CODE (fndecl))
2230 {
2231 CASE_FLT_FN (BUILT_IN_FMA)case BUILT_IN_FMA: case BUILT_IN_FMAF: case BUILT_IN_FMAL:
2232 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA)case BUILT_IN_FMAF16: case BUILT_IN_FMAF32: case BUILT_IN_FMAF64
: case BUILT_IN_FMAF128: case BUILT_IN_FMAF32X: case BUILT_IN_FMAF64X
: case BUILT_IN_FMAF128X
:
2233 builtin_optab = fma_optab; break;
2234 default:
2235 gcc_unreachable ()(fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2235, __FUNCTION__))
;
2236 }
2237
2238 /* Make a suitable register to place result in. */
2239 mode = TYPE_MODE (TREE_TYPE (exp))((((enum tree_code) ((tree_class_check ((((contains_struct_check
((exp), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2239, __FUNCTION__))->typed.type)), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2239, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(((contains_struct_check ((exp), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2239, __FUNCTION__))->typed.type)) : (((contains_struct_check
((exp), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2239, __FUNCTION__))->typed.type))->type_common.mode)
;
2240
2241 /* Before working hard, check whether the instruction is available. */
2242 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2243 return NULL_RTX(rtx) 0;
2244
2245 result = gen_reg_rtx (mode);
2246
2247 /* Always stabilize the argument list. */
2248 CALL_EXPR_ARG (exp, 0)(*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2248, __FUNCTION__, (CALL_EXPR)))), ((0) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2248, __FUNCTION__)))))
= arg0 = builtin_save_expr (arg0);
2249 CALL_EXPR_ARG (exp, 1)(*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2249, __FUNCTION__, (CALL_EXPR)))), ((1) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2249, __FUNCTION__)))))
= arg1 = builtin_save_expr (arg1);
2250 CALL_EXPR_ARG (exp, 2)(*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2250, __FUNCTION__, (CALL_EXPR)))), ((2) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2250, __FUNCTION__)))))
= arg2 = builtin_save_expr (arg2);
2251
2252 op0 = expand_expr (arg0, subtarget, VOIDmode((void) 0, E_VOIDmode), EXPAND_NORMAL);
2253 op1 = expand_normal (arg1);
2254 op2 = expand_normal (arg2);
2255
2256 start_sequence ();
2257
2258 /* Compute into RESULT.
2259 Set RESULT to wherever the result comes back. */
2260 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2261 result, 0);
2262
2263 /* If we were unable to expand via the builtin, stop the sequence
2264 (without outputting the insns) and call to the library function
2265 with the stabilized argument list. */
2266 if (result == 0)
2267 {
2268 end_sequence ();
2269 return expand_call (exp, target, target == const0_rtx(const_int_rtx[64]));
2270 }
2271
2272 /* Output the entire sequence. */
2273 insns = get_insns ();
2274 end_sequence ();
2275 emit_insn (insns);
2276
2277 return result;
2278}
2279
2280/* Expand a call to the builtin sin and cos math functions.
2281 Return NULL_RTX if a normal call should be emitted rather than expanding the
2282 function in-line. EXP is the expression that is a call to the builtin
2283 function; if convenient, the result should be placed in TARGET.
2284 SUBTARGET may be used as the target for computing one of EXP's
2285 operands. */
2286
2287static rtx
2288expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2289{
2290 optab builtin_optab;
2291 rtx op0;
2292 rtx_insn *insns;
2293 tree fndecl = get_callee_fndecl (exp);
2294 machine_mode mode;
2295 tree arg;
2296
2297 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2298 return NULL_RTX(rtx) 0;
2299
2300 arg = CALL_EXPR_ARG (exp, 0)(*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2300, __FUNCTION__, (CALL_EXPR)))), ((0) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2300, __FUNCTION__)))))
;
2301
2302 switch (DECL_FUNCTION_CODE (fndecl))
2303 {
2304 CASE_FLT_FN (BUILT_IN_SIN)case BUILT_IN_SIN: case BUILT_IN_SINF: case BUILT_IN_SINL:
2305 CASE_FLT_FN (BUILT_IN_COS)case BUILT_IN_COS: case BUILT_IN_COSF: case BUILT_IN_COSL:
2306 builtin_optab = sincos_optab; break;
2307 default:
2308 gcc_unreachable ()(fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2308, __FUNCTION__))
;
2309 }
2310
2311 /* Make a suitable register to place result in. */
2312 mode = TYPE_MODE (TREE_TYPE (exp))((((enum tree_code) ((tree_class_check ((((contains_struct_check
((exp), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2312, __FUNCTION__))->typed.type)), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2312, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(((contains_struct_check ((exp), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2312, __FUNCTION__))->typed.type)) : (((contains_struct_check
((exp), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2312, __FUNCTION__))->typed.type))->type_common.mode)
;
2313
2314 /* Check if sincos insn is available, otherwise fallback
2315 to sin or cos insn. */
2316 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2317 switch (DECL_FUNCTION_CODE (fndecl))
2318 {
2319 CASE_FLT_FN (BUILT_IN_SIN)case BUILT_IN_SIN: case BUILT_IN_SINF: case BUILT_IN_SINL:
2320 builtin_optab = sin_optab; break;
2321 CASE_FLT_FN (BUILT_IN_COS)case BUILT_IN_COS: case BUILT_IN_COSF: case BUILT_IN_COSL:
2322 builtin_optab = cos_optab; break;
2323 default:
2324 gcc_unreachable ()(fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2324, __FUNCTION__))
;
2325 }
2326
2327 /* Before working hard, check whether the instruction is available. */
2328 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2329 {
2330 rtx result = gen_reg_rtx (mode);
2331
2332 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2333 need to expand the argument again. This way, we will not perform
2334 side-effects more the once. */
2335 CALL_EXPR_ARG (exp, 0)(*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2335, __FUNCTION__, (CALL_EXPR)))), ((0) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2335, __FUNCTION__)))))
= arg = builtin_save_expr (arg);
2336
2337 op0 = expand_expr (arg, subtarget, VOIDmode((void) 0, E_VOIDmode), EXPAND_NORMAL);
2338
2339 start_sequence ();
2340
2341 /* Compute into RESULT.
2342 Set RESULT to wherever the result comes back. */
2343 if (builtin_optab == sincos_optab)
2344 {
2345 int ok;
2346
2347 switch (DECL_FUNCTION_CODE (fndecl))
2348 {
2349 CASE_FLT_FN (BUILT_IN_SIN)case BUILT_IN_SIN: case BUILT_IN_SINF: case BUILT_IN_SINL:
2350 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2351 break;
2352 CASE_FLT_FN (BUILT_IN_COS)case BUILT_IN_COS: case BUILT_IN_COSF: case BUILT_IN_COSL:
2353 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2354 break;
2355 default:
2356 gcc_unreachable ()(fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2356, __FUNCTION__))
;
2357 }
2358 gcc_assert (ok)((void)(!(ok) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2358, __FUNCTION__), 0 : 0))
;
2359 }
2360 else
2361 result = expand_unop (mode, builtin_optab, op0, result, 0);
2362
2363 if (result != 0)
2364 {
2365 /* Output the entire sequence. */
2366 insns = get_insns ();
2367 end_sequence ();
2368 emit_insn (insns);
2369 return result;
2370 }
2371
2372 /* If we were unable to expand via the builtin, stop the sequence
2373 (without outputting the insns) and call to the library function
2374 with the stabilized argument list. */
2375 end_sequence ();
2376 }
2377
2378 return expand_call (exp, target, target == const0_rtx(const_int_rtx[64]));
2379}
2380
2381/* Given an interclass math builtin decl FNDECL and it's argument ARG
2382 return an RTL instruction code that implements the functionality.
2383 If that isn't possible or available return CODE_FOR_nothing. */
2384
2385static enum insn_code
2386interclass_mathfn_icode (tree arg, tree fndecl)
2387{
2388 bool errno_set = false;
2389 optab builtin_optab = unknown_optab;
2390 machine_mode mode;
2391
2392 switch (DECL_FUNCTION_CODE (fndecl))
2393 {
2394 CASE_FLT_FN (BUILT_IN_ILOGB)case BUILT_IN_ILOGB: case BUILT_IN_ILOGBF: case BUILT_IN_ILOGBL:
2395 errno_set = true; builtin_optab = ilogb_optab; break;
2396 CASE_FLT_FN (BUILT_IN_ISINF)case BUILT_IN_ISINF: case BUILT_IN_ISINFF: case BUILT_IN_ISINFL:
2397 builtin_optab = isinf_optab; break;
2398 case BUILT_IN_ISNORMAL:
2399 case BUILT_IN_ISFINITE:
2400 CASE_FLT_FN (BUILT_IN_FINITE)case BUILT_IN_FINITE: case BUILT_IN_FINITEF: case BUILT_IN_FINITEL:
2401 case BUILT_IN_FINITED32:
2402 case BUILT_IN_FINITED64:
2403 case BUILT_IN_FINITED128:
2404 case BUILT_IN_ISINFD32:
2405 case BUILT_IN_ISINFD64:
2406 case BUILT_IN_ISINFD128:
2407 /* These builtins have no optabs (yet). */
2408 break;
2409 default:
2410 gcc_unreachable ()(fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2410, __FUNCTION__))
;
2411 }
2412
2413 /* There's no easy way to detect the case we need to set EDOM. */
2414 if (flag_errno_mathglobal_options.x_flag_errno_math && errno_set)
2415 return CODE_FOR_nothing;
2416
2417 /* Optab mode depends on the mode of the input argument. */
2418 mode = TYPE_MODE (TREE_TYPE (arg))((((enum tree_code) ((tree_class_check ((((contains_struct_check
((arg), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2418, __FUNCTION__))->typed.type)), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2418, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(((contains_struct_check ((arg), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2418, __FUNCTION__))->typed.type)) : (((contains_struct_check
((arg), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2418, __FUNCTION__))->typed.type))->type_common.mode)
;
2419
2420 if (builtin_optab)
2421 return optab_handler (builtin_optab, mode);
2422 return CODE_FOR_nothing;
2423}
2424
2425/* Expand a call to one of the builtin math functions that operate on
2426 floating point argument and output an integer result (ilogb, isinf,
2427 isnan, etc).
2428 Return 0 if a normal call should be emitted rather than expanding the
2429 function in-line. EXP is the expression that is a call to the builtin
2430 function; if convenient, the result should be placed in TARGET. */
2431
2432static rtx
2433expand_builtin_interclass_mathfn (tree exp, rtx target)
2434{
2435 enum insn_code icode = CODE_FOR_nothing;
2436 rtx op0;
2437 tree fndecl = get_callee_fndecl (exp);
2438 machine_mode mode;
2439 tree arg;
2440
2441 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2442 return NULL_RTX(rtx) 0;
2443
2444 arg = CALL_EXPR_ARG (exp, 0)(*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2444, __FUNCTION__, (CALL_EXPR)))), ((0) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2444, __FUNCTION__)))))
;
2445 icode = interclass_mathfn_icode (arg, fndecl);
2446 mode = TYPE_MODE (TREE_TYPE (arg))((((enum tree_code) ((tree_class_check ((((contains_struct_check
((arg), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2446, __FUNCTION__))->typed.type)), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2446, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(((contains_struct_check ((arg), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2446, __FUNCTION__))->typed.type)) : (((contains_struct_check
((arg), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2446, __FUNCTION__))->typed.type))->type_common.mode)
;
2447
2448 if (icode != CODE_FOR_nothing)
2449 {
2450 class expand_operand ops[1];
2451 rtx_insn *last = get_last_insn ();
2452 tree orig_arg = arg;
2453
2454 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2455 need to expand the argument again. This way, we will not perform
2456 side-effects more the once. */
2457 CALL_EXPR_ARG (exp, 0)(*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2457, __FUNCTION__, (CALL_EXPR)))), ((0) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2457, __FUNCTION__)))))
= arg = builtin_save_expr (arg);
2458
2459 op0 = expand_expr (arg, NULL_RTX(rtx) 0, VOIDmode((void) 0, E_VOIDmode), EXPAND_NORMAL);
2460
2461 if (mode != GET_MODE (op0)((machine_mode) (op0)->mode))
2462 op0 = convert_to_mode (mode, op0, 0);
2463
2464 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp))((((enum tree_code) ((tree_class_check ((((contains_struct_check
((exp), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2464, __FUNCTION__))->typed.type)), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2464, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(((contains_struct_check ((exp), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2464, __FUNCTION__))->typed.type)) : (((contains_struct_check
((exp), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2464, __FUNCTION__))->typed.type))->type_common.mode)
);
2465 if (maybe_legitimize_operands (icode, 0, 1, ops)
2466 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2467 return ops[0].value;
2468
2469 delete_insns_since (last);
2470 CALL_EXPR_ARG (exp, 0)(*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2470, __FUNCTION__, (CALL_EXPR)))), ((0) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2470, __FUNCTION__)))))
= orig_arg;
2471 }
2472
2473 return NULL_RTX(rtx) 0;
2474}
2475
2476/* Expand a call to the builtin sincos math function.
2477 Return NULL_RTX if a normal call should be emitted rather than expanding the
2478 function in-line. EXP is the expression that is a call to the builtin
2479 function. */
2480
2481static rtx
2482expand_builtin_sincos (tree exp)
2483{
2484 rtx op0, op1, op2, target1, target2;
2485 machine_mode mode;
2486 tree arg, sinp, cosp;
2487 int result;
2488 location_t loc = EXPR_LOCATION (exp)((((exp)) && ((tree_code_type[(int) (((enum tree_code
) ((exp))->base.code))]) >= tcc_reference && (tree_code_type
[(int) (((enum tree_code) ((exp))->base.code))]) <= tcc_expression
)) ? (exp)->exp.locus : ((location_t) 0))
;
2489 tree alias_type, alias_off;
2490
2491 if (!validate_arglist (exp, REAL_TYPE,
2492 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2493 return NULL_RTX(rtx) 0;
2494
2495 arg = CALL_EXPR_ARG (exp, 0)(*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2495, __FUNCTION__, (CALL_EXPR)))), ((0) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2495, __FUNCTION__)))))
;
2496 sinp = CALL_EXPR_ARG (exp, 1)(*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2496, __FUNCTION__, (CALL_EXPR)))), ((1) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2496, __FUNCTION__)))))
;
2497 cosp = CALL_EXPR_ARG (exp, 2)(*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2497, __FUNCTION__, (CALL_EXPR)))), ((2) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2497, __FUNCTION__)))))
;
2498
2499 /* Make a suitable register to place result in. */
2500 mode = TYPE_MODE (TREE_TYPE (arg))((((enum tree_code) ((tree_class_check ((((contains_struct_check
((arg), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2500, __FUNCTION__))->typed.type)), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2500, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(((contains_struct_check ((arg), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2500, __FUNCTION__))->typed.type)) : (((contains_struct_check
((arg), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2500, __FUNCTION__))->typed.type))->type_common.mode)
;
2501
2502 /* Check if sincos insn is available, otherwise emit the call. */
2503 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2504 return NULL_RTX(rtx) 0;
2505
2506 target1 = gen_reg_rtx (mode);
2507 target2 = gen_reg_rtx (mode);
2508
2509 op0 = expand_normal (arg);
2510 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg)((contains_struct_check ((arg), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2510, __FUNCTION__))->typed.type)
, ptr_mode, true);
2511 alias_off = build_int_cst (alias_type, 0);
2512 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg)((contains_struct_check ((arg), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2512, __FUNCTION__))->typed.type)
,
2513 sinp, alias_off));
2514 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg)((contains_struct_check ((arg), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2514, __FUNCTION__))->typed.type)
,
2515 cosp, alias_off));
2516
2517 /* Compute into target1 and target2.
2518 Set TARGET to wherever the result comes back. */
2519 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2520 gcc_assert (result)((void)(!(result) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2520, __FUNCTION__), 0 : 0))
;
2521
2522 /* Move target1 and target2 to the memory locations indicated
2523 by op1 and op2. */
2524 emit_move_insn (op1, target1);
2525 emit_move_insn (op2, target2);
2526
2527 return const0_rtx(const_int_rtx[64]);
2528}
2529
2530/* Expand a call to the internal cexpi builtin to the sincos math function.
2531 EXP is the expression that is a call to the builtin function; if convenient,
2532 the result should be placed in TARGET. */
2533
2534static rtx
2535expand_builtin_cexpi (tree exp, rtx target)
2536{
2537 tree fndecl = get_callee_fndecl (exp);
2538 tree arg, type;
2539 machine_mode mode;
2540 rtx op0, op1, op2;
2541 location_t loc = EXPR_LOCATION (exp)((((exp)) && ((tree_code_type[(int) (((enum tree_code
) ((exp))->base.code))]) >= tcc_reference && (tree_code_type
[(int) (((enum tree_code) ((exp))->base.code))]) <= tcc_expression
)) ? (exp)->exp.locus : ((location_t) 0))
;
2542
2543 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2544 return NULL_RTX(rtx) 0;
2545
2546 arg = CALL_EXPR_ARG (exp, 0)(*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2546, __FUNCTION__, (CALL_EXPR)))), ((0) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2546, __FUNCTION__)))))
;
2547 type = TREE_TYPE (arg)((contains_struct_check ((arg), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2547, __FUNCTION__))->typed.type)
;
2548 mode = TYPE_MODE (TREE_TYPE (arg))((((enum tree_code) ((tree_class_check ((((contains_struct_check
((arg), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2548, __FUNCTION__))->typed.type)), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2548, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(((contains_struct_check ((arg), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2548, __FUNCTION__))->typed.type)) : (((contains_struct_check
((arg), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2548, __FUNCTION__))->typed.type))->type_common.mode)
;
2549
2550 /* Try expanding via a sincos optab, fall back to emitting a libcall
2551 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2552 is only generated from sincos, cexp or if we have either of them. */
2553 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2554 {
2555 op1 = gen_reg_rtx (mode);
2556 op2 = gen_reg_rtx (mode);
2557
2558 op0 = expand_expr (arg, NULL_RTX(rtx) 0, VOIDmode((void) 0, E_VOIDmode), EXPAND_NORMAL);
2559
2560 /* Compute into op1 and op2. */
2561 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2562 }
2563 else if (targetm.libc_has_function (function_sincos, type))
2564 {
2565 tree call, fn = NULL_TREE(tree) __null;
2566 tree top1, top2;
2567 rtx op1a, op2a;
2568
2569 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2570 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2571 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2572 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2573 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2574 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2575 else
2576 gcc_unreachable ()(fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2576, __FUNCTION__))
;
2577
2578 op1 = assign_temp (TREE_TYPE (arg)((contains_struct_check ((arg), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2578, __FUNCTION__))->typed.type)
, 1, 1);
2579 op2 = assign_temp (TREE_TYPE (arg)((contains_struct_check ((arg), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2579, __FUNCTION__))->typed.type)
, 1, 1);
2580 op1a = copy_addr_to_reg (XEXP (op1, 0)(((op1)->u.fld[0]).rt_rtx));
2581 op2a = copy_addr_to_reg (XEXP (op2, 0)(((op2)->u.fld[0]).rt_rtx));
2582 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)((contains_struct_check ((arg), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2582, __FUNCTION__))->typed.type)
), op1a);
2583 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)((contains_struct_check ((arg), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2583, __FUNCTION__))->typed.type)
), op2a);
2584
2585 /* Make sure not to fold the sincos call again. */
2586 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)((contains_struct_check ((fn), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2586, __FUNCTION__))->typed.type)
), fn);
2587 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn))((contains_struct_check ((((contains_struct_check ((fn), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2587, __FUNCTION__))->typed.type)), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2587, __FUNCTION__))->typed.type)
,
2588 call, 3, arg, top1, top2));
2589 }
2590 else
2591 {
2592 tree call, fn = NULL_TREE(tree) __null, narg;
2593 tree ctype = build_complex_type (type);
2594
2595 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2596 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2597 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2598 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2599 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2600 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2601 else
2602 gcc_unreachable ()(fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2602, __FUNCTION__))
;
2603
2604 /* If we don't have a decl for cexp create one. This is the
2605 friendliest fallback if the user calls __builtin_cexpi
2606 without full target C99 function support. */
2607 if (fn == NULL_TREE(tree) __null)
2608 {
2609 tree fntype;
2610 const char *name = NULL__null;
2611
2612 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2613 name = "cexpf";
2614 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2615 name = "cexp";
2616 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2617 name = "cexpl";
2618
2619 fntype = build_function_type_list (ctype, ctype, NULL_TREE(tree) __null);
2620 fn = build_fn_decl (name, fntype);
2621 }
2622
2623 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2624 build_real (type, dconst0), arg);
2625
2626 /* Make sure not to fold the cexp call again. */
2627 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)((contains_struct_check ((fn), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2627, __FUNCTION__))->typed.type)
), fn);
2628 return expand_expr (build_call_nary (ctype, call, 1, narg),
2629 target, VOIDmode((void) 0, E_VOIDmode), EXPAND_NORMAL);
2630 }
2631
2632 /* Now build the proper return type. */
2633 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2634 make_tree (TREE_TYPE (arg)((contains_struct_check ((arg), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2634, __FUNCTION__))->typed.type)
, op2),
2635 make_tree (TREE_TYPE (arg)((contains_struct_check ((arg), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2635, __FUNCTION__))->typed.type)
, op1)),
2636 target, VOIDmode((void) 0, E_VOIDmode), EXPAND_NORMAL);
2637}
2638
2639/* Conveniently construct a function call expression. FNDECL names the
2640 function to be called, N is the number of arguments, and the "..."
2641 parameters are the argument expressions. Unlike build_call_exr
2642 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2643
2644static tree
2645build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2646{
2647 va_list ap;
2648 tree fntype = TREE_TYPE (fndecl)((contains_struct_check ((fndecl), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2648, __FUNCTION__))->typed.type)
;
2649 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2650
2651 va_start (ap, n)__builtin_va_start(ap, n);
2652 fn = build_call_valist (TREE_TYPE (fntype)((contains_struct_check ((fntype), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2652, __FUNCTION__))->typed.type)
, fn, n, ap);
2653 va_end (ap)__builtin_va_end(ap);
2654 SET_EXPR_LOCATION (fn, loc)(expr_check (((fn)), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2654, __FUNCTION__))->exp.locus = (loc)
;
2655 return fn;
2656}
2657
2658/* Expand a call to one of the builtin rounding functions gcc defines
2659 as an extension (lfloor and lceil). As these are gcc extensions we
2660 do not need to worry about setting errno to EDOM.
2661 If expanding via optab fails, lower expression to (int)(floor(x)).
2662 EXP is the expression that is a call to the builtin function;
2663 if convenient, the result should be placed in TARGET. */
2664
2665static rtx
2666expand_builtin_int_roundingfn (tree exp, rtx target)
2667{
2668 convert_optab builtin_optab;
2669 rtx op0, tmp;
2670 rtx_insn *insns;
2671 tree fndecl = get_callee_fndecl (exp);
2672 enum built_in_function fallback_fn;
2673 tree fallback_fndecl;
2674 machine_mode mode;
2675 tree arg;
2676
2677 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2678 return NULL_RTX(rtx) 0;
2679
2680 arg = CALL_EXPR_ARG (exp, 0)(*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2680, __FUNCTION__, (CALL_EXPR)))), ((0) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2680, __FUNCTION__)))))
;
2681
2682 switch (DECL_FUNCTION_CODE (fndecl))
2683 {
2684 CASE_FLT_FN (BUILT_IN_ICEIL)case BUILT_IN_ICEIL: case BUILT_IN_ICEILF: case BUILT_IN_ICEILL:
2685 CASE_FLT_FN (BUILT_IN_LCEIL)case BUILT_IN_LCEIL: case BUILT_IN_LCEILF: case BUILT_IN_LCEILL:
2686 CASE_FLT_FN (BUILT_IN_LLCEIL)case BUILT_IN_LLCEIL: case BUILT_IN_LLCEILF: case BUILT_IN_LLCEILL:
2687 builtin_optab = lceil_optab;
2688 fallback_fn = BUILT_IN_CEIL;
2689 break;
2690
2691 CASE_FLT_FN (BUILT_IN_IFLOOR)case BUILT_IN_IFLOOR: case BUILT_IN_IFLOORF: case BUILT_IN_IFLOORL:
2692 CASE_FLT_FN (BUILT_IN_LFLOOR)case BUILT_IN_LFLOOR: case BUILT_IN_LFLOORF: case BUILT_IN_LFLOORL:
2693 CASE_FLT_FN (BUILT_IN_LLFLOOR)case BUILT_IN_LLFLOOR: case BUILT_IN_LLFLOORF: case BUILT_IN_LLFLOORL:
2694 builtin_optab = lfloor_optab;
2695 fallback_fn = BUILT_IN_FLOOR;
2696 break;
2697
2698 default:
2699 gcc_unreachable ()(fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2699, __FUNCTION__))
;
2700 }
2701
2702 /* Make a suitable register to place result in. */
2703 mode = TYPE_MODE (TREE_TYPE (exp))((((enum tree_code) ((tree_class_check ((((contains_struct_check
((exp), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2703, __FUNCTION__))->typed.type)), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2703, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(((contains_struct_check ((exp), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2703, __FUNCTION__))->typed.type)) : (((contains_struct_check
((exp), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2703, __FUNCTION__))->typed.type))->type_common.mode)
;
2704
2705 target = gen_reg_rtx (mode);
2706
2707 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2708 need to expand the argument again. This way, we will not perform
2709 side-effects more the once. */
2710 CALL_EXPR_ARG (exp, 0)(*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2710, __FUNCTION__, (CALL_EXPR)))), ((0) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2710, __FUNCTION__)))))
= arg = builtin_save_expr (arg);
2711
2712 op0 = expand_expr (arg, NULL__null, VOIDmode((void) 0, E_VOIDmode), EXPAND_NORMAL);
2713
2714 start_sequence ();
2715
2716 /* Compute into TARGET. */
2717 if (expand_sfix_optab (target, op0, builtin_optab))
2718 {
2719 /* Output the entire sequence. */
2720 insns = get_insns ();
2721 end_sequence ();
2722 emit_insn (insns);
2723 return target;
2724 }
2725
2726 /* If we were unable to expand via the builtin, stop the sequence
2727 (without outputting the insns). */
2728 end_sequence ();
2729
2730 /* Fall back to floating point rounding optab. */
2731 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg)((contains_struct_check ((arg), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2731, __FUNCTION__))->typed.type)
, fallback_fn);
2732
2733 /* For non-C99 targets we may end up without a fallback fndecl here
2734 if the user called __builtin_lfloor directly. In this case emit
2735 a call to the floor/ceil variants nevertheless. This should result
2736 in the best user experience for not full C99 targets. */
2737 if (fallback_fndecl == NULL_TREE(tree) __null)
2738 {
2739 tree fntype;
2740 const char *name = NULL__null;
2741
2742 switch (DECL_FUNCTION_CODE (fndecl))
2743 {
2744 case BUILT_IN_ICEIL:
2745 case BUILT_IN_LCEIL:
2746 case BUILT_IN_LLCEIL:
2747 name = "ceil";
2748 break;
2749 case BUILT_IN_ICEILF:
2750 case BUILT_IN_LCEILF:
2751 case BUILT_IN_LLCEILF:
2752 name = "ceilf";
2753 break;
2754 case BUILT_IN_ICEILL:
2755 case BUILT_IN_LCEILL:
2756 case BUILT_IN_LLCEILL:
2757 name = "ceill";
2758 break;
2759 case BUILT_IN_IFLOOR:
2760 case BUILT_IN_LFLOOR:
2761 case BUILT_IN_LLFLOOR:
2762 name = "floor";
2763 break;
2764 case BUILT_IN_IFLOORF:
2765 case BUILT_IN_LFLOORF:
2766 case BUILT_IN_LLFLOORF:
2767 name = "floorf";
2768 break;
2769 case BUILT_IN_IFLOORL:
2770 case BUILT_IN_LFLOORL:
2771 case BUILT_IN_LLFLOORL:
2772 name = "floorl";
2773 break;
2774 default:
2775 gcc_unreachable ()(fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2775, __FUNCTION__))
;
2776 }
2777
2778 fntype = build_function_type_list (TREE_TYPE (arg)((contains_struct_check ((arg), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2778, __FUNCTION__))->typed.type)
,
2779 TREE_TYPE (arg)((contains_struct_check ((arg), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2779, __FUNCTION__))->typed.type)
, NULL_TREE(tree) __null);
2780 fallback_fndecl = build_fn_decl (name, fntype);
2781 }
2782
2783 exp = build_call_nofold_loc (EXPR_LOCATION (exp)((((exp)) && ((tree_code_type[(int) (((enum tree_code
) ((exp))->base.code))]) >= tcc_reference && (tree_code_type
[(int) (((enum tree_code) ((exp))->base.code))]) <= tcc_expression
)) ? (exp)->exp.locus : ((location_t) 0))
, fallback_fndecl, 1, arg);
2784
2785 tmp = expand_normal (exp);
2786 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp)((contains_struct_check ((exp), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2786, __FUNCTION__))->typed.type)
);
2787
2788 /* Truncate the result of floating point optab to integer
2789 via expand_fix (). */
2790 target = gen_reg_rtx (mode);
2791 expand_fix (target, tmp, 0);
2792
2793 return target;
2794}
2795
2796/* Expand a call to one of the builtin math functions doing integer
2797 conversion (lrint).
2798 Return 0 if a normal call should be emitted rather than expanding the
2799 function in-line. EXP is the expression that is a call to the builtin
2800 function; if convenient, the result should be placed in TARGET. */
2801
2802static rtx
2803expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2804{
2805 convert_optab builtin_optab;
2806 rtx op0;
2807 rtx_insn *insns;
2808 tree fndecl = get_callee_fndecl (exp);
2809 tree arg;
2810 machine_mode mode;
2811 enum built_in_function fallback_fn = BUILT_IN_NONE;
2812
2813 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2814 return NULL_RTX(rtx) 0;
2815
2816 arg = CALL_EXPR_ARG (exp, 0)(*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2816, __FUNCTION__, (CALL_EXPR)))), ((0) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2816, __FUNCTION__)))))
;
2817
2818 switch (DECL_FUNCTION_CODE (fndecl))
2819 {
2820 CASE_FLT_FN (BUILT_IN_IRINT)case BUILT_IN_IRINT: case BUILT_IN_IRINTF: case BUILT_IN_IRINTL:
2821 fallback_fn = BUILT_IN_LRINT;
2822 gcc_fallthrough ();
2823 CASE_FLT_FN (BUILT_IN_LRINT)case BUILT_IN_LRINT: case BUILT_IN_LRINTF: case BUILT_IN_LRINTL:
2824 CASE_FLT_FN (BUILT_IN_LLRINT)case BUILT_IN_LLRINT: case BUILT_IN_LLRINTF: case BUILT_IN_LLRINTL:
2825 builtin_optab = lrint_optab;
2826 break;
2827
2828 CASE_FLT_FN (BUILT_IN_IROUND)case BUILT_IN_IROUND: case BUILT_IN_IROUNDF: case BUILT_IN_IROUNDL:
2829 fallback_fn = BUILT_IN_LROUND;
2830 gcc_fallthrough ();
2831 CASE_FLT_FN (BUILT_IN_LROUND)case BUILT_IN_LROUND: case BUILT_IN_LROUNDF: case BUILT_IN_LROUNDL:
2832 CASE_FLT_FN (BUILT_IN_LLROUND)case BUILT_IN_LLROUND: case BUILT_IN_LLROUNDF: case BUILT_IN_LLROUNDL:
2833 builtin_optab = lround_optab;
2834 break;
2835
2836 default:
2837 gcc_unreachable ()(fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2837, __FUNCTION__))
;
2838 }
2839
2840 /* There's no easy way to detect the case we need to set EDOM. */
2841 if (flag_errno_mathglobal_options.x_flag_errno_math && fallback_fn == BUILT_IN_NONE)
2842 return NULL_RTX(rtx) 0;
2843
2844 /* Make a suitable register to place result in. */
2845 mode = TYPE_MODE (TREE_TYPE (exp))((((enum tree_code) ((tree_class_check ((((contains_struct_check
((exp), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2845, __FUNCTION__))->typed.type)), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2845, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(((contains_struct_check ((exp), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2845, __FUNCTION__))->typed.type)) : (((contains_struct_check
((exp), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2845, __FUNCTION__))->typed.type))->type_common.mode)
;
2846
2847 /* There's no easy way to detect the case we need to set EDOM. */
2848 if (!flag_errno_mathglobal_options.x_flag_errno_math)
2849 {
2850 rtx result = gen_reg_rtx (mode);
2851
2852 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2853 need to expand the argument again. This way, we will not perform
2854 side-effects more the once. */
2855 CALL_EXPR_ARG (exp, 0)(*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2855, __FUNCTION__, (CALL_EXPR)))), ((0) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2855, __FUNCTION__)))))
= arg = builtin_save_expr (arg);
2856
2857 op0 = expand_expr (arg, NULL__null, VOIDmode((void) 0, E_VOIDmode), EXPAND_NORMAL);
2858
2859 start_sequence ();
2860
2861 if (expand_sfix_optab (result, op0, builtin_optab))
2862 {
2863 /* Output the entire sequence. */
2864 insns = get_insns ();
2865 end_sequence ();
2866 emit_insn (insns);
2867 return result;
2868 }
2869
2870 /* If we were unable to expand via the builtin, stop the sequence
2871 (without outputting the insns) and call to the library function
2872 with the stabilized argument list. */
2873 end_sequence ();
2874 }
2875
2876 if (fallback_fn != BUILT_IN_NONE)
2877 {
2878 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2879 targets, (int) round (x) should never be transformed into
2880 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2881 a call to lround in the hope that the target provides at least some
2882 C99 functions. This should result in the best user experience for
2883 not full C99 targets. */
2884 tree fallback_fndecl = mathfn_built_in_1
2885 (TREE_TYPE (arg)((contains_struct_check ((arg), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2885, __FUNCTION__))->typed.type)
, as_combined_fn (fallback_fn), 0);
2886
2887 exp = build_call_nofold_loc (EXPR_LOCATION (exp)((((exp)) && ((tree_code_type[(int) (((enum tree_code
) ((exp))->base.code))]) >= tcc_reference && (tree_code_type
[(int) (((enum tree_code) ((exp))->base.code))]) <= tcc_expression
)) ? (exp)->exp.locus : ((location_t) 0))
,
2888 fallback_fndecl, 1, arg);
2889
2890 target = expand_call (exp, NULL_RTX(rtx) 0, target == const0_rtx(const_int_rtx[64]));
2891 target = maybe_emit_group_store (target, TREE_TYPE (exp)((contains_struct_check ((exp), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2891, __FUNCTION__))->typed.type)
);
2892 return convert_to_mode (mode, target, 0);
2893 }
2894
2895 return expand_call (exp, target, target == const0_rtx(const_int_rtx[64]));
2896}
2897
2898/* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2899 a normal call should be emitted rather than expanding the function
2900 in-line. EXP is the expression that is a call to the builtin
2901 function; if convenient, the result should be placed in TARGET. */
2902
2903static rtx
2904expand_builtin_powi (tree exp, rtx target)
2905{
2906 tree arg0, arg1;
2907 rtx op0, op1;
2908 machine_mode mode;
2909 machine_mode mode2;
2910
2911 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2912 return NULL_RTX(rtx) 0;
2913
2914 arg0 = CALL_EXPR_ARG (exp, 0)(*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2914, __FUNCTION__, (CALL_EXPR)))), ((0) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2914, __FUNCTION__)))))
;
2915 arg1 = CALL_EXPR_ARG (exp, 1)(*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2915, __FUNCTION__, (CALL_EXPR)))), ((1) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2915, __FUNCTION__)))))
;
2916 mode = TYPE_MODE (TREE_TYPE (exp))((((enum tree_code) ((tree_class_check ((((contains_struct_check
((exp), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2916, __FUNCTION__))->typed.type)), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2916, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(((contains_struct_check ((exp), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2916, __FUNCTION__))->typed.type)) : (((contains_struct_check
((exp), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2916, __FUNCTION__))->typed.type))->type_common.mode)
;
2917
2918 /* Emit a libcall to libgcc. */
2919
2920 /* Mode of the 2nd argument must match that of an int. */
2921 mode2 = int_mode_for_size (INT_TYPE_SIZE32, 0).require ();
2922
2923 if (target == NULL_RTX(rtx) 0)
2924 target = gen_reg_rtx (mode);
2925
2926 op0 = expand_expr (arg0, NULL_RTX(rtx) 0, mode, EXPAND_NORMAL);
2927 if (GET_MODE (op0)((machine_mode) (op0)->mode) != mode)
2928 op0 = convert_to_mode (mode, op0, 0);
2929 op1 = expand_expr (arg1, NULL_RTX(rtx) 0, mode2, EXPAND_NORMAL);
2930 if (GET_MODE (op1)((machine_mode) (op1)->mode) != mode2)
2931 op1 = convert_to_mode (mode2, op1, 0);
2932
2933 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2934 target, LCT_CONST, mode,
2935 op0, mode, op1, mode2);
2936
2937 return target;
2938}
2939
2940/* Expand expression EXP which is a call to the strlen builtin. Return
2941 NULL_RTX if we failed and the caller should emit a normal call, otherwise
2942 try to get the result in TARGET, if convenient. */
2943
2944static rtx
2945expand_builtin_strlen (tree exp, rtx target,
2946 machine_mode target_mode)
2947{
2948 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2949 return NULL_RTX(rtx) 0;
2950
2951 tree src = CALL_EXPR_ARG (exp, 0)(*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2951, __FUNCTION__, (CALL_EXPR)))), ((0) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2951, __FUNCTION__)))))
;
2952
2953 /* If the length can be computed at compile-time, return it. */
2954 if (tree len = c_strlen (src, 0))
2955 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2956
2957 /* If the length can be computed at compile-time and is constant
2958 integer, but there are side-effects in src, evaluate
2959 src for side-effects, then return len.
2960 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2961 can be optimized into: i++; x = 3; */
2962 tree len = c_strlen (src, 1);
2963 if (len && TREE_CODE (len)((enum tree_code) (len)->base.code) == INTEGER_CST)
2964 {
2965 expand_expr (src, const0_rtx(const_int_rtx[64]), VOIDmode((void) 0, E_VOIDmode), EXPAND_NORMAL);
2966 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2967 }
2968
2969 unsigned int align = get_pointer_alignment (src) / BITS_PER_UNIT(8);
2970
2971 /* If SRC is not a pointer type, don't do this operation inline. */
2972 if (align == 0)
2973 return NULL_RTX(rtx) 0;
2974
2975 /* Bail out if we can't compute strlen in the right mode. */
2976 machine_mode insn_mode;
2977 enum insn_code icode = CODE_FOR_nothing;
2978 FOR_EACH_MODE_FROM (insn_mode, target_mode)for ((insn_mode) = (target_mode); mode_iterator::iterate_p (&
(insn_mode)); mode_iterator::get_wider (&(insn_mode)))
2979 {
2980 icode = optab_handler (strlen_optab, insn_mode);
2981 if (icode != CODE_FOR_nothing)
2982 break;
2983 }
2984 if (insn_mode == VOIDmode((void) 0, E_VOIDmode))
2985 return NULL_RTX(rtx) 0;
2986
2987 /* Make a place to hold the source address. We will not expand
2988 the actual source until we are sure that the expansion will
2989 not fail -- there are trees that cannot be expanded twice. */
2990 rtx src_reg = gen_reg_rtx (Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode (
(scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode
::from_int) E_SImode)))
);
2991
2992 /* Mark the beginning of the strlen sequence so we can emit the
2993 source operand later. */
2994 rtx_insn *before_strlen = get_last_insn ();
2995
2996 class expand_operand ops[4];
2997 create_output_operand (&ops[0], target, insn_mode);
2998 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode((void) 0, E_BLKmode), src_reg));
2999 create_integer_operand (&ops[2], 0);
3000 create_integer_operand (&ops[3], align);
3001 if (!maybe_expand_insn (icode, 4, ops))
3002 return NULL_RTX(rtx) 0;
3003
3004 /* Check to see if the argument was declared attribute nonstring
3005 and if so, issue a warning since at this point it's not known
3006 to be nul-terminated. */
3007 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
3008
3009 /* Now that we are assured of success, expand the source. */
3010 start_sequence ();
3011 rtx pat = expand_expr (src, src_reg, Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode (
(scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode
::from_int) E_SImode)))
, EXPAND_NORMAL);
3012 if (pat != src_reg)
3013 {
3014#ifdef POINTERS_EXTEND_UNSIGNED1
3015 if (GET_MODE (pat)((machine_mode) (pat)->mode) != Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode (
(scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode
::from_int) E_SImode)))
)
3016 pat = convert_to_mode (Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode (
(scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode
::from_int) E_SImode)))
, pat,
3017 POINTERS_EXTEND_UNSIGNED1);
3018#endif
3019 emit_move_insn (src_reg, pat);
3020 }
3021 pat = get_insns ();
3022 end_sequence ();
3023
3024 if (before_strlen)
3025 emit_insn_after (pat, before_strlen);
3026 else
3027 emit_insn_before (pat, get_insns ());
3028
3029 /* Return the value in the proper mode for this function. */
3030 if (GET_MODE (ops[0].value)((machine_mode) (ops[0].value)->mode) == target_mode)
3031 target = ops[0].value;
3032 else if (target != 0)
3033 convert_move (target, ops[0].value, 0);
3034 else
3035 target = convert_to_mode (target_mode, ops[0].value, 0);
3036
3037 return target;
3038}
3039
3040/* Expand call EXP to the strnlen built-in, returning the result
3041 and setting it in TARGET. Otherwise return NULL_RTX on failure. */
3042
3043static rtx
3044expand_builtin_strnlen (tree exp, rtx target, machine_mode target_mode)
3045{
3046 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3047 return NULL_RTX(rtx) 0;
3048
3049 tree src = CALL_EXPR_ARG (exp, 0)(*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3049, __FUNCTION__, (CALL_EXPR)))), ((0) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3049, __FUNCTION__)))))
;
3050 tree bound = CALL_EXPR_ARG (exp, 1)(*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3050, __FUNCTION__, (CALL_EXPR)))), ((1) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3050, __FUNCTION__)))))
;
3051
3052 if (!bound)
3053 return NULL_RTX(rtx) 0;
3054
3055 location_t loc = UNKNOWN_LOCATION((location_t) 0);
3056 if (EXPR_HAS_LOCATION (exp)(((IS_ADHOC_LOC (((((exp)) && ((tree_code_type[(int) (
((enum tree_code) ((exp))->base.code))]) >= tcc_reference
&& (tree_code_type[(int) (((enum tree_code) ((exp))->
base.code))]) <= tcc_expression)) ? (exp)->exp.locus : (
(location_t) 0)))) ? get_location_from_adhoc_loc (line_table,
((((exp)) && ((tree_code_type[(int) (((enum tree_code
) ((exp))->base.code))]) >= tcc_reference && (tree_code_type
[(int) (((enum tree_code) ((exp))->base.code))]) <= tcc_expression
)) ? (exp)->exp.locus : ((location_t) 0))) : (((((exp)) &&
((tree_code_type[(int) (((enum tree_code) ((exp))->base.code
))]) >= tcc_reference && (tree_code_type[(int) (((
enum tree_code) ((exp))->base.code))]) <= tcc_expression
)) ? (exp)->exp.locus : ((location_t) 0)))) != ((location_t
) 0))
)
3057 loc = EXPR_LOCATION (exp)((((exp)) && ((tree_code_type[(int) (((enum tree_code
) ((exp))->base.code))]) >= tcc_reference && (tree_code_type
[(int) (((enum tree_code) ((exp))->base.code))]) <= tcc_expression
)) ? (exp)->exp.locus : ((location_t) 0))
;
3058
3059 /* FIXME: Change c_strlen() to return sizetype instead of ssizetype
3060 so these conversions aren't necessary. */
3061 c_strlen_data lendata = { };
3062 tree len = c_strlen (src, 0, &lendata, 1);
3063 if (len)
3064 len = fold_convert_loc (loc, TREE_TYPE (bound)((contains_struct_check ((bound), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3064, __FUNCTION__))->typed.type)
, len);
3065
3066 if (TREE_CODE (bound)((enum tree_code) (bound)->base.code) == INTEGER_CST)
3067 {
3068 if (!len)
3069 return NULL_RTX(rtx) 0;
3070
3071 len = fold_build2_loc (loc, MIN_EXPR, size_type_nodeglobal_trees[TI_SIZE_TYPE], len, bound);
3072 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3073 }
3074
3075 if (TREE_CODE (bound)((enum tree_code) (bound)->base.code) != SSA_NAME)
3076 return NULL_RTX(rtx) 0;
3077
3078 wide_int min, max;
3079 value_range r;
3080 get_global_range_query ()->range_of_expr (r, bound);
3081 if (r.kind () != VR_RANGE)
3082 return NULL_RTX(rtx) 0;
3083 min = r.lower_bound ();
3084 max = r.upper_bound ();
3085
3086 if (!len || TREE_CODE (len)((enum tree_code) (len)->base.code) != INTEGER_CST)
3087 {
3088 bool exact;
3089 lendata.decl = unterminated_array (src, &len, &exact);
3090 if (!lendata.decl)
3091 return NULL_RTX(rtx) 0;
3092 }
3093
3094 if (lendata.decl)
3095 return NULL_RTX(rtx) 0;
3096
3097 if (wi::gtu_p (min, wi::to_wide (len)))
3098 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3099
3100 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len)((contains_struct_check ((len), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3100, __FUNCTION__))->typed.type)
, len, bound);
3101 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3102}
3103
3104/* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3105 bytes from bytes at DATA + OFFSET and return it reinterpreted as
3106 a target constant. */
3107
3108static rtx
3109builtin_memcpy_read_str (void *data, void *, HOST_WIDE_INTlong offset,
3110 fixed_size_mode mode)
3111{
3112 /* The REPresentation pointed to by DATA need not be a nul-terminated
3113 string but the caller guarantees it's large enough for MODE. */
3114 const char *rep = (const char *) data;
3115
3116 /* The by-pieces infrastructure does not try to pick a vector mode
3117 for memcpy expansion. */
3118 return c_readstr (rep + offset, as_a <scalar_int_mode> (mode),
3119 /*nul_terminated=*/false);
3120}
3121
3122/* LEN specify length of the block of memcpy/memset operation.
3123 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
3124 In some cases we can make very likely guess on max size, then we
3125 set it into PROBABLE_MAX_SIZE. */
3126
3127static void
3128determine_block_size (tree len, rtx len_rtx,
3129 unsigned HOST_WIDE_INTlong *min_size,
3130 unsigned HOST_WIDE_INTlong *max_size,
3131 unsigned HOST_WIDE_INTlong *probable_max_size)
3132{
3133 if (CONST_INT_P (len_rtx)(((enum rtx_code) (len_rtx)->code) == CONST_INT))
3134 {
3135 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx)((unsigned long) ((len_rtx)->u.hwint[0]));
3136 return;
3137 }
3138 else
3139 {
3140 wide_int min, max;
3141 enum value_range_kind range_type = VR_UNDEFINED;
3142
3143 /* Determine bounds from the type. */
3144 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))((tree_check5 ((((contains_struct_check ((len), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3144, __FUNCTION__))->typed.type)), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3144, __FUNCTION__, (INTEGER_TYPE), (ENUMERAL_TYPE), (BOOLEAN_TYPE
), (REAL_TYPE), (FIXED_POINT_TYPE)))->type_non_common.minval
)
))
3145 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len))((tree_check5 ((((contains_struct_check ((len), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3145, __FUNCTION__))->typed.type)), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3145, __FUNCTION__, (INTEGER_TYPE), (ENUMERAL_TYPE), (BOOLEAN_TYPE
), (REAL_TYPE), (FIXED_POINT_TYPE)))->type_non_common.minval
)
);
3146 else
3147 *min_size = 0;
3148 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))((tree_check5 ((((contains_struct_check ((len), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3148, __FUNCTION__))->typed.type)), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3148, __FUNCTION__, (INTEGER_TYPE), (ENUMERAL_TYPE), (BOOLEAN_TYPE
), (REAL_TYPE), (FIXED_POINT_TYPE)))->type_non_common.maxval
)
))
3149 *probable_max_size = *max_size
3150 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len))((tree_check5 ((((contains_struct_check ((len), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3150, __FUNCTION__))->typed.type)), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3150, __FUNCTION__, (INTEGER_TYPE), (ENUMERAL_TYPE), (BOOLEAN_TYPE
), (REAL_TYPE), (FIXED_POINT_TYPE)))->type_non_common.maxval
)
);
3151 else
3152 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx))mode_mask_array[((machine_mode) (len_rtx)->mode)];
3153
3154 if (TREE_CODE (len)((enum tree_code) (len)->base.code) == SSA_NAME)
3155 {
3156 value_range r;
3157 get_global_range_query ()->range_of_expr (r, len);
3158 range_type = r.kind ();
3159 if (range_type != VR_UNDEFINED)
3160 {
3161 min = wi::to_wide (r.min ());
3162 max = wi::to_wide (r.max ());
3163 }
3164 }
3165 if (range_type == VR_RANGE)
3166 {
3167 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
3168 *min_size = min.to_uhwi ();
3169 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
3170 *probable_max_size = *max_size = max.to_uhwi ();
3171 }
3172 else if (range_type == VR_ANTI_RANGE)
3173 {
3174 /* Code like
3175
3176 int n;
3177 if (n < 100)
3178 memcpy (a, b, n)
3179
3180 Produce anti range allowing negative values of N. We still
3181 can use the information and make a guess that N is not negative.
3182 */
3183 if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
3184 *probable_max_size = min.to_uhwi () - 1;
3185 }
3186 }
3187 gcc_checking_assert (*max_size <=((void)(!(*max_size <= (unsigned long) mode_mask_array[((machine_mode
) (len_rtx)->mode)]) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3189, __FUNCTION__), 0 : 0))
3188 (unsigned HOST_WIDE_INT)((void)(!(*max_size <= (unsigned long) mode_mask_array[((machine_mode
) (len_rtx)->mode)]) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3189, __FUNCTION__), 0 : 0))
3189 GET_MODE_MASK (GET_MODE (len_rtx)))((void)(!(*max_size <= (unsigned long) mode_mask_array[((machine_mode
) (len_rtx)->mode)]) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3189, __FUNCTION__), 0 : 0))
;
3190}
3191
3192/* Expand a call EXP to the memcpy builtin.
3193 Return NULL_RTX if we failed, the caller should emit a normal call,
3194 otherwise try to get the result in TARGET, if convenient (and in
3195 mode MODE if that's convenient). */
3196
3197static rtx
3198expand_builtin_memcpy (tree exp, rtx target)
3199{
3200 if (!validate_arglist (exp,
3201 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3202 return NULL_RTX(rtx) 0;
3203
3204 tree dest = CALL_EXPR_ARG (exp, 0)(*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3204, __FUNCTION__, (CALL_EXPR)))), ((0) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3204, __FUNCTION__)))))
;
3205 tree src = CALL_EXPR_ARG (exp, 1)(*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3205, __FUNCTION__, (CALL_EXPR)))), ((1) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3205, __FUNCTION__)))))
;
3206 tree len = CALL_EXPR_ARG (exp, 2)(*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3206, __FUNCTION__, (CALL_EXPR)))), ((2) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3206, __FUNCTION__)))))
;
3207
3208 return expand_builtin_memory_copy_args (dest, src, len, target, exp,
3209 /*retmode=*/ RETURN_BEGIN, false);
3210}
3211
3212/* Check a call EXP to the memmove built-in for validity.
3213 Return NULL_RTX on both success and failure. */
3214
3215static rtx
3216expand_builtin_memmove (tree exp, rtx target)
3217{
3218 if (!validate_arglist (exp,
3219 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3220 return NULL_RTX(rtx) 0;
3221
3222 tree dest = CALL_EXPR_ARG (exp, 0)(*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3222, __FUNCTION__, (CALL_EXPR)))), ((0) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3222, __FUNCTION__)))))
;
3223 tree src = CALL_EXPR_ARG (exp, 1)(*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3223, __FUNCTION__, (CALL_EXPR)))), ((1) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3223, __FUNCTION__)))))
;
3224 tree len = CALL_EXPR_ARG (exp, 2)(*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3224, __FUNCTION__, (CALL_EXPR)))), ((2) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3224, __FUNCTION__)))))
;
3225
3226 return expand_builtin_memory_copy_args (dest, src, len, target, exp,
3227 /*retmode=*/ RETURN_BEGIN, true);
3228}
3229
3230/* Expand a call EXP to the mempcpy builtin.
3231 Return NULL_RTX if we failed; the caller should emit a normal call,
3232 otherwise try to get the result in TARGET, if convenient (and in
3233 mode MODE if that's convenient). */
3234
3235static rtx
3236expand_builtin_mempcpy (tree exp, rtx target)
3237{
3238 if (!validate_arglist (exp,
3239 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3240 return NULL_RTX(rtx) 0;
3241
3242 tree dest = CALL_EXPR_ARG (exp, 0)(*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3242, __FUNCTION__, (CALL_EXPR)))), ((0) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3242, __FUNCTION__)))))
;
3243 tree src = CALL_EXPR_ARG (exp, 1)(*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3243, __FUNCTION__, (CALL_EXPR)))), ((1) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3243, __FUNCTION__)))))
;
3244 tree len = CALL_EXPR_ARG (exp, 2)(*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3244, __FUNCTION__, (CALL_EXPR)))), ((2) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3244, __FUNCTION__)))))
;
3245
3246 /* Policy does not generally allow using compute_objsize (which
3247 is used internally by check_memop_size) to change code generation
3248 or drive optimization decisions.
3249
3250 In this instance it is safe because the code we generate has
3251 the same semantics regardless of the return value of
3252 check_memop_sizes. Exactly the same amount of data is copied
3253 and the return value is exactly the same in both cases.
3254
3255 Furthermore, check_memop_size always uses mode 0 for the call to
3256 compute_objsize, so the imprecise nature of compute_objsize is
3257 avoided. */
3258
3259 /* Avoid expanding mempcpy into memcpy when the call is determined
3260 to overflow the buffer. This also prevents the same overflow
3261 from being diagnosed again when expanding memcpy. */
3262
3263 return expand_builtin_mempcpy_args (dest, src, len,
3264 target, exp, /*retmode=*/ RETURN_END);
3265}
3266
3267/* Helper function to do the actual work for expand of memory copy family
3268 functions (memcpy, mempcpy, stpcpy). Expansing should assign LEN bytes
3269 of memory from SRC to DEST and assign to TARGET if convenient. Return
3270 value is based on RETMODE argument. */
3271
3272static rtx
3273expand_builtin_memory_copy_args (tree dest, tree src, tree len,
3274 rtx target, tree exp, memop_ret retmode,
3275 bool might_overlap)
3276{
3277 unsigned int src_align = get_pointer_alignment (src);
3278 unsigned int dest_align = get_pointer_alignment (dest);
3279 rtx dest_mem, src_mem, dest_addr, len_rtx;
3280 HOST_WIDE_INTlong expected_size = -1;
3281 unsigned int expected_align = 0;
3282 unsigned HOST_WIDE_INTlong min_size;
3283 unsigned HOST_WIDE_INTlong max_size;
3284 unsigned HOST_WIDE_INTlong probable_max_size;
3285
3286 bool is_move_done;
3287
3288 /* If DEST is not a pointer type, call the normal function. */
3289 if (dest_align == 0)
3290 return NULL_RTX(rtx) 0;
3291
3292 /* If either SRC is not a pointer type, don't do this
3293 operation in-line. */
3294 if (src_align == 0)
3295 return NULL_RTX(rtx) 0;
3296
3297 if (currently_expanding_gimple_stmt)
3298 stringop_block_profile (currently_expanding_gimple_stmt,
3299 &expected_align, &expected_size);
3300
3301 if (expected_align < dest_align)
3302 expected_align = dest_align;
3303 dest_mem = get_memory_rtx (dest, len);
3304 set_mem_align (dest_mem, dest_align);
3305 len_rtx = expand_normal (len);
3306 determine_block_size (len, len_rtx, &min_size, &max_size,
3307 &probable_max_size);
3308
3309 /* Try to get the byte representation of the constant SRC points to,
3310 with its byte size in NBYTES. */
3311 unsigned HOST_WIDE_INTlong nbytes;
3312 const char *rep = getbyterep (src, &nbytes);
3313
3314 /* If the function's constant bound LEN_RTX is less than or equal
3315 to the byte size of the representation of the constant argument,
3316 and if block move would be done by pieces, we can avoid loading
3317 the bytes from memory and only store the computed constant.
3318 This works in the overlap (memmove) case as well because
3319 store_by_pieces just generates a series of stores of constants
3320 from the representation returned by getbyterep(). */
3321 if (rep
3322 && CONST_INT_P (len_rtx)(((enum rtx_code) (len_rtx)->code) == CONST_INT)
3323 && (unsigned HOST_WIDE_INTlong) INTVAL (len_rtx)((len_rtx)->u.hwint[0]) <= nbytes
3324 && can_store_by_pieces (INTVAL (len_rtx)((len_rtx)->u.hwint[0]), builtin_memcpy_read_str,
3325 CONST_CAST (char *, rep)(const_cast<char *> ((rep))),
3326 dest_align, false))
3327 {
3328 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx)((len_rtx)->u.hwint[0]),
3329 builtin_memcpy_read_str,
3330 CONST_CAST (char *, rep)(const_cast<char *> ((rep))),
3331 dest_align, false, retmode);
3332 dest_mem = force_operand (XEXP (dest_mem, 0)(((dest_mem)->u.fld[0]).rt_rtx), target);
3333 dest_mem = convert_memory_address (ptr_mode, dest_mem)convert_memory_address_addr_space ((ptr_mode), (dest_mem), 0);
3334 return dest_mem;
3335 }
3336
3337 src_mem = get_memory_rtx (src, len);
3338 set_mem_align (src_mem, src_align);
3339
3340 /* Copy word part most expediently. */
3341 enum block_op_methods method = BLOCK_OP_NORMAL;
3342 if (CALL_EXPR_TAILCALL (exp)((tree_check ((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3342, __FUNCTION__, (CALL_EXPR)))->base.addressable_flag
)
3343 && (retmode == RETURN_BEGIN || target == const0_rtx(const_int_rtx[64])))
3344 method = BLOCK_OP_TAILCALL;
3345 bool use_mempcpy_call = (targetm.libc_has_fast_function (BUILT_IN_MEMPCPY)
3346 && retmode == RETURN_END
3347 && !might_overlap
3348 && target != const0_rtx(const_int_rtx[64]));
3349 if (use_mempcpy_call)
3350 method = BLOCK_OP_NO_LIBCALL_RET;
3351 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx, method,
3352 expected_align, expected_size,
3353 min_size, max_size, probable_max_size,
3354 use_mempcpy_call, &is_move_done,
3355 might_overlap);
3356
3357 /* Bail out when a mempcpy call would be expanded as libcall and when
3358 we have a target that provides a fast implementation
3359 of mempcpy routine. */
3360 if (!is_move_done)
3361 return NULL_RTX(rtx) 0;
3362
3363 if (dest_addr == pc_rtx)
3364 return NULL_RTX(rtx) 0;
3365
3366 if (dest_addr == 0)
3367 {
3368 dest_addr = force_operand (XEXP (dest_mem, 0)(((dest_mem)->u.fld[0]).rt_rtx), target);
3369 dest_addr = convert_memory_address (ptr_mode, dest_addr)convert_memory_address_addr_space ((ptr_mode), (dest_addr), 0
)
;
3370 }
3371
3372 if (retmode != RETURN_BEGIN && target != const0_rtx(const_int_rtx[64]))
3373 {
3374 dest_addr = gen_rtx_PLUS (ptr_mode, dest_addr, len_rtx)gen_rtx_fmt_ee_stat ((PLUS), ((ptr_mode)), ((dest_addr)), ((len_rtx
)) )
;
3375 /* stpcpy pointer to last byte. */
3376 if (retmode == RETURN_END_MINUS_ONE)
3377 dest_addr = gen_rtx_MINUS (ptr_mode, dest_addr, const1_rtx)gen_rtx_fmt_ee_stat ((MINUS), ((ptr_mode)), ((dest_addr)), ((
(const_int_rtx[64 +1]))) )
;
3378 }
3379
3380 return dest_addr;
3381}
3382
3383static rtx
3384expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3385 rtx target, tree orig_exp, memop_ret retmode)
3386{
3387 return expand_builtin_memory_copy_args (dest, src, len, target, orig_exp,
3388 retmode, false);
3389}
3390
3391/* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3392 we failed, the caller should emit a normal call, otherwise try to
3393 get the result in TARGET, if convenient.
3394 Return value is based on RETMODE argument. */
3395
3396static rtx
3397expand_movstr (tree dest, tree src, rtx target, memop_ret retmode)
3398{
3399 class expand_operand ops[3];
3400 rtx dest_mem;
3401 rtx src_mem;
3402
3403 if (!targetm.have_movstr ())
3404 return NULL_RTX(rtx) 0;
3405
3406 dest_mem = get_memory_rtx (dest, NULL__null);
3407 src_mem = get_memory_rtx (src, NULL__null);
3408 if (retmode == RETURN_BEGIN)
3409 {
3410 target = force_reg (Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode (
(scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode
::from_int) E_SImode)))
, XEXP (dest_mem, 0)(((dest_mem)->u.fld[0]).rt_rtx));
3411 dest_mem = replace_equiv_address (dest_mem, target);
3412 }
3413
3414 create_output_operand (&ops[0],
3415 retmode != RETURN_BEGIN ? target : NULL_RTX(rtx) 0, Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode (
(scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode
::from_int) E_SImode)))
);
3416 create_fixed_operand (&ops[1], dest_mem);
3417 create_fixed_operand (&ops[2], src_mem);
3418 if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
3419 return NULL_RTX(rtx) 0;
3420
3421 if (retmode != RETURN_BEGIN && target != const0_rtx(const_int_rtx[64]))
3422 {
3423 target = ops[0].value;
3424 /* movstr is supposed to set end to the address of the NUL
3425 terminator. If the caller requested a mempcpy-like return value,
3426 adjust it. */
3427 if (retmode == RETURN_END)
3428 {
3429 rtx tem = plus_constant (GET_MODE (target)((machine_mode) (target)->mode),
3430 gen_lowpartrtl_hooks.gen_lowpart (GET_MODE (target)((machine_mode) (target)->mode), target), 1);
3431 emit_move_insn (target, force_operand (tem, NULL_RTX(rtx) 0));
3432 }
3433 }
3434 return target;
3435}
3436
3437/* Expand expression EXP, which is a call to the strcpy builtin. Return
3438 NULL_RTX if we failed the caller should emit a normal call, otherwise
3439 try to get the result in TARGET, if convenient (and in mode MODE if that's
3440 convenient). */
3441
3442static rtx
3443expand_builtin_strcpy (tree exp, rtx target)
3444{
3445 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3446 return NULL_RTX(rtx) 0;
3447
3448 tree dest = CALL_EXPR_ARG (exp, 0)(*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3448, __FUNCTION__, (CALL_EXPR)))), ((0) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3448, __FUNCTION__)))))
;
3449 tree src = CALL_EXPR_ARG (exp, 1)(*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3449, __FUNCTION__, (CALL_EXPR)))), ((1) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3449, __FUNCTION__)))))
;
3450
3451 return expand_builtin_strcpy_args (exp, dest, src, target);
3452}
3453
3454/* Helper function to do the actual work for expand_builtin_strcpy. The
3455 arguments to the builtin_strcpy call DEST and SRC are broken out
3456 so that this can also be called without constructing an actual CALL_EXPR.
3457 The other arguments and return value are the same as for
3458 expand_builtin_strcpy. */
3459
3460static rtx
3461expand_builtin_strcpy_args (tree, tree dest, tree src, rtx target)
3462{
3463 return expand_movstr (dest, src, target, /*retmode=*/ RETURN_BEGIN);
3464}
3465
3466/* Expand a call EXP to the stpcpy builtin.
3467 Return NULL_RTX if we failed the caller should emit a normal call,
3468 otherwise try to get the result in TARGET, if convenient (and in
3469 mode MODE if that's convenient). */
3470
3471static rtx
3472expand_builtin_stpcpy_1 (tree exp, rtx target, machine_mode mode)
3473{
3474 tree dst, src;
3475 location_t loc = EXPR_LOCATION (exp)((((exp)) && ((tree_code_type[(int) (((enum tree_code
) ((exp))->base.code))]) >= tcc_reference && (tree_code_type
[(int) (((enum tree_code) ((exp))->base.code))]) <= tcc_expression
)) ? (exp)->exp.locus : ((location_t) 0))
;
3476
3477 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3478 return NULL_RTX(rtx) 0;
3479
3480 dst = CALL_EXPR_ARG (exp, 0)(*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3480, __FUNCTION__, (CALL_EXPR)))), ((0) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3480, __FUNCTION__)))))
;
3481 src = CALL_EXPR_ARG (exp, 1)(*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3481, __FUNCTION__, (CALL_EXPR)))), ((1) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3481, __FUNCTION__)))))
;
3482
3483 /* If return value is ignored, transform stpcpy into strcpy. */
3484 if (target == const0_rtx(const_int_rtx[64]) && builtin_decl_implicit (BUILT_IN_STRCPY))
3485 {
3486 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3487 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3488 return expand_expr (result, target, mode, EXPAND_NORMAL);
3489 }
3490 else
3491 {
3492 tree len, lenp1;
3493 rtx ret;
3494
3495 /* Ensure we get an actual string whose length can be evaluated at
3496 compile-time, not an expression containing a string. This is
3497 because the latter will potentially produce pessimized code
3498 when used to produce the return value. */
3499 c_strlen_data lendata = { };
3500 if (!c_getstr (src)
3501 || !(len = c_strlen (src, 0, &lendata, 1)))
3502 return expand_movstr (dst, src, target,
3503 /*retmode=*/ RETURN_END_MINUS_ONE);
3504
3505 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1)size_int_kind (1, stk_ssizetype));
3506 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3507 target, exp,
3508 /*retmode=*/ RETURN_END_MINUS_ONE);
3509
3510 if (ret)
3511 return ret;
3512
3513 if (TREE_CODE (len)((enum tree_code) (len)->base.code) == INTEGER_CST)
3514 {
3515 rtx len_rtx = expand_normal (len);
3516
3517 if (CONST_INT_P (len_rtx)(((enum rtx_code) (len_rtx)->code) == CONST_INT))
3518 {
3519 ret = expand_builtin_strcpy_args (exp, dst, src, target);
3520
3521 if (ret)
3522 {
3523 if (! target)
3524 {
3525 if (mode != VOIDmode((void) 0, E_VOIDmode))
3526 target = gen_reg_rtx (mode);
3527 else
3528 target = gen_reg_rtx (GET_MODE (ret)((machine_mode) (ret)->mode));
3529 }
3530 if (GET_MODE (target)((machine_mode) (target)->mode) != GET_MODE (ret)((machine_mode) (ret)->mode))
3531 ret = gen_lowpartrtl_hooks.gen_lowpart (GET_MODE (target)((machine_mode) (target)->mode), ret);
3532
3533 ret = plus_constant (GET_MODE (ret)((machine_mode) (ret)->mode), ret, INTVAL (len_rtx)((len_rtx)->u.hwint[0]));
3534 ret = emit_move_insn (target, force_operand (ret, NULL_RTX(rtx) 0));
3535 gcc_assert (ret)((void)(!(ret) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3535, __FUNCTION__), 0 : 0))
;
3536
3537 return target;
3538 }
3539 }
3540 }
3541
3542 return expand_movstr (dst, src, target,
3543 /*retmode=*/ RETURN_END_MINUS_ONE);
3544 }
3545}
3546
3547/* Expand a call EXP to the stpcpy builtin and diagnose uses of nonstring
3548 arguments while being careful to avoid duplicate warnings (which could
3549 be issued if the expander were to expand the call, resulting in it
3550 being emitted in expand_call(). */
3551
3552static rtx
3553expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
3554{
3555 if (rtx ret = expand_builtin_stpcpy_1 (exp, target, mode))
3556 {
3557 /* The call has been successfully expanded. Check for nonstring
3558 arguments and issue warnings as appropriate. */
3559 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
3560 return ret;
3561 }
3562
3563 return NULL_RTX(rtx) 0;
3564}
3565
3566/* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3567 bytes from constant string DATA + OFFSET and return it as target
3568 constant. */
3569
3570rtx
3571builtin_strncpy_read_str (void *data, void *, HOST_WIDE_INTlong offset,
3572 fixed_size_mode mode)
3573{
3574 const char *str = (const char *) data;
3575
3576 if ((unsigned HOST_WIDE_INTlong) offset > strlen (str))
3577 return const0_rtx(const_int_rtx[64]);
3578
3579 /* The by-pieces infrastructure does not try to pick a vector mode
3580 for strncpy expansion. */
3581 return c_readstr (str + offset, as_a <scalar_int_mode> (mode));
3582}
3583
3584/* Helper to check the sizes of sequences and the destination of calls
3585 to __builtin_strncat and __builtin___strncat_chk. Returns true on
3586 success (no overflow or invalid sizes), false otherwise. */
3587
3588static bool
3589check_strncat_sizes (tree exp, tree objsize)
3590{
3591 tree dest = CALL_EXPR_ARG (exp, 0)(*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3591, __FUNCTION__, (CALL_EXPR)))), ((0) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3591, __FUNCTION__)))))
;
3592 tree src = CALL_EXPR_ARG (exp, 1)(*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3592, __FUNCTION__, (CALL_EXPR)))), ((1) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3592, __FUNCTION__)))))
;
3593 tree maxread = CALL_EXPR_ARG (exp, 2)(*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3593, __FUNCTION__, (CALL_EXPR)))), ((2) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3593, __FUNCTION__)))))
;
3594
3595 /* Try to determine the range of lengths that the source expression
3596 refers to. */
3597 c_strlen_data lendata = { };
3598 get_range_strlen (src, &lendata, /* eltsize = */ 1);
3599
3600 /* Try to verify that the destination is big enough for the shortest
3601 string. */
3602
3603 access_data data (nullptr, exp, access_read_write, maxread, true);
3604 if (!objsize && warn_stringop_overflowglobal_options.x_warn_stringop_overflow)
3605 {
3606 /* If it hasn't been provided by __strncat_chk, try to determine
3607 the size of the destination object into which the source is
3608 being copied. */
3609 objsize = compute_objsize (dest, warn_stringop_overflowglobal_options.x_warn_stringop_overflow - 1, &data.dst);
3610 }
3611
3612 /* Add one for the terminating nul. */
3613 tree srclen = (lendata.minlen
3614 ? fold_build2 (PLUS_EXPR, size_type_node, lendata.minlen,fold_build2_loc (((location_t) 0), PLUS_EXPR, global_trees[TI_SIZE_TYPE
], lendata.minlen, global_trees[TI_SIZE_ONE] )
3615 size_one_node)fold_build2_loc (((location_t) 0), PLUS_EXPR, global_trees[TI_SIZE_TYPE
], lendata.minlen, global_trees[TI_SIZE_ONE] )
3616 : NULL_TREE(tree) __null);
3617
3618 /* The strncat function copies at most MAXREAD bytes and always appends
3619 the terminating nul so the specified upper bound should never be equal
3620 to (or greater than) the size of the destination. */
3621 if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (objsize)
3622 && tree_int_cst_equal (objsize, maxread))
3623 {
3624 location_t loc = EXPR_LOCATION (exp)((((exp)) && ((tree_code_type[(int) (((enum tree_code
) ((exp))->base.code))]) >= tcc_reference && (tree_code_type
[(int) (((enum tree_code) ((exp))->base.code))]) <= tcc_expression
)) ? (exp)->exp.locus : ((location_t) 0))
;
3625 warning_at (loc, OPT_Wstringop_overflow_,
3626 "%qD specified bound %E equals destination size",
3627 get_callee_fndecl (exp), maxread);
3628
3629 return false;
3630 }
3631
3632 if (!srclen
3633 || (maxread && tree_fits_uhwi_p (maxread)
3634 && tree_fits_uhwi_p (srclen)
3635 && tree_int_cst_lt (maxread, srclen)))
3636 srclen = maxread;
3637
3638 /* The number of bytes to write is LEN but check_access will alsoa
3639 check SRCLEN if LEN's value isn't known. */
3640 return check_access (exp, /*dstwrite=*/NULL_TREE(tree) __null, maxread, srclen,
3641 objsize, data.mode, &data);
3642}
3643
3644/* Expand expression EXP, which is a call to the strncpy builtin. Return
3645 NULL_RTX if we failed the caller should emit a normal call. */
3646
3647static rtx
3648expand_builtin_strncpy (tree exp, rtx target)
3649{
3650 location_t loc = EXPR_LOCATION (exp)((((exp)) && ((tree_code_type[(int) (((enum tree_code
) ((exp))->base.code))]) >= tcc_reference && (tree_code_type
[(int) (((enum tree_code) ((exp))->base.code))]) <= tcc_expression
)) ? (exp)->exp.locus : ((location_t) 0))
;
3651
3652 if (!validate_arglist (exp,
3653 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3654 return NULL_RTX(rtx) 0;
3655 tree dest = CALL_EXPR_ARG (exp, 0)(*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3655, __FUNCTION__, (CALL_EXPR)))), ((0) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3655, __FUNCTION__)))))
;
3656 tree src = CALL_EXPR_ARG (exp, 1)(*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3656, __FUNCTION__, (CALL_EXPR)))), ((1) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3656, __FUNCTION__)))))
;
3657 /* The number of bytes to write (not the maximum). */
3658 tree len = CALL_EXPR_ARG (exp, 2)(*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3658, __FUNCTION__, (CALL_EXPR)))), ((2) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3658, __FUNCTION__)))))
;
3659
3660 /* The length of the source sequence. */
3661 tree slen = c_strlen (src, 1);
3662
3663 /* We must be passed a constant len and src parameter. */
3664 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
3665 return NULL_RTX(rtx) 0;
3666
3667 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1)size_int_kind (1, stk_ssizetype));
3668
3669 /* We're required to pad with trailing zeros if the requested
3670 len is greater than strlen(s2)+1. In that case try to
3671 use store_by_pieces, if it fails, punt. */
3672 if (tree_int_cst_lt (slen, len))
3673 {
3674 unsigned int dest_align = get_pointer_alignment (dest);
3675 const char *p = c_getstr (src);
3676 rtx dest_mem;
3677
3678 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
3679 || !can_store_by_pieces (tree_to_uhwi (len),
3680 builtin_strncpy_read_str,
3681 CONST_CAST (char *, p)(const_cast<char *> ((p))),
3682 dest_align, false))
3683 return NULL_RTX(rtx) 0;
3684
3685 dest_mem = get_memory_rtx (dest, len);
3686 store_by_pieces (dest_mem, tree_to_uhwi (len),
3687 builtin_strncpy_read_str,
3688 CONST_CAST (char *, p)(const_cast<char *> ((p))), dest_align, false,
3689 RETURN_BEGIN);
3690 dest_mem = force_operand (XEXP (dest_mem, 0)(((dest_mem)->u.fld[0]).rt_rtx), target);
3691 dest_mem = convert_memory_address (ptr_mode, dest_mem)convert_memory_address_addr_space ((ptr_mode), (dest_mem), 0);
3692 return dest_mem;
3693 }
3694
3695 return NULL_RTX(rtx) 0;
3696}
3697
3698/* Return the RTL of a register in MODE generated from PREV in the
3699 previous iteration. */
3700
3701static rtx
3702gen_memset_value_from_prev (by_pieces_prev *prev, fixed_size_mode mode)
3703{
3704 rtx target = nullptr;
3705 if (prev != nullptr && prev->data != nullptr)
3706 {
3707 /* Use the previous data in the same mode. */
3708 if (prev->mode == mode)
3709 return prev->data;
3710
3711 fixed_size_mode prev_mode = prev->mode;
3712
3713 /* Don't use the previous data to write QImode if it is in a
3714 vector mode. */
3715 if (VECTOR_MODE_P (prev_mode)(((enum mode_class) mode_class[prev_mode]) == MODE_VECTOR_BOOL
|| ((enum mode_class) mode_class[prev_mode]) == MODE_VECTOR_INT
|| ((enum mode_class) mode_class[prev_mode]) == MODE_VECTOR_FLOAT
|| ((enum mode_class) mode_class[prev_mode]) == MODE_VECTOR_FRACT
|| ((enum mode_class) mode_class[prev_mode]) == MODE_VECTOR_UFRACT
|| ((enum mode_class) mode_class[prev_mode]) == MODE_VECTOR_ACCUM
|| ((enum mode_class) mode_class[prev_mode]) == MODE_VECTOR_UACCUM
)
&& mode == QImode(scalar_int_mode ((scalar_int_mode::from_int) E_QImode)))
3716 return target;
3717
3718 rtx prev_rtx = prev->data;
3719
3720 if (REG_P (prev_rtx)(((enum rtx_code) (prev_rtx)->code) == REG)
3721 && HARD_REGISTER_P (prev_rtx)((((rhs_regno(prev_rtx))) < 76))
3722 && lowpart_subreg_regno (REGNO (prev_rtx)(rhs_regno(prev_rtx)), prev_mode, mode) < 0)
3723 {
3724 /* This case occurs when PREV_MODE is a vector and when
3725 MODE is too small to store using vector operations.
3726 After register allocation, the code will need to move the
3727 lowpart of the vector register into a non-vector register.
3728
3729 Also, the target has chosen to use a hard register
3730 instead of going with the default choice of using a
3731 pseudo register. We should respect that choice and try to
3732 avoid creating a pseudo register with the same mode as the
3733 current hard register.
3734
3735 In principle, we could just use a lowpart MODE subreg of
3736 the vector register. However, the vector register mode might
3737 be too wide for non-vector registers, and we already know
3738 that the non-vector mode is too small for vector registers.
3739 It's therefore likely that we'd need to spill to memory in
3740 the vector mode and reload the non-vector value from there.
3741
3742 Try to avoid that by reducing the vector register to the
3743 smallest size that it can hold. This should increase the
3744 chances that non-vector registers can hold both the inner
3745 and outer modes of the subreg that we generate later. */
3746 machine_mode m;
3747 fixed_size_mode candidate;
3748 FOR_EACH_MODE_IN_CLASS (m, GET_MODE_CLASS (mode))for (mode_iterator::start (&(m), ((enum mode_class) mode_class
[mode])); mode_iterator::iterate_p (&(m)); mode_iterator::
get_wider (&(m)))
3749 if (is_a<fixed_size_mode> (m, &candidate))
3750 {
3751 if (GET_MODE_SIZE (candidate)
3752 >= GET_MODE_SIZE (prev_mode))
3753 break;
3754 if (GET_MODE_SIZE (candidate) >= GET_MODE_SIZE (mode)
3755 && lowpart_subreg_regno (REGNO (prev_rtx)(rhs_regno(prev_rtx)),
3756 prev_mode, candidate) >= 0)
3757 {
3758 target = lowpart_subreg (candidate, prev_rtx,
3759 prev_mode);
3760 prev_rtx = target;
3761 prev_mode = candidate;
3762 break;
3763 }
3764 }
3765 if (target == nullptr)
3766 prev_rtx = copy_to_reg (prev_rtx);
3767 }
3768
3769 target = lowpart_subreg (mode, prev_rtx, prev_mode);
3770 }
3771 return target;
3772}
3773
3774/* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3775 bytes from constant string DATA + OFFSET and return it as target
3776 constant. If PREV isn't nullptr, it has the RTL info from the
3777 previous iteration. */
3778
3779rtx
3780builtin_memset_read_str (void *data, void *prev,
3781 HOST_WIDE_INTlong offset ATTRIBUTE_UNUSED__attribute__ ((__unused__)),
3782 fixed_size_mode mode)
3783{
3784 const char *c = (const char *) data;
3785 unsigned int size = GET_MODE_SIZE (mode);
3786
3787 rtx target = gen_memset_value_from_prev ((by_pieces_prev *) prev,
3788 mode);
3789 if (target != nullptr)
3790 return target;
3791 rtx src = gen_int_mode (*c, QImode(scalar_int_mode ((scalar_int_mode::from_int) E_QImode)));
3792
3793 if (VECTOR_MODE_P (mode)(((enum mode_class) mode_class[mode]) == MODE_VECTOR_BOOL || (
(enum mode_class) mode_class[mode]) == MODE_VECTOR_INT || ((enum
mode_class) mode_class[mode]) == MODE_VECTOR_FLOAT || ((enum
mode_class) mode_class[mode]) == MODE_VECTOR_FRACT || ((enum
mode_class) mode_class[mode]) == MODE_VECTOR_UFRACT || ((enum
mode_class) mode_class[mode]) == MODE_VECTOR_ACCUM || ((enum
mode_class) mode_class[mode]) == MODE_VECTOR_UACCUM)
)
3794 {
3795 gcc_assert (GET_MODE_INNER (mode) == QImode)((void)(!((mode_to_inner (mode)) == (scalar_int_mode ((scalar_int_mode
::from_int) E_QImode))) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3795, __FUNCTION__), 0 : 0))
;
3796
3797 rtx const_vec = gen_const_vec_duplicate (mode, src);
3798 if (prev == NULL__null)
3799 /* Return CONST_VECTOR when called by a query function. */
3800 return const_vec;
3801
3802 /* Use the move expander with CONST_VECTOR. */
3803 target = targetm.gen_memset_scratch_rtx (mode);
3804 emit_move_insn (target, const_vec);
3805 return target;
3806 }
3807
3808 char *p = XALLOCAVEC (char, size)((char *) __builtin_alloca(sizeof (char) * (size)));
3809
3810 memset (p, *c, size);
3811
3812 /* Vector modes should be handled above. */
3813 return c_readstr (p, as_a <scalar_int_mode> (mode));
3814}
3815
3816/* Callback routine for store_by_pieces. Return the RTL of a register
3817 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3818 char value given in the RTL register data. For example, if mode is
3819 4 bytes wide, return the RTL for 0x01010101*data. If PREV isn't
3820 nullptr, it has the RTL info from the previous iteration. */
3821
3822static rtx
3823builtin_memset_gen_str (void *data, void *prev,
3824 HOST_WIDE_INTlong offset ATTRIBUTE_UNUSED__attribute__ ((__unused__)),
3825 fixed_size_mode mode)
3826{
3827 rtx target, coeff;
3828 size_t size;
3829 char *p;
3830
3831 size = GET_MODE_SIZE (mode);
3832 if (size == 1)
3833 return (rtx) data;
3834
3835 target = gen_memset_value_from_prev ((by_pieces_prev *) prev, mode);
3836 if (target != nullptr)
3837 return target;
3838
3839 if (VECTOR_MODE_P (mode)(((enum mode_class) mode_class[mode]) == MODE_VECTOR_BOOL || (
(enum mode_class) mode_class[mode]) == MODE_VECTOR_INT || ((enum
mode_class) mode_class[mode]) == MODE_VECTOR_FLOAT || ((enum
mode_class) mode_class[mode]) == MODE_VECTOR_FRACT || ((enum
mode_class) mode_class[mode]) == MODE_VECTOR_UFRACT || ((enum
mode_class) mode_class[mode]) == MODE_VECTOR_ACCUM || ((enum
mode_class) mode_class[mode]) == MODE_VECTOR_UACCUM)
)
3840 {
3841 gcc_assert (GET_MODE_INNER (mode) == QImode)((void)(!((mode_to_inner (mode)) == (scalar_int_mode ((scalar_int_mode
::from_int) E_QImode))) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3841, __FUNCTION__), 0 : 0))
;
3842
3843 /* vec_duplicate_optab is a precondition to pick a vector mode for
3844 the memset expander. */
3845 insn_code icode = optab_handler (vec_duplicate_optab, mode);
3846
3847 target = targetm.gen_memset_scratch_rtx (mode);
3848 class expand_operand ops[2];
3849 create_output_operand (&ops[0], target, mode);
3850 create_input_operand (&ops[1], (rtx) data, QImode(scalar_int_mode ((scalar_int_mode::from_int) E_QImode)));
3851 expand_insn (icode, 2, ops);
3852 if (!rtx_equal_p (target, ops[0].value))
3853 emit_move_insn (target, ops[0].value);
3854
3855 return target;
3856 }
3857
3858 p = XALLOCAVEC (char, size)((char *) __builtin_alloca(sizeof (char) * (size)));
3859 memset (p, 1, size);
3860 /* Vector modes should be handled above. */
3861 coeff = c_readstr (p, as_a <scalar_int_mode> (mode));
3862
3863 target = convert_to_mode (mode, (rtx) data, 1);
3864 target = expand_mult (mode, target, coeff, NULL_RTX(rtx) 0, 1);
3865 return force_reg (mode, target);
3866}
3867
3868/* Expand expression EXP, which is a call to the memset builtin. Return
3869 NULL_RTX if we failed the caller should emit a normal call, otherwise
3870 try to get the result in TARGET, if convenient (and in mode MODE if that's
3871 convenient). */
3872
3873rtx
3874expand_builtin_memset (tree exp, rtx target, machine_mode mode)
3875{
3876 if (!validate_arglist (exp,
3877 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3878 return NULL_RTX(rtx) 0;
3879
3880 tree dest = CALL_EXPR_ARG (exp, 0)(*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3880, __FUNCTION__, (CALL_EXPR)))), ((0) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3880, __FUNCTION__)))))
;
3881 tree val = CALL_EXPR_ARG (exp, 1)(*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3881, __FUNCTION__, (CALL_EXPR)))), ((1) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3881, __FUNCTION__)))))
;
3882 tree len = CALL_EXPR_ARG (exp, 2)(*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3882, __FUNCTION__, (CALL_EXPR)))), ((2) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3882, __FUNCTION__)))))
;
3883
3884 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3885}
3886
3887/* Try to store VAL (or, if NULL_RTX, VALC) in LEN bytes starting at TO.
3888 Return TRUE if successful, FALSE otherwise. TO is assumed to be
3889 aligned at an ALIGN-bits boundary. LEN must be a multiple of
3890 1<<CTZ_LEN between MIN_LEN and MAX_LEN.
3891
3892 The strategy is to issue one store_by_pieces for each power of two,
3893 from most to least significant, guarded by a test on whether there
3894 are at least that many bytes left to copy in LEN.
3895
3896 ??? Should we skip some powers of two in favor of loops? Maybe start
3897 at the max of TO/LEN/word alignment, at least when optimizing for
3898 size, instead of ensuring O(log len) dynamic compares? */
3899
3900bool
3901try_store_by_multiple_pieces (rtx to, rtx len, unsigned int ctz_len,
3902 unsigned HOST_WIDE_INTlong min_len,
3903 unsigned HOST_WIDE_INTlong max_len,
3904 rtx val, char valc, unsigned int align)
3905{
3906 int max_bits = floor_log2 (max_len);
3907 int min_bits = floor_log2 (min_len);
3908 int sctz_len = ctz_len;
3909
3910 gcc_checking_assert (sctz_len >= 0)((void)(!(sctz_len >= 0) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3910, __FUNCTION__), 0 : 0))
;
3911
3912 if (val)
3913 valc = 1;
3914
3915 /* Bits more significant than TST_BITS are part of the shared prefix
3916 in the binary representation of both min_len and max_len. Since
3917 they're identical, we don't need to test them in the loop. */
3918 int tst_bits = (max_bits != min_bits ? max_bits
3919 : floor_log2 (max_len ^ min_len));
3920
3921 /* Check whether it's profitable to start by storing a fixed BLKSIZE
3922 bytes, to lower max_bits. In the unlikely case of a constant LEN
3923 (implied by identical MAX_LEN and MIN_LEN), we want to issue a
3924 single store_by_pieces, but otherwise, select the minimum multiple
3925 of the ALIGN (in bytes) and of the MCD of the possible LENs, that
3926 brings MAX_LEN below TST_BITS, if that's lower than min_len. */
3927 unsigned HOST_WIDE_INTlong blksize;
3928 if (max_len > min_len)
3929 {
3930 unsigned HOST_WIDE_INTlong alrng = MAX (HOST_WIDE_INT_1U << ctz_len,((1UL << ctz_len) > (align / (8)) ? (1UL << ctz_len
) : (align / (8)))
3931 align / BITS_PER_UNIT)((1UL << ctz_len) > (align / (8)) ? (1UL << ctz_len
) : (align / (8)))
;
3932 blksize = max_len - (HOST_WIDE_INT_1U1UL << tst_bits) + alrng;
3933 blksize &= ~(alrng - 1);
3934 }
3935 else if (max_len == min_len)
3936 blksize = max_len;
3937 else
3938 gcc_unreachable ()(fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3938, __FUNCTION__))
;
3939 if (min_len >= blksize)
3940 {
3941 min_len -= blksize;
3942 min_bits = floor_log2 (min_len);
3943 max_len -= blksize;
3944 max_bits = floor_log2 (max_len);
3945
3946 tst_bits = (max_bits != min_bits ? max_bits
3947 : floor_log2 (max_len ^ min_len));
3948 }
3949 else
3950 blksize = 0;
3951
3952 /* Check that we can use store by pieces for the maximum store count
3953 we may issue (initial fixed-size block, plus conditional
3954 power-of-two-sized from max_bits to ctz_len. */
3955 unsigned HOST_WIDE_INTlong xlenest = blksize;
3956 if (max_bits >= 0)
3957 xlenest += ((HOST_WIDE_INT_1U1UL << max_bits) * 2
3958 - (HOST_WIDE_INT_1U1UL << ctz_len));
3959 if (!can_store_by_pieces (xlenest, builtin_memset_read_str,
3960 &valc, align, true))
3961 return false;
3962
3963 by_pieces_constfn constfun;
3964 void *constfundata;
3965 if (val)
3966 {
3967 constfun = builtin_memset_gen_str;
3968 constfundata = val = force_reg (TYPE_MODE (unsigned_char_type_node)((((enum tree_code) ((tree_class_check ((integer_types[itk_unsigned_char
]), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3968, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(integer_types[itk_unsigned_char]) : (integer_types[itk_unsigned_char
])->type_common.mode)
,
Although the value stored to 'val' is used in the enclosing expression, the value is never actually read from 'val'
3969 val);
3970 }
3971 else
3972 {
3973 constfun = builtin_memset_read_str;
3974 constfundata = &valc;
3975 }
3976
3977 rtx ptr = copy_addr_to_reg (convert_to_mode (ptr_mode, XEXP (to, 0)(((to)->u.fld[0]).rt_rtx), 0));
3978 rtx rem = copy_to_mode_reg (ptr_mode, convert_to_mode (ptr_mode, len, 0));
3979 to = replace_equiv_address (to, ptr);
3980 set_mem_align (to, align);
3981
3982 if (blksize)
3983 {
3984 to = store_by_pieces (to, blksize,
3985 constfun, constfundata,
3986 align, true,
3987 max_len != 0 ? RETURN_END : RETURN_BEGIN);
3988 if (max_len == 0)
3989 return true;
3990
3991 /* Adjust PTR, TO and REM. Since TO's address is likely
3992 PTR+offset, we have to replace it. */
3993 emit_move_insn (ptr, force_operand (XEXP (to, 0)(((to)->u.fld[0]).rt_rtx), NULL_RTX(rtx) 0));
3994 to = replace_equiv_address (to, ptr);
3995 rtx rem_minus_blksize = plus_constant (ptr_mode, rem, -blksize);
3996 emit_move_insn (rem, force_operand (rem_minus_blksize, NULL_RTX(rtx) 0));
3997 }
3998
3999 /* Iterate over power-of-two block sizes from the maximum length to
4000 the least significant bit possibly set in the length. */
4001 for (int i = max_bits; i >= sctz_len; i--)
4002 {
4003 rtx_code_label *label = NULL__null;
4004 blksize = HOST_WIDE_INT_1U1UL << i;
4005
4006 /* If we're past the bits shared between min_ and max_len, expand
4007 a test on the dynamic length, comparing it with the
4008 BLKSIZE. */
4009 if (i <= tst_bits)
4010 {
4011 label = gen_label_rtx ();
4012 emit_cmp_and_jump_insns (rem, GEN_INT (blksize)gen_rtx_CONST_INT (((void) 0, E_VOIDmode), (blksize)), LT, NULL__null,
4013 ptr_mode, 1, label,
4014 profile_probability::even ());
4015 }
4016 /* If we are at a bit that is in the prefix shared by min_ and
4017 max_len, skip this BLKSIZE if the bit is clear. */
4018 else if ((max_len & blksize) == 0)
4019 continue;
4020
4021 /* Issue a store of BLKSIZE bytes. */
4022 to = store_by_pieces (to, blksize,
4023 constfun, constfundata,
4024 align, true,
4025 i != sctz_len ? RETURN_END : RETURN_BEGIN);
4026
4027 /* Adjust REM and PTR, unless this is the last iteration. */
4028 if (i != sctz_len)
4029 {
4030 emit_move_insn (ptr, force_operand (XEXP (to, 0)(((to)->u.fld[0]).rt_rtx), NULL_RTX(rtx) 0));
4031 to = replace_equiv_address (to, ptr);
4032 rtx rem_minus_blksize = plus_constant (ptr_mode, rem, -blksize);
4033 emit_move_insn (rem, force_operand (rem_minus_blksize, NULL_RTX(rtx) 0));
4034 }
4035
4036 if (label)
4037 {
4038 emit_label (label);
4039
4040 /* Given conditional stores, the offset can no longer be
4041 known, so clear it. */
4042 clear_mem_offset (to);
4043 }
4044 }
4045
4046 return true;
4047}
4048
4049/* Helper function to do the actual work for expand_builtin_memset. The
4050 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
4051 so that this can also be called without constructing an actual CALL_EXPR.
4052 The other arguments and return value are the same as for
4053 expand_builtin_memset. */
4054
4055static rtx
4056expand_builtin_memset_args (tree dest, tree val, tree len,
4057 rtx target, machine_mode mode, tree orig_exp)
4058{
4059 tree fndecl, fn;
4060 enum built_in_function fcode;
4061 machine_mode val_mode;
4062 char c;
4063 unsigned int dest_align;
4064 rtx dest_mem, dest_addr, len_rtx;
4065 HOST_WIDE_INTlong expected_size = -1;
4066 unsigned int expected_align = 0;
4067 unsigned HOST_WIDE_INTlong min_size;
4068 unsigned HOST_WIDE_INTlong max_size;
4069 unsigned HOST_WIDE_INTlong probable_max_size;
4070
4071 dest_align = get_pointer_alignment (dest);
4072
4073 /* If DEST is not a pointer type, don't do this operation in-line. */
4074 if (dest_align == 0)
4075 return NULL_RTX(rtx) 0;
4076
4077 if (currently_expanding_gimple_stmt)
4078 stringop_block_profile (currently_expanding_gimple_stmt,
4079 &expected_align, &expected_size);
4080
4081 if (expected_align < dest_align)
4082 expected_align = dest_align;
4083
4084 /* If the LEN parameter is zero, return DEST. */
4085 if (integer_zerop (len))
4086 {
4087 /* Evaluate and ignore VAL in case it has side-effects. */
4088 expand_expr (val, const0_rtx(const_int_rtx[64]), VOIDmode((void) 0, E_VOIDmode), EXPAND_NORMAL);
4089 return expand_expr (dest, target, mode, EXPAND_NORMAL);
4090 }
4091
4092 /* Stabilize the arguments in case we fail. */
4093 dest = builtin_save_expr (dest);
4094 val = builtin_save_expr (val);
4095 len = builtin_save_expr (len);
4096
4097 len_rtx = expand_normal (len);
4098 determine_block_size (len, len_rtx, &min_size, &max_size,
4099 &probable_max_size);
4100 dest_mem = get_memory_rtx (dest, len);
4101 val_mode = TYPE_MODE (unsigned_char_type_node)((((enum tree_code) ((tree_class_check ((integer_types[itk_unsigned_char
]), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4101, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(integer_types[itk_unsigned_char]) : (integer_types[itk_unsigned_char
])->type_common.mode)
;
4102
4103 if (TREE_CODE (val)((enum tree_code) (val)->base.code) != INTEGER_CST
4104 || target_char_cast (val, &c))
4105 {
4106 rtx val_rtx;
4107
4108 val_rtx = expand_normal (val);
4109 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
4110
4111 /* Assume that we can memset by pieces if we can store
4112 * the coefficients by pieces (in the required modes).
4113 * We can't pass builtin_memset_gen_str as that emits RTL. */
4114 c = 1;
4115 if (tree_fits_uhwi_p (len)
4116 && can_store_by_pieces (tree_to_uhwi (len),
4117 builtin_memset_read_str, &c, dest_align,
4118 true))
4119 {
4120 val_rtx = force_reg (val_mode, val_rtx);
4121 store_by_pieces (dest_mem, tree_to_uhwi (len),
4122 builtin_memset_gen_str, val_rtx, dest_align,
4123 true, RETURN_BEGIN);
4124 }
4125 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
4126 dest_align, expected_align,
4127 expected_size, min_size, max_size,
4128 probable_max_size)
4129 && !try_store_by_multiple_pieces (dest_mem, len_rtx,
4130 tree_ctz (len),
4131 min_size, max_size,
4132 val_rtx, 0,
4133 dest_align))
4134 goto do_libcall;
4135
4136 dest_mem = force_operand (XEXP (dest_mem, 0)(((dest_mem)->u.fld[0]).rt_rtx), NULL_RTX(rtx) 0);
4137 dest_mem = convert_memory_address (ptr_mode, dest_mem)convert_memory_address_addr_space ((ptr_mode), (dest_mem), 0);
4138 return dest_mem;
4139 }
4140
4141 if (c)
4142 {
4143 if (tree_fits_uhwi_p (len)
4144 && can_store_by_pieces (tree_to_uhwi (len),
4145 builtin_memset_read_str, &c, dest_align,
4146 true))
4147 store_by_pieces (dest_mem, tree_to_uhwi (len),
4148 builtin_memset_read_str, &c, dest_align, true,
4149 RETURN_BEGIN);
4150 else if (!set_storage_via_setmem (dest_mem, len_rtx,
4151 gen_int_mode (c, val_mode),
4152 dest_align, expected_align,
4153 expected_size, min_size, max_size,
4154 probable_max_size)
4155 && !try_store_by_multiple_pieces (dest_mem, len_rtx,
4156 tree_ctz (len),
4157 min_size, max_size,
4158 NULL_RTX(rtx) 0, c,
4159 dest_align))
4160 goto do_libcall;
4161
4162 dest_mem = force_operand (XEXP (dest_mem, 0)(((dest_mem)->u.fld[0]).rt_rtx), NULL_RTX(rtx) 0);
4163 dest_mem = convert_memory_address (ptr_mode, dest_mem)convert_memory_address_addr_space ((ptr_mode), (dest_mem), 0);
4164 return dest_mem;
4165 }
4166
4167 set_mem_align (dest_mem, dest_align);
4168 dest_addr = clear_storage_hints (dest_mem, len_rtx,
4169 CALL_EXPR_TAILCALL (orig_exp)((tree_check ((orig_exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4169, __FUNCTION__, (CALL_EXPR)))->base.addressable_flag
)
4170 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
4171 expected_align, expected_size,
4172 min_size, max_size,
4173 probable_max_size, tree_ctz (len));
4174
4175 if (dest_addr == 0)
4176 {
4177 dest_addr = force_operand (XEXP (dest_mem, 0)(((dest_mem)->u.fld[0]).rt_rtx), NULL_RTX(rtx) 0);
4178 dest_addr = convert_memory_address (ptr_mode, dest_addr)convert_memory_address_addr_space ((ptr_mode), (dest_addr), 0
)
;
4179 }
4180
4181 return dest_addr;
4182
4183 do_libcall:
4184 fndecl = get_callee_fndecl (orig_exp);
4185 fcode = DECL_FUNCTION_CODE (fndecl);
4186 if (fcode == BUILT_IN_MEMSET)
4187 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp)((((orig_exp)) && ((tree_code_type[(int) (((enum tree_code
) ((orig_exp))->base.code))]) >= tcc_reference &&
(tree_code_type[(int) (((enum tree_code) ((orig_exp))->base
.code))]) <= tcc_expression)) ? (orig_exp)->exp.locus :
((location_t) 0))
, fndecl, 3,
4188 dest, val, len);
4189 else if (fcode == BUILT_IN_BZERO)
4190 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp)((((orig_exp)) && ((tree_code_type[(int) (((enum tree_code
) ((orig_exp))->base.code))]) >= tcc_reference &&
(tree_code_type[(int) (((enum tree_code) ((orig_exp))->base
.code))]) <= tcc_expression)) ? (orig_exp)->exp.locus :
((location_t) 0))
, fndecl, 2,
4191 dest, len);
4192 else
4193 gcc_unreachable ()(fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4193, __FUNCTION__))
;
4194 gcc_assert (TREE_CODE (fn) == CALL_EXPR)((void)(!(((enum tree_code) (fn)->base.code) == CALL_EXPR)
? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4194, __FUNCTION__), 0 : 0))
;
4195 CALL_EXPR_TAILCALL (fn)((tree_check ((fn), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4195, __FUNCTION__, (CALL_EXPR)))->base.addressable_flag
)
= CALL_EXPR_TAILCALL (orig_exp)((tree_check ((orig_exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4195, __FUNCTION__, (CALL_EXPR)))->base.addressable_flag
)
;
4196 return expand_call (fn, target, target == const0_rtx(const_int_rtx[64]));
4197}
4198
4199/* Expand expression EXP, which is a call to the bzero builtin. Return
4200 NULL_RTX if we failed the caller should emit a normal call. */
4201
4202static rtx
4203expand_builtin_bzero (tree exp)
4204{
4205 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4206 return NULL_RTX(rtx) 0;
4207
4208 tree dest = CALL_EXPR_ARG (exp, 0)(*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4208, __FUNCTION__, (CALL_EXPR)))), ((0) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4208, __FUNCTION__)))))
;
4209 tree size = CALL_EXPR_ARG (exp, 1)(*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4209, __FUNCTION__, (CALL_EXPR)))), ((1) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4209, __FUNCTION__)))))
;
4210
4211 /* New argument list transforming bzero(ptr x, int y) to
4212 memset(ptr x, int 0, size_t y). This is done this way
4213 so that if it isn't expanded inline, we fallback to
4214 calling bzero instead of memset. */
4215
4216 location_t loc = EXPR_LOCATION (exp)((((exp)) && ((tree_code_type[(int) (((enum tree_code
) ((exp))->base.code))]) >= tcc_reference && (tree_code_type
[(int) (((enum tree_code) ((exp))->base.code))]) <= tcc_expression
)) ? (exp)->exp.locus : ((location_t) 0))
;
4217
4218 return expand_builtin_memset_args (dest, integer_zero_nodeglobal_trees[TI_INTEGER_ZERO],
4219 fold_convert_loc (loc,
4220 size_type_nodeglobal_trees[TI_SIZE_TYPE], size),
4221 const0_rtx(const_int_rtx[64]), VOIDmode((void) 0, E_VOIDmode), exp);
4222}
4223
4224/* Try to expand cmpstr operation ICODE with the given operands.
4225 Return the result rtx on success, otherwise return null. */
4226
4227static rtx
4228expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
4229 HOST_WIDE_INTlong align)
4230{
4231 machine_mode insn_mode = insn_data[icode].operand[0].mode;
4232
4233 if (target && (!REG_P (target)(((enum rtx_code) (target)->code) == REG) || HARD_REGISTER_P (target)((((rhs_regno(target))) < 76))))
4234 target = NULL_RTX(rtx) 0;
4235
4236 class expand_operand ops[4];
4237 create_output_operand (&ops[0], target, insn_mode);
4238 create_fixed_operand (&ops[1], arg1_rtx);
4239 create_fixed_operand (&ops[2], arg2_rtx);
4240 create_integer_operand (&ops[3], align);
4241 if (maybe_expand_insn (icode, 4, ops))
4242 return ops[0].value;
4243 return NULL_RTX(rtx) 0;
4244}
4245
4246/* Expand expression EXP, which is a call to the memcmp built-in function.
4247 Return NULL_RTX if we failed and the caller should emit a normal call,
4248 otherwise try to get the result in TARGET, if convenient.
4249 RESULT_EQ is true if we can relax the returned value to be either zero
4250 or nonzero, without caring about the sign. */
4251
4252static rtx
4253expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
4254{
4255 if (!validate_arglist (exp,
4256 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4257 return NULL_RTX(rtx) 0;
4258
4259 tree arg1 = CALL_EXPR_ARG (exp, 0)(*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4259, __FUNCTION__, (CALL_EXPR)))), ((0) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4259, __FUNCTION__)))))
;
4260 tree arg2 = CALL_EXPR_ARG (exp, 1)(*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4260, __FUNCTION__, (CALL_EXPR)))), ((1) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4260, __FUNCTION__)))))
;
4261 tree len = CALL_EXPR_ARG (exp, 2)(*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4261, __FUNCTION__, (CALL_EXPR)))), ((2) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4261, __FUNCTION__)))))
;
4262
4263 /* Due to the performance benefit, always inline the calls first
4264 when result_eq is false. */
4265 rtx result = NULL_RTX(rtx) 0;
4266 enum built_in_function fcode = DECL_FUNCTION_CODE (get_callee_fndecl (exp));
4267 if (!result_eq && fcode != BUILT_IN_BCMP)
4268 {
4269 result = inline_expand_builtin_bytecmp (exp, target);
4270 if (result)
4271 return result;
4272 }
4273
4274 machine_mode mode = TYPE_MODE (TREE_TYPE (exp))((((enum tree_code) ((tree_class_check ((((contains_struct_check
((exp), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4274, __FUNCTION__))->typed.type)), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4274, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(((contains_struct_check ((exp), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4274, __FUNCTION__))->typed.type)) : (((contains_struct_check
((exp), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4274, __FUNCTION__))->typed.type))->type_common.mode)
;
4275 location_t loc = EXPR_LOCATION (exp)((((exp)) && ((tree_code_type[(int) (((enum tree_code
) ((exp))->base.code))]) >= tcc_reference && (tree_code_type
[(int) (((enum tree_code) ((exp))->base.code))]) <= tcc_expression
)) ? (exp)->exp.locus : ((location_t) 0))
;
4276
4277 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT(8);
4278 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT(8);
4279
4280 /* If we don't have POINTER_TYPE, call the function. */
4281 if (arg1_align == 0 || arg2_align == 0)
4282 return NULL_RTX(rtx) 0;
4283
4284 rtx arg1_rtx = get_memory_rtx (arg1, len);
4285 rtx arg2_rtx = get_memory_rtx (arg2, len);
4286 rtx len_rtx = expand_normal (fold_convert_loc (loc, sizetypesizetype_tab[(int) stk_sizetype], len));
4287
4288 /* Set MEM_SIZE as appropriate. */
4289 if (CONST_INT_P (len_rtx)(((enum rtx_code) (len_rtx)->code) == CONST_INT))
4290 {
4291 set_mem_size (arg1_rtx, INTVAL (len_rtx)((len_rtx)->u.hwint[0]));
4292 set_mem_size (arg2_rtx, INTVAL (len_rtx)((len_rtx)->u.hwint[0]));
4293 }
4294
4295 by_pieces_constfn constfn = NULL__null;
4296
4297 /* Try to get the byte representation of the constant ARG2 (or, only
4298 when the function's result is used for equality to zero, ARG1)
4299 points to, with its byte size in NBYTES. */
4300 unsigned HOST_WIDE_INTlong nbytes;
4301 const char *rep = getbyterep (arg2, &nbytes);
4302 if (result_eq && rep == NULL__null)
4303 {
4304 /* For equality to zero the arguments are interchangeable. */
4305 rep = getbyterep (arg1, &nbytes);
4306 if (rep != NULL__null)
4307 std::swap (arg1_rtx, arg2_rtx);
4308 }
4309
4310 /* If the function's constant bound LEN_RTX is less than or equal
4311 to the byte size of the representation of the constant argument,
4312 and if block move would be done by pieces, we can avoid loading
4313 the bytes from memory and only store the computed constant result. */
4314 if (rep
4315 && CONST_INT_P (len_rtx)(((enum rtx_code) (len_rtx)->code) == CONST_INT)
4316 && (unsigned HOST_WIDE_INTlong) INTVAL (len_rtx)((len_rtx)->u.hwint[0]) <= nbytes)
4317 constfn = builtin_memcpy_read_str;
4318
4319 result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
4320 TREE_TYPE (len)((contains_struct_check ((len), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4320, __FUNCTION__))->typed.type)
, target,
4321 result_eq, constfn,
4322 CONST_CAST (char *, rep)(const_cast<char *> ((rep))));
4323
4324 if (result)
4325 {
4326 /* Return the value in the proper mode for this function. */
4327 if (GET_MODE (result)((machine_mode) (result)->mode) == mode)
4328 return result;
4329
4330 if (target != 0)
4331 {
4332 convert_move (target, result, 0);
4333 return target;
4334 }
4335
4336 return convert_to_mode (mode, result, 0);
4337 }
4338
4339 return NULL_RTX(rtx) 0;
4340}
4341
4342/* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4343 if we failed the caller should emit a normal call, otherwise try to get
4344 the result in TARGET, if convenient. */
4345
4346static rtx
4347expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED__attribute__ ((__unused__)) rtx target)
4348{
4349 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4350 return NULL_RTX(rtx) 0;
4351
4352 tree arg1 = CALL_EXPR_ARG (exp, 0)(*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4352, __FUNCTION__, (CALL_EXPR)))), ((0) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4352, __FUNCTION__)))))
;
4353 tree arg2 = CALL_EXPR_ARG (exp, 1)(*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4353, __FUNCTION__, (CALL_EXPR)))), ((1) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4353, __FUNCTION__)))))
;
4354
4355 /* Due to the performance benefit, always inline the calls first. */
4356 rtx result = NULL_RTX(rtx) 0;
4357 result = inline_expand_builtin_bytecmp (exp, target);
4358 if (result)
4359 return result;
4360
4361 insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode(scalar_int_mode ((scalar_int_mode::from_int) E_SImode)));
4362 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode(scalar_int_mode ((scalar_int_mode::from_int) E_SImode)));
4363 if (cmpstr_icode == CODE_FOR_nothing && cmpstrn_icode == CODE_FOR_nothing)
4364 return NULL_RTX(rtx) 0;
4365
4366 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT(8);
4367 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT(8);
4368
4369 /* If we don't have POINTER_TYPE, call the function. */
4370 if (arg1_align == 0 || arg2_align == 0)
4371 return NULL_RTX(rtx) 0;
4372
4373 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4374 arg1 = builtin_save_expr (arg1);
4375 arg2 = builtin_save_expr (arg2);
4376
4377 rtx arg1_rtx = get_memory_rtx (arg1, NULL__null);
4378 rtx arg2_rtx = get_memory_rtx (arg2, NULL__null);
4379
4380 /* Try to call cmpstrsi. */
4381 if (cmpstr_icode != CODE_FOR_nothing)
4382 result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
4383 MIN (arg1_align, arg2_align)((arg1_align) < (arg2_align) ? (arg1_align) : (arg2_align)
)
);
4384
4385 /* Try to determine at least one length and call cmpstrnsi. */
4386 if (!result && cmpstrn_icode != CODE_FOR_nothing)
4387 {
4388 tree len;
4389 rtx arg3_rtx;
4390
4391 tree len1 = c_strlen (arg1, 1);
4392 tree len2 = c_strlen (arg2, 1);
4393
4394 if (len1)
4395 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1)size_binop_loc (((location_t) 0), PLUS_EXPR, size_int_kind (1
, stk_ssizetype), len1)
;
4396 if (len2)
4397 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2)size_binop_loc (((location_t) 0), PLUS_EXPR, size_int_kind (1
, stk_ssizetype), len2)
;
4398
4399 /* If we don't have a constant length for the first, use the length
4400 of the second, if we know it. We don't require a constant for
4401 this case; some cost analysis could be done if both are available
4402 but neither is constant. For now, assume they're equally cheap,
4403 unless one has side effects. If both strings have constant lengths,
4404 use the smaller. */
4405
4406 if (!len1)
4407 len = len2;
4408 else if (!len2)
4409 len = len1;
4410 else if (TREE_SIDE_EFFECTS (len1)((non_type_check ((len1), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4410, __FUNCTION__))->base.side_effects_flag)
)
4411 len = len2;
4412 else if (TREE_SIDE_EFFECTS (len2)((non_type_check ((len2), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4412, __FUNCTION__))->base.side_effects_flag)
)
4413 len = len1;
4414 else if (TREE_CODE (len1)((enum tree_code) (len1)->base.code) != INTEGER_CST)
4415 len = len2;
4416 else if (TREE_CODE (len2)((enum tree_code) (len2)->base.code) != INTEGER_CST)
4417 len = len1;
4418 else if (tree_int_cst_lt (len1, len2))
4419 len = len1;
4420 else
4421 len = len2;
4422
4423 /* If both arguments have side effects, we cannot optimize. */
4424 if (len && !TREE_SIDE_EFFECTS (len)((non_type_check ((len), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4424, __FUNCTION__))->base.side_effects_flag)
)
4425 {
4426 arg3_rtx = expand_normal (len);
4427 result = expand_cmpstrn_or_cmpmem
4428 (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len)((contains_struct_check ((len), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4428, __FUNCTION__))->typed.type)
,
4429 arg3_rtx, MIN (arg1_align, arg2_align)((arg1_align) < (arg2_align) ? (arg1_align) : (arg2_align)
)
);
4430 }
4431 }
4432
4433 tree fndecl = get_callee_fndecl (exp);
4434 if (result)
4435 {
4436 /* Return the value in the proper mode for this function. */
4437 machine_mode mode = TYPE_MODE (TREE_TYPE (exp))((((enum tree_code) ((tree_class_check ((((contains_struct_check
((exp), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4437, __FUNCTION__))->typed.type)), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4437, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(((contains_struct_check ((exp), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4437, __FUNCTION__))->typed.type)) : (((contains_struct_check
((exp), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4437, __FUNCTION__))->typed.type))->type_common.mode)
;
4438 if (GET_MODE (result)((machine_mode) (result)->mode) == mode)
4439 return result;
4440 if (target == 0)
4441 return convert_to_mode (mode, result, 0);
4442 convert_move (target, result, 0);
4443 return target;
4444 }
4445
4446 /* Expand the library call ourselves using a stabilized argument
4447 list to avoid re-evaluating the function's arguments twice. */
4448 tree fn = build_call_nofold_loc (EXPR_LOCATION (exp)((((exp)) && ((tree_code_type[(int) (((enum tree_code
) ((exp))->base.code))]) >= tcc_reference && (tree_code_type
[(int) (((enum tree_code) ((exp))->base.code))]) <= tcc_expression
)) ? (exp)->exp.locus : ((location_t) 0))
, fndecl, 2, arg1, arg2);
4449 copy_warning (fn, exp);
4450 gcc_assert (TREE_CODE (fn) == CALL_EXPR)((void)(!(((enum tree_code) (fn)->base.code) == CALL_EXPR)
? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4450, __FUNCTION__), 0 : 0))
;
4451 CALL_EXPR_TAILCALL (fn)((tree_check ((fn), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4451, __FUNCTION__, (CALL_EXPR)))->base.addressable_flag
)
= CALL_EXPR_TAILCALL (exp)((tree_check ((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4451, __FUNCTION__, (CALL_EXPR)))->base.addressable_flag
)
;
4452 return expand_call (fn, target, target == const0_rtx(const_int_rtx[64]));
4453}
4454
4455/* Expand expression EXP, which is a call to the strncmp builtin. Return
4456 NULL_RTX if we failed the caller should emit a normal call, otherwise
4457 try to get the result in TARGET, if convenient. */
4458
4459static rtx
4460expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED__attribute__ ((__unused__)) rtx target,
4461 ATTRIBUTE_UNUSED__attribute__ ((__unused__)) machine_mode mode)
4462{
4463 if (!validate_arglist (exp,
4464 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4465 return NULL_RTX(rtx) 0;
4466
4467 tree arg1 = CALL_EXPR_ARG (exp, 0)(*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4467, __FUNCTION__, (CALL_EXPR)))), ((0) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4467, __FUNCTION__)))))
;
4468 tree arg2 = CALL_EXPR_ARG (exp, 1)(*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4468, __FUNCTION__, (CALL_EXPR)))), ((1) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4468, __FUNCTION__)))))
;
4469 tree arg3 = CALL_EXPR_ARG (exp, 2)(*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4469, __FUNCTION__, (CALL_EXPR)))), ((2) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4469, __FUNCTION__)))))
;
4470
4471 location_t loc = EXPR_LOCATION (exp)((((exp)) && ((tree_code_type[(int) (((enum tree_code
) ((exp))->base.code))]) >= tcc_reference && (tree_code_type
[(int) (((enum tree_code) ((exp))->base.code))]) <= tcc_expression
)) ? (exp)->exp.locus : ((location_t) 0))
;
4472 tree len1 = c_strlen (arg1, 1);
4473 tree len2 = c_strlen (arg2, 1);
4474
4475 /* Due to the performance benefit, always inline the calls first. */
4476 rtx result = NULL_RTX(rtx) 0;
4477 result = inline_expand_builtin_bytecmp (exp, target);
4478 if (result)
4479 return result;
4480
4481 /* If c_strlen can determine an expression for one of the string
4482 lengths, and it doesn't have side effects, then emit cmpstrnsi
4483 using length MIN(strlen(string)+1, arg3). */
4484 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode(scalar_int_mode ((scalar_int_mode::from_int) E_SImode)));
4485 if (cmpstrn_icode == CODE_FOR_nothing)
4486 return NULL_RTX(rtx) 0;
4487
4488 tree len;
4489
4490 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT(8);
4491 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT(8);
4492
4493 if (len1)
4494 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1)size_int_kind (1, stk_ssizetype), len1);
4495 if (len2)
4496 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1)size_int_kind (1, stk_ssizetype), len2);
4497
4498 tree len3 = fold_convert_loc (loc, sizetypesizetype_tab[(int) stk_sizetype], arg3);
4499
4500 /* If we don't have a constant length for the first, use the length
4501 of the second, if we know it. If neither string is constant length,
4502 use the given length argument. We don't require a constant for
4503 this case; some cost analysis could be done if both are available
4504 but neither is constant. For now, assume they're equally cheap,
4505 unless one has side effects. If both strings have constant lengths,
4506 use the smaller. */
4507
4508 if (!len1 && !len2)
4509 len = len3;
4510 else if (!len1)
4511 len = len2;
4512 else if (!len2)
4513 len = len1;
4514 else if (TREE_SIDE_EFFECTS (len1)((non_type_check ((len1), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4514, __FUNCTION__))->base.side_effects_flag)
)
4515 len = len2;
4516 else if (TREE_SIDE_EFFECTS (len2)((non_type_check ((len2), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4516, __FUNCTION__))->base.side_effects_flag)
)
4517 len = len1;
4518 else if (TREE_CODE (len1)((enum tree_code) (len1)->base.code) != INTEGER_CST)
4519 len = len2;
4520 else if (TREE_CODE (len2)((enum tree_code) (len2)->base.code) != INTEGER_CST)
4521 len = len1;
4522 else if (tree_int_cst_lt (len1, len2))
4523 len = len1;
4524 else
4525 len = len2;
4526
4527 /* If we are not using the given length, we must incorporate it here.
4528 The actual new length parameter will be MIN(len,arg3) in this case. */
4529 if (len != len3)
4530 {
4531 len = fold_convert_loc (loc, sizetypesizetype_tab[(int) stk_sizetype], len);
4532 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len)((contains_struct_check ((len), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4532, __FUNCTION__))->typed.type)
, len, len3);
4533 }
4534 rtx arg1_rtx = get_memory_rtx (arg1, len);
4535 rtx arg2_rtx = get_memory_rtx (arg2, len);
4536 rtx arg3_rtx = expand_normal (len);
4537 result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
4538 arg2_rtx, TREE_TYPE (len)((contains_struct_check ((len), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4538, __FUNCTION__))->typed.type)
, arg3_rtx,
4539 MIN (arg1_align, arg2_align)((arg1_align) < (arg2_align) ? (arg1_align) : (arg2_align)
)
);
4540
4541 tree fndecl = get_callee_fndecl (exp);
4542 if (result)
4543 {
4544 /* Return the value in the proper mode for this function. */
4545 mode = TYPE_MODE (TREE_TYPE (exp))((((enum tree_code) ((tree_class_check ((((contains_struct_check
((exp), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4545, __FUNCTION__))->typed.type)), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4545, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(((contains_struct_check ((exp), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4545, __FUNCTION__))->typed.type)) : (((contains_struct_check
((exp), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4545, __FUNCTION__))->typed.type))->type_common.mode)
;
4546 if (GET_MODE (result)((machine_mode) (result)->mode) == mode)
4547 return result;
4548 if (target == 0)
4549 return convert_to_mode (mode, result, 0);
4550 convert_move (target, result, 0);
4551 return target;
4552 }
4553
4554 /* Expand the library call ourselves using a stabilized argument
4555 list to avoid re-evaluating the function's arguments twice. */
4556 tree call = build_call_nofold_loc (loc, fndecl, 3, arg1, arg2, len);
4557 copy_warning (call, exp);
4558 gcc_assert (TREE_CODE (call) == CALL_EXPR)((void)(!(((enum tree_code) (call)->base.code) == CALL_EXPR
) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4558, __FUNCTION__), 0 : 0))
;
4559 CALL_EXPR_TAILCALL (call)((tree_check ((call), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4559, __FUNCTION__, (CALL_EXPR)))->base.addressable_flag
)
= CALL_EXPR_TAILCALL (exp)((tree_check ((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4559, __FUNCTION__, (CALL_EXPR)))->base.addressable_flag
)
;
4560 return expand_call (call, target, target == const0_rtx(const_int_rtx[64]));
4561}
4562
4563/* Expand a call to __builtin_saveregs, generating the result in TARGET,
4564 if that's convenient. */
4565
4566rtx
4567expand_builtin_saveregs (void)
4568{
4569 rtx val;
4570 rtx_insn *seq;
4571
4572 /* Don't do __builtin_saveregs more than once in a function.
4573 Save the result of the first call and reuse it. */
4574 if (saveregs_value((&x_rtl)->expr.x_saveregs_value) != 0)
4575 return saveregs_value((&x_rtl)->expr.x_saveregs_value);
4576
4577 /* When this function is called, it means that registers must be
4578 saved on entry to this function. So we migrate the call to the
4579 first insn of this function. */
4580
4581 start_sequence ();
4582
4583 /* Do whatever the machine needs done in this case. */
4584 val = targetm.calls.expand_builtin_saveregs ();
4585
4586 seq = get_insns ();
4587 end_sequence ();
4588
4589 saveregs_value((&x_rtl)->expr.x_saveregs_value) = val;
4590
4591 /* Put the insns after the NOTE that starts the function. If this
4592 is inside a start_sequence, make the outer-level insn chain current, so
4593 the code is placed at the start of the function. */
4594 push_topmost_sequence ();
4595 emit_insn_after (seq, entry_of_function ());
4596 pop_topmost_sequence ();
4597
4598 return val;
4599}
4600
4601/* Expand a call to __builtin_next_arg. */
4602
4603static rtx
4604expand_builtin_next_arg (void)
4605{
4606 /* Checking arguments is already done in fold_builtin_next_arg
4607 that must be called before this function. */
4608 return expand_binop (ptr_mode, add_optab,
4609 crtl(&x_rtl)->args.internal_arg_pointer,
4610 crtl(&x_rtl)->args.arg_offset_rtx,
4611 NULL_RTX(rtx) 0, 0, OPTAB_LIB_WIDEN);
4612}
4613
4614/* Make it easier for the backends by protecting the valist argument
4615 from multiple evaluations. */
4616
4617static tree
4618stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4619{
4620 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist)((contains_struct_check ((valist), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4620, __FUNCTION__))->typed.type)
);
4621
4622 /* The current way of determining the type of valist is completely
4623 bogus. We should have the information on the va builtin instead. */
4624 if (!vatype)
4625 vatype = targetm.fn_abi_va_list (cfun(cfun + 0)->decl);
4626
4627 if (TREE_CODE (vatype)((enum tree_code) (vatype)->base.code) == ARRAY_TYPE)
4628 {
4629 if (TREE_SIDE_EFFECTS (valist)((non_type_check ((valist), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4629, __FUNCTION__))->base.side_effects_flag)
)
4630 valist = save_expr (valist);
4631
4632 /* For this case, the backends will be expecting a pointer to
4633 vatype, but it's possible we've actually been given an array
4634 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4635 So fix it. */
4636 if (TREE_CODE (TREE_TYPE (valist))((enum tree_code) (((contains_struct_check ((valist), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4636, __FUNCTION__))->typed.type))->base.code)
== ARRAY_TYPE)
4637 {
4638 tree p1 = build_pointer_type (TREE_TYPE (vatype)((contains_struct_check ((vatype), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4638, __FUNCTION__))->typed.type)
);
4639 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4640 }
4641 }
4642 else
4643 {
4644 tree pt = build_pointer_type (vatype);
4645
4646 if (! needs_lvalue)
4647 {
4648 if (! TREE_SIDE_EFFECTS (valist)((non_type_check ((valist), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4648, __FUNCTION__))->base.side_effects_flag)
)
4649 return valist;
4650
4651 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4652 TREE_SIDE_EFFECTS (valist)((non_type_check ((valist), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4652, __FUNCTION__))->base.side_effects_flag)
= 1;
4653 }
4654
4655 if (TREE_SIDE_EFFECTS (valist)((non_type_check ((valist), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4655, __FUNCTION__))->base.side_effects_flag)
)
4656 valist = save_expr (valist);
4657 valist = fold_build2_loc (loc, MEM_REF,
4658 vatype, valist, build_int_cst (pt, 0));
4659 }
4660
4661 return valist;
4662}
4663
4664/* The "standard" definition of va_list is void*. */
4665
4666tree
4667std_build_builtin_va_list (void)
4668{
4669 return ptr_type_nodeglobal_trees[TI_PTR_TYPE];
4670}
4671
4672/* The "standard" abi va_list is va_list_type_node. */
4673
4674tree
4675std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED__attribute__ ((__unused__)))
4676{
4677 return va_list_type_nodeglobal_trees[TI_VA_LIST_TYPE];
4678}
4679
4680/* The "standard" type of va_list is va_list_type_node. */
4681
4682tree
4683std_canonical_va_list_type (tree type)
4684{
4685 tree wtype, htype;
4686
4687 wtype = va_list_type_nodeglobal_trees[TI_VA_LIST_TYPE];
4688 htype = type;
4689
4690 if (TREE_CODE (wtype)((enum tree_code) (wtype)->base.code) == ARRAY_TYPE)
4691 {
4692 /* If va_list is an array type, the argument may have decayed
4693 to a pointer type, e.g. by being passed to another function.
4694 In that case, unwrap both types so that we can compare the
4695 underlying records. */
4696 if (TREE_CODE (htype)((enum tree_code) (htype)->base.code) == ARRAY_TYPE
4697 || POINTER_TYPE_P (htype)(((enum tree_code) (htype)->base.code) == POINTER_TYPE || (
(enum tree_code) (htype)->base.code) == REFERENCE_TYPE)
)
4698 {
4699 wtype = TREE_TYPE (wtype)((contains_struct_check ((wtype), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4699, __FUNCTION__))->typed.type)
;
4700 htype = TREE_TYPE (htype)((contains_struct_check ((htype), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4700, __FUNCTION__))->typed.type)
;
4701 }
4702 }
4703 if (TYPE_MAIN_VARIANT (wtype)((tree_class_check ((wtype), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4703, __FUNCTION__))->type_common.main_variant)
== TYPE_MAIN_VARIANT (htype)((tree_class_check ((htype), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4703, __FUNCTION__))->type_common.main_variant)
)
4704 return va_list_type_nodeglobal_trees[TI_VA_LIST_TYPE];
4705
4706 return NULL_TREE(tree) __null;
4707}
4708
4709/* The "standard" implementation of va_start: just assign `nextarg' to
4710 the variable. */
4711
4712void
4713std_expand_builtin_va_start (tree valist, rtx nextarg)
4714{
4715 rtx va_r = expand_expr (valist, NULL_RTX(rtx) 0, VOIDmode((void) 0, E_VOIDmode), EXPAND_WRITE);
4716 convert_move (va_r, nextarg, 0);
4717}
4718
4719/* Expand EXP, a call to __builtin_va_start. */
4720
4721static rtx
4722expand_builtin_va_start (tree exp)
4723{
4724 rtx nextarg;
4725 tree valist;
4726 location_t loc = EXPR_LOCATION (exp)((((exp)) && ((tree_code_type[(int) (((enum tree_code
) ((exp))->base.code))]) >= tcc_reference && (tree_code_type
[(int) (((enum tree_code) ((exp))->base.code))]) <= tcc_expression
)) ? (exp)->exp.locus : ((location_t) 0))
;
4727
4728 if (call_expr_nargs (exp)(((int)((unsigned long) (*tree_int_cst_elt_check (((tree_class_check
((exp), (tcc_vl_exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4728, __FUNCTION__))->exp.operands[0]), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4728, __FUNCTION__)))) - 3)
< 2)
4729 {
4730 error_at (loc, "too few arguments to function %<va_start%>");
4731 return const0_rtx(const_int_rtx[64]);
4732 }
4733
4734 if (fold_builtin_next_arg (exp, true))
4735 return const0_rtx(const_int_rtx[64]);
4736
4737 nextarg = expand_builtin_next_arg ();
4738 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0)(*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4738, __FUNCTION__, (CALL_EXPR)))), ((0) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4738, __FUNCTION__)))))
, 1);
4739
4740 if (targetm.expand_builtin_va_start)
4741 targetm.expand_builtin_va_start (valist, nextarg);
4742 else
4743 std_expand_builtin_va_start (valist, nextarg);
4744
4745 return const0_rtx(const_int_rtx[64]);
4746}
4747
4748/* Expand EXP, a call to __builtin_va_end. */
4749
4750static rtx
4751expand_builtin_va_end (tree exp)
4752{
4753 tree valist = CALL_EXPR_ARG (exp, 0)(*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4753, __FUNCTION__, (CALL_EXPR)))), ((0) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4753, __FUNCTION__)))))
;
4754
4755 /* Evaluate for side effects, if needed. I hate macros that don't
4756 do that. */
4757 if (TREE_SIDE_EFFECTS (valist)((non_type_check ((valist), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4757, __FUNCTION__))->base.side_effects_flag)
)
4758 expand_expr (valist, const0_rtx(const_int_rtx[64]), VOIDmode((void) 0, E_VOIDmode), EXPAND_NORMAL);
4759
4760 return const0_rtx(const_int_rtx[64]);
4761}
4762
4763/* Expand EXP, a call to __builtin_va_copy. We do this as a
4764 builtin rather than just as an assignment in stdarg.h because of the
4765 nastiness of array-type va_list types. */
4766
4767static rtx
4768expand_builtin_va_copy (tree exp)
4769{
4770 tree dst, src, t;
4771 location_t loc = EXPR_LOCATION (exp)((((exp)) && ((tree_code_type[(int) (((enum tree_code
) ((exp))->base.code))]) >= tcc_reference && (tree_code_type
[(int) (((enum tree_code) ((exp))->base.code))]) <= tcc_expression
)) ? (exp)->exp.locus : ((location_t) 0))
;
4772
4773 dst = CALL_EXPR_ARG (exp, 0)(*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4773, __FUNCTION__, (CALL_EXPR)))), ((0) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4773, __FUNCTION__)))))
;
4774 src = CALL_EXPR_ARG (exp, 1)(*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4774, __FUNCTION__, (CALL_EXPR)))), ((1) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4774, __FUNCTION__)))))
;
4775
4776 dst = stabilize_va_list_loc (loc, dst, 1);
4777 src = stabilize_va_list_loc (loc, src, 0);
4778
4779 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE)((void)(!((cfun + 0) != __null && (cfun + 0)->decl
!= (tree) __null) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4779, __FUNCTION__), 0 : 0))
;
4780
4781 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl))((enum tree_code) (targetm.fn_abi_va_list ((cfun + 0)->decl
))->base.code)
!= ARRAY_TYPE)
4782 {
4783 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun(cfun + 0)->decl), dst, src);
4784 TREE_SIDE_EFFECTS (t)((non_type_check ((t), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4784, __FUNCTION__))->base.side_effects_flag)
= 1;
4785 expand_expr (t, const0_rtx(const_int_rtx[64]), VOIDmode((void) 0, E_VOIDmode), EXPAND_NORMAL);
4786 }
4787 else
4788 {
4789 rtx dstb, srcb, size;
4790
4791 /* Evaluate to pointers. */
4792 dstb = expand_expr (dst, NULL_RTX(rtx) 0, Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode (
(scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode
::from_int) E_SImode)))
, EXPAND_NORMAL);
4793 srcb = expand_expr (src, NULL_RTX(rtx) 0, Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode (
(scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode
::from_int) E_SImode)))
, EXPAND_NORMAL);
4794 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl))((tree_class_check ((targetm.fn_abi_va_list ((cfun + 0)->decl
)), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4794, __FUNCTION__))->type_common.size_unit)
,
4795 NULL_RTX(rtx) 0, VOIDmode((void) 0, E_VOIDmode), EXPAND_NORMAL);
4796
4797 dstb = convert_memory_address (Pmode, dstb)convert_memory_address_addr_space (((global_options.x_ix86_pmode
== PMODE_DI ? (scalar_int_mode ((scalar_int_mode::from_int) E_DImode
)) : (scalar_int_mode ((scalar_int_mode::from_int) E_SImode))
)), (dstb), 0)
;
4798 srcb = convert_memory_address (Pmode, srcb)convert_memory_address_addr_space (((global_options.x_ix86_pmode
== PMODE_DI ? (scalar_int_mode ((scalar_int_mode::from_int) E_DImode
)) : (scalar_int_mode ((scalar_int_mode::from_int) E_SImode))
)), (srcb), 0)
;
4799
4800 /* "Dereference" to BLKmode memories. */
4801 dstb = gen_rtx_MEM (BLKmode((void) 0, E_BLKmode), dstb);
4802 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))((contains_struct_check ((((contains_struct_check ((dst), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4802, __FUNCTION__))->typed.type)), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4802, __FUNCTION__))->typed.type)
));
4803 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl))(((tree_class_check ((targetm.fn_abi_va_list ((cfun + 0)->
decl)), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4803, __FUNCTION__))->type_common.align) ? ((unsigned)1)
<< (((tree_class_check ((targetm.fn_abi_va_list ((cfun
+ 0)->decl)), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4803, __FUNCTION__))->type_common.align) - 1) : 0)
);
4804 srcb = gen_rtx_MEM (BLKmode((void) 0, E_BLKmode), srcb);
4805 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))((contains_struct_check ((((contains_struct_check ((src), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4805, __FUNCTION__))->typed.type)), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4805, __FUNCTION__))->typed.type)
));
4806 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl))(((tree_class_check ((targetm.fn_abi_va_list ((cfun + 0)->
decl)), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4806, __FUNCTION__))->type_common.align) ? ((unsigned)1)
<< (((tree_class_check ((targetm.fn_abi_va_list ((cfun
+ 0)->decl)), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4806, __FUNCTION__))->type_common.align) - 1) : 0)
);
4807
4808 /* Copy. */
4809 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4810 }
4811
4812 return const0_rtx(const_int_rtx[64]);
4813}
4814
4815/* Expand a call to one of the builtin functions __builtin_frame_address or
4816 __builtin_return_address. */
4817
4818static rtx
4819expand_builtin_frame_address (tree fndecl, tree exp)
4820{
4821 /* The argument must be a nonnegative integer constant.
4822 It counts the number of frames to scan up the stack.
4823 The value is either the frame pointer value or the return
4824 address saved in that frame. */
4825 if (call_expr_nargs (exp)(((int)((unsigned long) (*tree_int_cst_elt_check (((tree_class_check
((exp), (tcc_vl_exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4825, __FUNCTION__))->exp.operands[0]), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4825, __FUNCTION__)))) - 3)
== 0)
4826 /* Warning about missing arg was already issued. */
4827 return const0_rtx(const_int_rtx[64]);
4828 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)(*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4828, __FUNCTION__, (CALL_EXPR)))), ((0) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4828, __FUNCTION__)))))
))
4829 {
4830 error ("invalid argument to %qD", fndecl);
4831 return const0_rtx(const_int_rtx[64]);
4832 }
4833 else
4834 {
4835 /* Number of frames to scan up the stack. */
4836 unsigned HOST_WIDE_INTlong count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0)(*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4836, __FUNCTION__, (CALL_EXPR)))), ((0) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4836, __FUNCTION__)))))
);
4837
4838 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
4839
4840 /* Some ports cannot access arbitrary stack frames. */
4841 if (tem == NULL__null)
4842 {
4843 warning (0, "unsupported argument to %qD", fndecl);
4844 return const0_rtx(const_int_rtx[64]);
4845 }
4846
4847 if (count)
4848 {
4849 /* Warn since no effort is made to ensure that any frame
4850 beyond the current one exists or can be safely reached. */
4851 warning (OPT_Wframe_address, "calling %qD with "
4852 "a nonzero argument is unsafe", fndecl);
4853 }
4854
4855 /* For __builtin_frame_address, return what we've got. */
4856 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4857 return tem;
4858
4859 if (!REG_P (tem)(((enum rtx_code) (tem)->code) == REG)
4860 && ! CONSTANT_P (tem)((rtx_class[(int) (((enum rtx_code) (tem)->code))]) == RTX_CONST_OBJ
)
)
4861 tem = copy_addr_to_reg (tem);
4862 return tem;
4863 }
4864}
4865
4866/* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4867 failed and the caller should emit a normal call. */
4868
4869static rtx
4870expand_builtin_alloca (tree exp)
4871{
4872 rtx op0;
4873 rtx result;
4874 unsigned int align;
4875 tree fndecl = get_callee_fndecl (exp);
4876 HOST_WIDE_INTlong max_size;
4877 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
4878 bool alloca_for_var = CALL_ALLOCA_FOR_VAR_P (exp)((tree_check ((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4878, __FUNCTION__, (CALL_EXPR)))->base.protected_flag)
;
4879 bool valid_arglist
4880 = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
4881 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, INTEGER_TYPE,
4882 VOID_TYPE)
4883 : fcode == BUILT_IN_ALLOCA_WITH_ALIGN
4884 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
4885 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
4886
4887 if (!valid_arglist)
4888 return NULL_RTX(rtx) 0;
4889
4890 /* Compute the argument. */
4891 op0 = expand_normal (CALL_EXPR_ARG (exp, 0)(*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4891, __FUNCTION__, (CALL_EXPR)))), ((0) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4891, __FUNCTION__)))))
);
4892
4893 /* Compute the alignment. */
4894 align = (fcode == BUILT_IN_ALLOCA
4895 ? BIGGEST_ALIGNMENT(((global_options.x_target_flags & (1U << 12)) != 0
) ? 32 : (((global_options.x_ix86_isa_flags & (1UL <<
15)) != 0) ? 512 : (((global_options.x_ix86_isa_flags & (
1UL << 8)) != 0) ? 256 : 128)))
4896 : TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))((unsigned long) (*tree_int_cst_elt_check (((*((const_cast<
tree*> (tree_operand_check (((tree_check ((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4896, __FUNCTION__, (CALL_EXPR)))), ((1) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4896, __FUNCTION__)))))), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4896, __FUNCTION__)))
);
4897
4898 /* Compute the maximum size. */
4899 max_size = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
4900 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 2))((unsigned long) (*tree_int_cst_elt_check (((*((const_cast<
tree*> (tree_operand_check (((tree_check ((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4900, __FUNCTION__, (CALL_EXPR)))), ((2) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4900, __FUNCTION__)))))), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4900, __FUNCTION__)))
4901 : -1);
4902
4903 /* Allocate the desired space. If the allocation stems from the declaration
4904 of a variable-sized object, it cannot accumulate. */
4905 result
4906 = allocate_dynamic_stack_space (op0, 0, align, max_size, alloca_for_var);
4907 result = convert_memory_address (ptr_mode, result)convert_memory_address_addr_space ((ptr_mode), (result), 0);
4908
4909 /* Dynamic allocations for variables are recorded during gimplification. */
4910 if (!alloca_for_var && (flag_callgraph_infoglobal_options.x_flag_callgraph_info & CALLGRAPH_INFO_DYNAMIC_ALLOC))
4911 record_dynamic_alloc (exp);
4912
4913 return result;
4914}
4915
4916/* Emit a call to __asan_allocas_unpoison call in EXP. Add to second argument
4917 of the call virtual_stack_dynamic_rtx - stack_pointer_rtx, which is the
4918 STACK_DYNAMIC_OFFSET value. See motivation for this in comment to
4919 handle_builtin_stack_restore function. */
4920
4921static rtx
4922expand_asan_emit_allocas_unpoison (tree exp)
4923{
4924 tree arg0 = CALL_EXPR_ARG (exp, 0)(*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4924, __FUNCTION__, (CALL_EXPR)))), ((0) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4924, __FUNCTION__)))))
;
4925 tree arg1 = CALL_EXPR_ARG (exp, 1)(*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4925, __FUNCTION__, (CALL_EXPR)))), ((1) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4925, __FUNCTION__)))))
;
4926 rtx top = expand_expr (arg0, NULL_RTX(rtx) 0, ptr_mode, EXPAND_NORMAL);
4927 rtx bot = expand_expr (arg1, NULL_RTX(rtx) 0, ptr_mode, EXPAND_NORMAL);
4928 rtx off = expand_simple_binop (Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode (
(scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode
::from_int) E_SImode)))
, MINUS, virtual_stack_dynamic_rtx((this_target_rtl->x_global_rtl)[GR_VIRTUAL_STACK_DYNAMIC]
)
,
4929 stack_pointer_rtx((this_target_rtl->x_global_rtl)[GR_STACK_POINTER]), NULL_RTX(rtx) 0, 0,
4930 OPTAB_LIB_WIDEN);
4931 off = convert_modes (ptr_mode, Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode (
(scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode
::from_int) E_SImode)))
, off, 0);
4932 bot = expand_simple_binop (ptr_mode, PLUS, bot, off, NULL_RTX(rtx) 0, 0,
4933 OPTAB_LIB_WIDEN);
4934 rtx ret = init_one_libfunc ("__asan_allocas_unpoison");
4935 ret = emit_library_call_value (ret, NULL_RTX(rtx) 0, LCT_NORMAL, ptr_mode,
4936 top, ptr_mode, bot, ptr_mode);
4937 return ret;
4938}
4939
4940/* Expand a call to bswap builtin in EXP.
4941 Return NULL_RTX if a normal call should be emitted rather than expanding the
4942 function in-line. If convenient, the result should be placed in TARGET.
4943 SUBTARGET may be used as the target for computing one of EXP's operands. */
4944
4945static rtx
4946expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
4947 rtx subtarget)
4948{
4949 tree arg;
4950 rtx op0;
4951
4952 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4953 return NULL_RTX(rtx) 0;
4954
4955 arg = CALL_EXPR_ARG (exp, 0)(*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4955, __FUNCTION__, (CALL_EXPR)))), ((0) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4955, __FUNCTION__)))))
;
4956 op0 = expand_expr (arg,
4957 subtarget && GET_MODE (subtarget)((machine_mode) (subtarget)->mode) == target_mode
4958 ? subtarget : NULL_RTX(rtx) 0,
4959 target_mode, EXPAND_NORMAL);
4960 if (GET_MODE (op0)((machine_mode) (op0)->mode) != target_mode)
4961 op0 = convert_to_mode (target_mode, op0, 1);
4962
4963 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
4964
4965 gcc_assert (target)((void)(!(target) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4965, __FUNCTION__), 0 : 0))
;
4966
4967 return convert_to_mode (target_mode, target, 1);
4968}
4969
4970/* Expand a call to a unary builtin in EXP.
4971 Return NULL_RTX if a normal call should be emitted rather than expanding the
4972 function in-line. If convenient, the result should be placed in TARGET.
4973 SUBTARGET may be used as the target for computing one of EXP's operands. */
4974
4975static rtx
4976expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
4977 rtx subtarget, optab op_optab)
4978{
4979 rtx op0;
4980
4981 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4982 return NULL_RTX(rtx) 0;
4983
4984 /* Compute the argument. */
4985 op0 = expand_expr (CALL_EXPR_ARG (exp, 0)(*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4985, __FUNCTION__, (CALL_EXPR)))), ((0) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4985, __FUNCTION__)))))
,
4986 (subtarget
4987 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))((((enum tree_code) ((tree_class_check ((((contains_struct_check
(((*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4987, __FUNCTION__, (CALL_EXPR)))), ((0) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4987, __FUNCTION__)))))), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4987, __FUNCTION__))->typed.type)), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4987, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(((contains_struct_check (((*((const_cast<tree*> (tree_operand_check
(((tree_check ((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4987, __FUNCTION__, (CALL_EXPR)))), ((0) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4987, __FUNCTION__)))))), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4987, __FUNCTION__))->typed.type)) : (((contains_struct_check
(((*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4987, __FUNCTION__, (CALL_EXPR)))), ((0) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4987, __FUNCTION__)))))), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4987, __FUNCTION__))->typed.type))->type_common.mode)
4988 == GET_MODE (subtarget)((machine_mode) (subtarget)->mode))) ? subtarget : NULL_RTX(rtx) 0,
4989 VOIDmode((void) 0, E_VOIDmode), EXPAND_NORMAL);
4990 /* Compute op, into TARGET if possible.
4991 Set TARGET to wherever the result comes back. */
4992 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))((((enum tree_code) ((tree_class_check ((((contains_struct_check
(((*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4992, __FUNCTION__, (CALL_EXPR)))), ((0) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4992, __FUNCTION__)))))), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4992, __FUNCTION__))->typed.type)), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4992, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(((contains_struct_check (((*((const_cast<tree*> (tree_operand_check
(((tree_check ((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4992, __FUNCTION__, (CALL_EXPR)))), ((0) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4992, __FUNCTION__)))))), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4992, __FUNCTION__))->typed.type)) : (((contains_struct_check
(((*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4992, __FUNCTION__, (CALL_EXPR)))), ((0) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4992, __FUNCTION__)))))), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4992, __FUNCTION__))->typed.type))->type_common.mode)
,
4993 op_optab, op0, target, op_optab != clrsb_optab);
4994 gcc_assert (target)((void)(!(target) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4994, __FUNCTION__), 0 : 0))
;
4995
4996 return convert_to_mode (target_mode, target, 0);
4997}
4998
4999/* Expand a call to __builtin_expect. We just return our argument
5000 as the builtin_expect semantic should've been already executed by
5001 tree branch prediction pass. */
5002
5003static rtx
5004expand_builtin_expect (tree exp, rtx target)
5005{
5006 tree arg;
5007
5008 if (call_expr_nargs (exp)(((int)((unsigned long) (*tree_int_cst_elt_check (((tree_class_check
((exp), (tcc_vl_exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 5008, __FUNCTION__))->exp.operands[0]), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 5008, __FUNCTION__)))) - 3)
< 2)
5009 return const0_rtx(const_int_rtx[64]);
5010 arg = CALL_EXPR_ARG (exp, 0)(*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 5010, __FUNCTION__, (CALL_EXPR)))), ((0) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 5010, __FUNCTION__)))))
;
5011
5012 target = expand_expr (arg, target, VOIDmode((void) 0, E_VOIDmode), EXPAND_NORMAL);
5013 /* When guessing was done, the hints should be already stripped away. */
5014 gcc_assert (!flag_guess_branch_prob((void)(!(!global_options.x_flag_guess_branch_prob || global_options
.x_optimize == 0 || seen_error ()) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 5015, __FUNCTION__), 0 : 0))
5015 || optimize == 0 || seen_error ())((void)(!(!global_options.x_flag_guess_branch_prob || global_options
.x_optimize == 0 || seen_error ()) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 5015, __FUNCTION__), 0 : 0))
;
5016 return target;
5017}
5018
5019/* Expand a call to __builtin_expect_with_probability. We just return our
5020 argument as the builtin_expect semantic should've been already executed by
5021 tree branch prediction pass. */
5022
5023static rtx
5024expand_builtin_expect_with_probability (tree exp, rtx target)
5025{
5026 tree arg;
5027
5028 if (call_expr_nargs (exp)(((int)((unsigned long) (*tree_int_cst_elt_check (((tree_class_check
((exp), (tcc_vl_exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 5028, __FUNCTION__))->exp.operands[0]), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 5028, __FUNCTION__)))) - 3)
< 3)
5029 return const0_rtx(const_int_rtx[64]);
5030 arg = CALL_EXPR_ARG (exp, 0)(*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 5030, __FUNCTION__, (CALL_EXPR)))), ((0) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 5030, __FUNCTION__)))))
;
5031
5032 target = expand_expr (arg, target, VOIDmode((void) 0, E_VOIDmode), EXPAND_NORMAL);
5033 /* When guessing was done, the hints should be already stripped away. */
5034 gcc_assert (!flag_guess_branch_prob((void)(!(!global_options.x_flag_guess_branch_prob || global_options
.x_optimize == 0 || seen_error ()) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 5035, __FUNCTION__), 0 : 0))
5035 || optimize == 0 || seen_error ())((void)(!(!global_options.x_flag_guess_branch_prob || global_options
.x_optimize == 0 || seen_error ()) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 5035, __FUNCTION__), 0 : 0))
;
5036 return target;
5037}
5038
5039
5040/* Expand a call to __builtin_assume_aligned. We just return our first
5041 argument as the builtin_assume_aligned semantic should've been already
5042 executed by CCP. */
5043
5044static rtx
5045expand_builtin_assume_aligned (tree exp, rtx target)
5046{
5047 if (call_expr_nargs (exp)(((int)((unsigned long) (*tree_int_cst_elt_check (((tree_class_check
((exp), (tcc_vl_exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 5047, __FUNCTION__))->exp.operands[0]), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 5047, __FUNCTION__)))) - 3)
< 2)
5048 return const0_rtx(const_int_rtx[64]);
5049 target = expand_expr (CALL_EXPR_ARG (exp, 0)(*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 5049, __FUNCTION__, (CALL_EXPR)))), ((0) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 5049, __FUNCTION__)))))
, target, VOIDmode((void) 0, E_VOIDmode),
5050 EXPAND_NORMAL);
5051 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))((void)(!(!((non_type_check (((*((const_cast<tree*> (tree_operand_check
(((tree_check ((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 5051, __FUNCTION__, (CALL_EXPR)))), ((1) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 5051, __FUNCTION__)))))), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 5051, __FUNCTION__))->base.side_effects_flag) &&
((((int)((unsigned long) (*tree_int_cst_elt_check (((tree_class_check
((exp), (tcc_vl_exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 5052, __FUNCTION__))->exp.operands[0]), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 5052, __FUNCTION__)))) - 3) < 3 || !((non_type_check (((
*((const_cast<tree*> (tree_operand_check (((tree_check (
(exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 5053, __FUNCTION__, (CALL_EXPR)))), ((2) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 5053, __FUNCTION__)))))), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 5053, __FUNCTION__))->base.side_effects_flag))) ? fancy_abort
("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 5053, __FUNCTION__), 0 : 0))
5052 && (call_expr_nargs (exp) < 3((void)(!(!((non_type_check (((*((const_cast<tree*> (tree_operand_check
(((tree_check ((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 5051, __FUNCTION__, (CALL_EXPR)))), ((1) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 5051, __FUNCTION__)))))), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 5051, __FUNCTION__))->base.side_effects_flag) &&
((((int)((unsigned long) (*tree_int_cst_elt_check (((tree_class_check
((exp), (tcc_vl_exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 5052, __FUNCTION__))->exp.operands[0]), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 5052, __FUNCTION__)))) - 3) < 3 || !((non_type_check (((
*((const_cast<tree*> (tree_operand_check (((tree_check (
(exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 5053, __FUNCTION__, (CALL_EXPR)))), ((2) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 5053, __FUNCTION__)))))), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 5053, __FUNCTION__))->base.side_effects_flag))) ? fancy_abort
("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 5053, __FUNCTION__), 0 : 0))
5053 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))))((void)(!(!((non_type_check (((*((const_cast<tree*> (tree_operand_check
(((tree_check ((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 5051, __FUNCTION__, (CALL_EXPR)))), ((1) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 5051, __FUNCTION__)))))), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 5051, __FUNCTION__))->base.side_effects_flag) &&
((((int)((unsigned long) (*tree_int_cst_elt_check (((tree_class_check
((exp), (tcc_vl_exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 5052, __FUNCTION__))->exp.operands[0]), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 5052, __FUNCTION__)))) - 3) < 3 || !((non_type_check (((
*((const_cast<tree*> (tree_operand_check (((tree_check (
(exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 5053, __FUNCTION__, (CALL_EXPR)))), ((2) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 5053, __FUNCTION__)))))), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 5053, __FUNCTION__))->base.side_effects_flag))) ? fancy_abort
("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 5053, __FUNCTION__), 0 : 0))
;
5054 return target;
5055}
5056
5057void
5058expand_builtin_trap (void)
5059{
5060 if (targetm.have_trap ())
5061 {
5062 rtx_insn *insn = emit_insn (targetm.gen_trap ());
5063 /* For trap insns when not accumulating outgoing args force
5064 REG_ARGS_SIZE note to prevent crossjumping of calls with
5065 different args sizes. */
5066 if (!ACCUMULATE_OUTGOING_ARGS((((global_options.x_target_flags & (1U << 3)) != 0
) && optimize_function_for_speed_p ((cfun + 0))) || (
(cfun + 0)->machine->func_type != TYPE_NORMAL &&
(&x_rtl)->stack_realign_needed) || ((global_options.x_target_flags
& (1U << 26)) != 0) || (((global_options.x_ix86_isa_flags
& (1UL << 1)) != 0) && ix86_cfun_abi () ==
MS_ABI) || (0 && (&x_rtl)->profile))
)
5067 add_args_size_note (insn, stack_pointer_delta((&x_rtl)->expr.x_stack_pointer_delta));
5068 }
5069 else
5070 {
5071 tree fn = builtin_decl_implicit (BUILT_IN_ABORT);
5072 tree call_expr = build_call_expr (fn, 0);
5073 expand_call (call_expr, NULL_RTX(rtx) 0, false);
5074 }
5075
5076 emit_barrier ();
5077}
5078
5079/* Expand a call to __builtin_unreachable. We do nothing except emit
5080 a barrier saying that control flow will not pass here.
5081
5082 It is the responsibility of the program being compiled to ensure
5083 that control flow does never reach __builtin_unreachable. */
5084static void
5085expand_builtin_unreachable (void)
5086{
5087 emit_barrier ();
5088}
5089
5090/* Expand EXP, a call to fabs, fabsf or fabsl.
5091 Return NULL_RTX if a normal call should be emitted rather than expanding
5092 the function inline. If convenient, the result should be placed
5093 in TARGET. SUBTARGET may be used as the target for computing
5094 the operand. */
5095
5096static rtx
5097expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5098{
5099 machine_mode mode;
5100 tree arg;
5101 rtx op0;
5102
5103 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5104 return NULL_RTX(rtx) 0;
5105
5106 arg = CALL_EXPR_ARG (exp, 0)(*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 5106, __FUNCTION__, (CALL_EXPR)))), ((0) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 5106, __FUNCTION__)))))
;
5107 CALL_EXPR_ARG (exp, 0)(*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 5107, __FUNCTION__, (CALL_EXPR)))), ((0) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 5107, __FUNCTION__)))))
= arg = builtin_save_expr (arg);
5108 mode = TYPE_MODE (TREE_TYPE (arg))((((enum tree_code) ((tree_class_check ((((contains_struct_check
((arg), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 5108, __FUNCTION__))->typed.type)), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 5108, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(((contains_struct_check ((arg), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 5108, __FUNCTION__))->typed.type)) : (((contains_struct_check
((arg), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 5108, __FUNCTION__))->typed.type))->type_common.mode)
;
5109 op0 = expand_expr (arg, subtarget, VOIDmode((void) 0, E_VOIDmode), EXPAND_NORMAL);
5110 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5111}
5112
5113/* Expand EXP, a call to copysign, copysignf, or copysignl.
5114 Return NULL is a normal call should be emitted rather than expanding the
5115 function inline. If convenient, the result should be placed in TARGET.
5116 SUBTARGET may be used as the target for computing the operand. */
5117
5118static rtx
5119expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5120{
5121 rtx op0, op1;
5122 tree arg;
5123
5124 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5125 return NULL_RTX(rtx) 0;
5126
5127 arg = CALL_EXPR_ARG (exp, 0)(*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 5127, __FUNCTION__, (CALL_EXPR)))), ((0) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 5127, __FUNCTION__)))))
;
5128 op0 = expand_expr (arg, subtarget, VOIDmode((void) 0, E_VOIDmode), EXPAND_NORMAL);
5129
5130 arg = CALL_EXPR_ARG (exp, 1)(*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 5130, __FUNCTION__, (CALL_EXPR)))), ((1) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 5130, __FUNCTION__)))))
;
5131 op1 = expand_normal (arg);
5132
5133 return expand_copysign (op0, op1, target);
5134}
5135
5136/* Emit a call to __builtin___clear_cache. */
5137
5138void
5139default_emit_call_builtin___clear_cache (rtx begin, rtx end)
5140{
5141 rtx callee = gen_rtx_SYMBOL_REF (Pmode,gen_rtx_fmt_s0_stat ((SYMBOL_REF), (((global_options.x_ix86_pmode
== PMODE_DI ? (scalar_int_mode ((scalar_int_mode::from_int) E_DImode
)) : (scalar_int_mode ((scalar_int_mode::from_int) E_SImode))
))), (((((const char *) (tree_check ((decl_assembler_name (builtin_decl_explicit
(BUILT_IN_CLEAR_CACHE))), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 5143, __FUNCTION__, (IDENTIFIER_NODE)))->identifier.id.str
)))) )
5142 BUILTIN_ASM_NAME_PTRgen_rtx_fmt_s0_stat ((SYMBOL_REF), (((global_options.x_ix86_pmode
== PMODE_DI ? (scalar_int_mode ((scalar_int_mode::from_int) E_DImode
)) : (scalar_int_mode ((scalar_int_mode::from_int) E_SImode))
))), (((((const char *) (tree_check ((decl_assembler_name (builtin_decl_explicit
(BUILT_IN_CLEAR_CACHE))), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 5143, __FUNCTION__, (IDENTIFIER_NODE)))->identifier.id.str
)))) )
5143 (BUILT_IN_CLEAR_CACHE))gen_rtx_fmt_s0_stat ((SYMBOL_REF), (((global_options.x_ix86_pmode
== PMODE_DI ? (scalar_int_mode ((scalar_int_mode::from_int) E_DImode
)) : (scalar_int_mode ((scalar_int_mode::from_int) E_SImode))
))), (((((const char *) (tree_check ((decl_assembler_name (builtin_decl_explicit
(BUILT_IN_CLEAR_CACHE))), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 5143, __FUNCTION__, (IDENTIFIER_NODE)))->identifier.id.str
)))) )
;
5144
5145 emit_library_call (callee,
5146 LCT_NORMAL, VOIDmode((void) 0, E_VOIDmode),
5147 convert_memory_address (ptr_mode, begin)convert_memory_address_addr_space ((ptr_mode), (begin), 0), ptr_mode,
5148 convert_memory_address (ptr_mode, end)convert_memory_address_addr_space ((ptr_mode), (end), 0), ptr_mode);
5149}
5150
5151/* Emit a call to __builtin___clear_cache, unless the target specifies
5152 it as do-nothing. This function can be used by trampoline
5153 finalizers to duplicate the effects of expanding a call to the
5154 clear_cache builtin. */
5155
5156void
5157maybe_emit_call_builtin___clear_cache (rtx begin, rtx end)
5158{
5159 gcc_assert ((GET_MODE (begin) == ptr_mode || GET_MODE (begin) == Pmode((void)(!((((machine_mode) (begin)->mode) == ptr_mode || (
(machine_mode) (begin)->mode) == (global_options.x_ix86_pmode
== PMODE_DI ? (scalar_int_mode ((scalar_int_mode::from_int) E_DImode
)) : (scalar_int_mode ((scalar_int_mode::from_int) E_SImode))
) || (((enum rtx_code) (begin)->code) == CONST_INT)) &&
(((machine_mode) (end)->mode) == ptr_mode || ((machine_mode
) (end)->mode) == (global_options.x_ix86_pmode == PMODE_DI
? (scalar_int_mode ((scalar_int_mode::from_int) E_DImode)) :
(scalar_int_mode ((scalar_int_mode::from_int) E_SImode))) ||
(((enum rtx_code) (end)->code) == CONST_INT))) ? fancy_abort
("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 5162, __FUNCTION__), 0 : 0))
5160 || CONST_INT_P (begin))((void)(!((((machine_mode) (begin)->mode) == ptr_mode || (
(machine_mode) (begin)->mode) == (global_options.x_ix86_pmode
== PMODE_DI ? (scalar_int_mode ((scalar_int_mode::from_int) E_DImode
)) : (scalar_int_mode ((scalar_int_mode::from_int) E_SImode))
) || (((enum rtx_code) (begin)->code) == CONST_INT)) &&
(((machine_mode) (end)->mode) == ptr_mode || ((machine_mode
) (end)->mode) == (global_options.x_ix86_pmode == PMODE_DI
? (scalar_int_mode ((scalar_int_mode::from_int) E_DImode)) :
(scalar_int_mode ((scalar_int_mode::from_int) E_SImode))) ||
(((enum rtx_code) (end)->code) == CONST_INT))) ? fancy_abort
("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 5162, __FUNCTION__), 0 : 0))
5161 && (GET_MODE (end) == ptr_mode || GET_MODE (end) == Pmode((void)(!((((machine_mode) (begin)->mode) == ptr_mode || (
(machine_mode) (begin)->mode) == (global_options.x_ix86_pmode
== PMODE_DI ? (scalar_int_mode ((scalar_int_mode::from_int) E_DImode
)) : (scalar_int_mode ((scalar_int_mode::from_int) E_SImode))
) || (((enum rtx_code) (begin)->code) == CONST_INT)) &&
(((machine_mode) (end)->mode) == ptr_mode || ((machine_mode
) (end)->mode) == (global_options.x_ix86_pmode == PMODE_DI
? (scalar_int_mode ((scalar_int_mode::from_int) E_DImode)) :
(scalar_int_mode ((scalar_int_mode::from_int) E_SImode))) ||
(((enum rtx_code) (end)->code) == CONST_INT))) ? fancy_abort
("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 5162, __FUNCTION__), 0 : 0))
5162 || CONST_INT_P (end)))((void)(!((((machine_mode) (begin)->mode) == ptr_mode || (
(machine_mode) (begin)->mode) == (global_options.x_ix86_pmode
== PMODE_DI ? (scalar_int_mode ((scalar_int_mode::from_int) E_DImode
)) : (scalar_int_mode ((scalar_int_mode::from_int) E_SImode))
) || (((enum rtx_code) (begin)->code) == CONST_INT)) &&
(((machine_mode) (end)->mode) == ptr_mode || ((machine_mode
) (end)->mode) == (global_options.x_ix86_pmode == PMODE_DI
? (scalar_int_mode ((scalar_int_mode::from_int) E_DImode)) :
(scalar_int_mode ((scalar_int_mode::from_int) E_SImode))) ||
(((enum rtx_code) (end)->code) == CONST_INT))) ? fancy_abort
("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 5162, __FUNCTION__), 0 : 0))
;
5163
5164 if (targetm.have_clear_cache ())
5165 {
5166 /* We have a "clear_cache" insn, and it will handle everything. */
5167 class expand_operand ops[2];
5168
5169 create_address_operand (&ops[0], begin);
5170 create_address_operand (&ops[1], end);
5171
5172 if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
5173 return;
5174 }
5175 else
5176 {
5177#ifndef CLEAR_INSN_CACHE
5178 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5179 does nothing. There is no need to call it. Do nothing. */
5180 return;
5181#endif /* CLEAR_INSN_CACHE */
5182 }
5183
5184 targetm.calls.emit_call_builtin___clear_cache (begin, end);
5185}
5186
5187/* Expand a call to __builtin___clear_cache. */
5188
5189static void
5190expand_builtin___clear_cache (tree exp)
5191{
5192 tree begin, end;
5193 rtx begin_rtx, end_rtx;
5194
5195 /* We must not expand to a library call. If we did, any
5196 fallback library function in libgcc that might contain a call to
5197 __builtin___clear_cache() would recurse infinitely. */
5198 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5199 {
5200 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5201 return;
5202 }
5203
5204 begin = CALL_EXPR_ARG (exp, 0)(*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 5204, __FUNCTION__, (CALL_EXPR)))), ((0) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 5204, __FUNCTION__)))))
;
5205 begin_rtx = expand_expr (begin, NULL_RTX(rtx) 0, Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode (
(scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode
::from_int) E_SImode)))
, EXPAND_NORMAL);
5206
5207 end = CALL_EXPR_ARG (exp, 1)(*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 5207, __FUNCTION__, (CALL_EXPR)))), ((1) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 5207, __FUNCTION__)))))
;
5208 end_rtx = expand_expr (end, NULL_RTX(rtx) 0, Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode (
(scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode
::from_int) E_SImode)))
, EXPAND_NORMAL);
5209
5210 maybe_emit_call_builtin___clear_cache (begin_rtx, end_rtx);
5211}
5212
5213/* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5214
5215static rtx
5216round_trampoline_addr (rtx tramp)
5217{
5218 rtx temp, addend, mask;
5219
5220 /* If we don't need too much alignment, we'll have been guaranteed
5221 proper alignment by get_trampoline_type. */
5222 if (TRAMPOLINE_ALIGNMENT(lang_hooks.custom_function_descriptors && targetm.calls
.custom_function_descriptors > 0 ? (((8)) > (2 * targetm
.calls.custom_function_descriptors * (8)) ? ((8)) : (2 * targetm
.calls.custom_function_descriptors * (8))) : (8))
<= STACK_BOUNDARY((((global_options.x_ix86_isa_flags & (1UL << 1)) !=
0) && ix86_cfun_abi () == MS_ABI) ? 128 : ((8) * (((
global_options.x_ix86_isa_flags & (1UL << 1)) != 0)
? 8 : 4)))
)
5223 return tramp;
5224
5225 /* Round address up to desired boundary. */
5226 temp = gen_reg_rtx (Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode (
(scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode
::from_int) E_SImode)))
);
5227 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT(lang_hooks.custom_function_descriptors && targetm.calls
.custom_function_descriptors > 0 ? (((8)) > (2 * targetm
.calls.custom_function_descriptors * (8)) ? ((8)) : (2 * targetm
.calls.custom_function_descriptors * (8))) : (8))
/ BITS_PER_UNIT(8) - 1, Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode (
(scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode
::from_int) E_SImode)))
);
5228 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT(lang_hooks.custom_function_descriptors && targetm.calls
.custom_function_descriptors > 0 ? (((8)) > (2 * targetm
.calls.custom_function_descriptors * (8)) ? ((8)) : (2 * targetm
.calls.custom_function_descriptors * (8))) : (8))
/ BITS_PER_UNIT(8), Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode (
(scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode
::from_int) E_SImode)))
);
5229
5230 temp = expand_simple_binop (Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode (
(scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode
::from_int) E_SImode)))
, PLUS, tramp, addend,
5231 temp, 0, OPTAB_LIB_WIDEN);
5232 tramp = expand_simple_binop (Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode (
(scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode
::from_int) E_SImode)))
, AND, temp, mask,
5233 temp, 0, OPTAB_LIB_WIDEN);
5234
5235 return tramp;
5236}
5237
5238static rtx
5239expand_builtin_init_trampoline (tree exp, bool onstack)
5240{
5241 tree t_tramp, t_func, t_chain;
5242 rtx m_tramp, r_tramp, r_chain, tmp;
5243
5244 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5245 POINTER_TYPE, VOID_TYPE))
5246 return NULL_RTX(rtx) 0;
5247
5248 t_tramp = CALL_EXPR_ARG (exp, 0)(*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 5248, __FUNCTION__, (CALL_EXPR)))), ((0) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 5248, __FUNCTION__)))))
;
5249 t_func = CALL_EXPR_ARG (exp, 1)(*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 5249, __FUNCTION__, (CALL_EXPR)))), ((1) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 5249, __FUNCTION__)))))
;
5250 t_chain = CALL_EXPR_ARG (exp, 2)(*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 5250, __FUNCTION__, (CALL_EXPR)))), ((2) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 5250, __FUNCTION__)))))
;
5251
5252 r_tramp = expand_normal (t_tramp);
5253 m_tramp = gen_rtx_MEM (BLKmode((void) 0, E_BLKmode), r_tramp);
5254 MEM_NOTRAP_P (m_tramp)(__extension__ ({ __typeof ((m_tramp)) const _rtx = ((m_tramp
)); if (((enum rtx_code) (_rtx)->code) != MEM) rtl_check_failed_flag
("MEM_NOTRAP_P", _rtx, "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 5254, __FUNCTION__); _rtx; })->call)
= 1;
5255
5256 /* If ONSTACK, the TRAMP argument should be the address of a field
5257 within the local function's FRAME decl. Either way, let's see if
5258 we can fill in the MEM_ATTRs for this memory. */
5259 if (TREE_CODE (t_tramp)((enum tree_code) (t_tramp)->base.code) == ADDR_EXPR)
5260 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0)(*((const_cast<tree*> (tree_operand_check ((t_tramp), (
0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 5260, __FUNCTION__)))))
, true);
5261
5262 /* Creator of a heap trampoline is responsible for making sure the
5263 address is aligned to at least STACK_BOUNDARY. Normally malloc
5264 will ensure this anyhow. */
5265 tmp = round_trampoline_addr (r_tramp);
5266 if (tmp != r_tramp)
5267 {
5268 m_tramp = change_address (m_tramp, BLKmode((void) 0, E_BLKmode), tmp);
5269 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT(lang_hooks.custom_function_descriptors && targetm.calls
.custom_function_descriptors > 0 ? (((8)) > (2 * targetm
.calls.custom_function_descriptors * (8)) ? ((8)) : (2 * targetm
.calls.custom_function_descriptors * (8))) : (8))
);
5270 set_mem_size (m_tramp, TRAMPOLINE_SIZE(((global_options.x_ix86_isa_flags & (1UL << 1)) !=
0) ? 28 : 14)
);
5271 }
5272
5273 /* The FUNC argument should be the address of the nested function.
5274 Extract the actual function decl to pass to the hook. */
5275 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR)((void)(!(((enum tree_code) (t_func)->base.code) == ADDR_EXPR
) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 5275, __FUNCTION__), 0 : 0))
;
5276 t_func = TREE_OPERAND (t_func, 0)(*((const_cast<tree*> (tree_operand_check ((t_func), (0
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 5276, __FUNCTION__)))))
;
5277 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL)((void)(!(((enum tree_code) (t_func)->base.code) == FUNCTION_DECL
) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 5277, __FUNCTION__), 0 : 0))
;