File: build/gcc/builtins.cc
Warning: line 4381, column 22: Although the value stored to 'val' is used in the enclosing expression, the value is never actually read from 'val'

/* Expand builtin functions.
   Copyright (C) 1988-2023 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Legacy warning!  Please add no further builtin simplifications here
   (apart from pure constant folding) - builtin simplifications should go
   to match.pd or gimple-fold.cc instead.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "memmodel.h"
#include "gimple.h"
#include "predict.h"
#include "tm_p.h"
#include "stringpool.h"
#include "tree-vrp.h"
#include "tree-ssanames.h"
#include "expmed.h"
#include "optabs.h"
#include "emit-rtl.h"
#include "recog.h"
#include "diagnostic-core.h"
#include "alias.h"
#include "fold-const.h"
#include "fold-const-call.h"
#include "gimple-ssa-warn-access.h"
#include "stor-layout.h"
#include "calls.h"
#include "varasm.h"
#include "tree-object-size.h"
#include "tree-ssa-strlen.h"
#include "realmpfr.h"
#include "cfgrtl.h"
#include "except.h"
#include "dojump.h"
#include "explow.h"
#include "stmt.h"
#include "expr.h"
#include "libfuncs.h"
#include "output.h"
#include "typeclass.h"
#include "langhooks.h"
#include "value-prof.h"
#include "builtins.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "internal-fn.h"
#include "case-cfn-macros.h"
#include "gimple-iterator.h"
#include "gimple-fold.h"
#include "intl.h"
#include "file-prefix-map.h" /* remap_macro_filename()  */
#include "gomp-constants.h"
#include "omp-general.h"
#include "tree-dfa.h"
#include "gimple-ssa.h"
#include "tree-ssa-live.h"
#include "tree-outof-ssa.h"
#include "attr-fnspec.h"
#include "demangle.h"
#include "gimple-range.h"
#include "pointer-query.h"

struct target_builtins default_target_builtins;
#if SWITCHABLE_TARGET
struct target_builtins *this_target_builtins = &default_target_builtins;
#endif

/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[BUILT_IN_LAST]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};

/* Setup an array of builtin_info_type, make sure each element decl is
   initialized to NULL_TREE.  */
builtin_info_type builtin_info[(int)END_BUILTINS];

/* Non-zero if __builtin_constant_p should be folded right away.  */
bool force_folding_builtin_constant_p;
static int target_char_cast (tree, char *);
static int apply_args_size (void);
static int apply_result_size (void);
static rtx result_vector (int, rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_fegetround (tree, rtx, machine_mode);
static rtx expand_builtin_feclear_feraise_except (tree, rtx, machine_mode,
						  optab);
static rtx expand_builtin_cexpi (tree, rtx);
static rtx expand_builtin_issignaling (tree, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx inline_expand_builtin_bytecmp (tree, rtx);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_memory_copy_args (tree dest, tree src, tree len,
					    rtx target, tree exp,
					    memop_ret retmode,
					    bool might_overlap);
static rtx expand_builtin_memmove (tree, rtx);
static rtx expand_builtin_mempcpy (tree, rtx);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx, tree, memop_ret);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
static rtx expand_builtin_strncpy (tree, rtx);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, machine_mode);
static rtx expand_builtin_strnlen (tree, rtx, machine_mode);
static rtx expand_builtin_alloca (tree);
static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static rtx expand_builtin_expect_with_probability (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_memcmp (location_t, tree, tree, tree);
static tree fold_builtin_isascii (location_t, tree);
static tree fold_builtin_toascii (location_t, tree);
static tree fold_builtin_isdigit (location_t, tree);
static tree fold_builtin_fabs (location_t, tree, tree);
static tree fold_builtin_abs (location_t, tree, tree);
static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
					enum tree_code);
static tree fold_builtin_varargs (location_t, tree, tree*, int);

static tree fold_builtin_strpbrk (location_t, tree, tree, tree, tree);
static tree fold_builtin_strspn (location_t, tree, tree, tree);
static tree fold_builtin_strcspn (location_t, tree, tree, tree);

static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
				      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static tree fold_builtin_object_size (tree, tree, enum built_in_function);

unsigned HOST_WIDE_INT target_newline;
unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
char target_percent_c[3];
char target_percent_s[3];
char target_percent_s_newline[4];
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
static void expand_builtin_sync_synchronize (void);

/* Return true if NAME starts with __builtin_, __sync_ or __atomic_.  */

static bool
is_builtin_name (const char *name)
{
  return (startswith (name, "__builtin_")
	  || startswith (name, "__sync_")
	  || startswith (name, "__atomic_"));
}
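
/* For example, this returns true for "__builtin_memcpy",
   "__sync_fetch_and_add" and "__atomic_load_n", but false for plain
   "memcpy", since only the prefix is checked.  */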

/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}

/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in *ALIGNP and
   N in *BITPOSP and return true.  Otherwise return false and store
   BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */

bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  poly_int64 bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, reversep, volatilep;
  unsigned int align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
			     &unsignedp, &reversep, &volatilep);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
	 alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
	 allows the low bit to be used as a virtual bit, we know
	 that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
	align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = targetm.constant_alignment (exp, align);

      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
	   || TREE_CODE (exp) == MEM_REF
	   || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;
      unsigned HOST_WIDE_INT ptr_bitmask = ~0;

      /* If the address is explicitly aligned, handle that.  */
      if (TREE_CODE (addr) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
	{
	  ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
	  ptr_bitmask *= BITS_PER_UNIT;
	  align = least_bit_hwi (ptr_bitmask);
	  addr = TREE_OPERAND (addr, 0);
	}

      known_alignment
	= get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* Re-apply explicit alignment to the bitpos.  */
      ptr_bitpos &= ptr_bitmask;

      /* The alignment of the pointer operand in a TARGET_MEM_REF
	 has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
	{
	  if (TMR_INDEX (exp))
	    {
	      unsigned HOST_WIDE_INT step = 1;
	      if (TMR_STEP (exp))
		step = TREE_INT_CST_LOW (TMR_STEP (exp));
	      align = MIN (align, least_bit_hwi (step) * BITS_PER_UNIT);
	    }
	  if (TMR_INDEX2 (exp))
	    align = BITS_PER_UNIT;
	  known_alignment = false;
	}

      /* When EXP is an actual memory reference then we can use
	 TYPE_ALIGN of a pointer indirection to derive alignment.
	 Do so only if get_pointer_alignment_1 did not reveal absolute
	 alignment knowledge and if using that alignment would
	 improve the situation.  */
      unsigned int talign;
      if (!addr_p && !known_alignment
	  && (talign = min_align_of_type (TREE_TYPE (exp)) * BITS_PER_UNIT)
	  && talign > align)
	align = talign;
      else
	{
	  /* Else adjust bitpos accordingly.  */
	  bitpos += ptr_bitpos;
	  if (TREE_CODE (exp) == MEM_REF
	      || TREE_CODE (exp) == TARGET_MEM_REF)
	    bitpos += mem_ref_offset (exp).force_shwi () * BITS_PER_UNIT;
	}
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CST are the only constant objects we allow to be not
	 wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = targetm.constant_alignment (exp, align);

      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  if (offset)
    {
      unsigned int trailing_zeros = tree_ctz (offset);
      if (trailing_zeros < HOST_BITS_PER_INT)
	{
	  unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	  if (inner)
	    align = MIN (align, inner);
	}
    }

  /* Account for the alignment of runtime coefficients, so that the constant
     bitpos is guaranteed to be accurate.  */
  unsigned int alt_align = ::known_alignment (bitpos - bitpos.coeffs[0]);
  if (alt_align != 0 && alt_align < align)
    {
      align = alt_align;
      known_alignment = false;
    }

  *alignp = align;
  *bitposp = bitpos.coeffs[0] & (align - 1);
  return known_alignment;
}
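
/* Worked example: for a MEM_REF whose base pointer is known to be
   16-byte aligned and whose constant offset is 4 bytes, this stores
   *ALIGNP == 128 and *BITPOSP == 32, i.e. the access is 4 bytes past
   a 16-byte boundary.  */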

/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Otherwise return false and store BITS_PER_UNIT to *ALIGNP and any
   bit-offset to *BITPOSP.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp)
{
  /* Strip a WITH_SIZE_EXPR, get_inner_reference doesn't know how to deal
     with it.  */
  if (TREE_CODE (exp) == WITH_SIZE_EXPR)
    exp = TREE_OPERAND (exp, 0);
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}

/* Return the alignment in bits of EXP, an object.  */

unsigned int
get_object_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_object_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = least_bit_hwi (bitpos);
  return align;
}
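
/* For example, if get_object_alignment_1 reports a 64-bit alignment
   with a bit position of 16, only the low four address bits are known
   to be clear, so the result is narrowed to least_bit_hwi (16) == 16
   bits.  */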

/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be determined,
   store M in *ALIGNP and N in *BITPOSP and return true.  Return false if
   the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
			 unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
				   alignp, bitposp, true);
  else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
    {
      unsigned int align;
      unsigned HOST_WIDE_INT bitpos;
      bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
					  &align, &bitpos);
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
	bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
      else
	{
	  unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
	  if (trailing_zeros < HOST_BITS_PER_INT)
	    {
	      unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	      if (inner)
		align = MIN (align, inner);
	    }
	}
      *alignp = align;
      *bitposp = bitpos & (align - 1);
      return res;
    }
  else if (TREE_CODE (exp) == SSA_NAME
	   && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
	{
	  *bitposp = ptr_misalign * BITS_PER_UNIT;
	  *alignp = ptr_align * BITS_PER_UNIT;
	  /* Make sure to return a sensible alignment when the multiplication
	     by BITS_PER_UNIT overflowed.  */
	  if (*alignp == 0)
	    *alignp = 1u << (HOST_BITS_PER_INT - 1);
	  /* We cannot really tell whether this result is an approximation.  */
	  return false;
	}
      else
	{
	  *bitposp = 0;
	  *alignp = BITS_PER_UNIT;
	  return false;
	}
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
		  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}

/* Return the alignment in bits of EXP, a pointer valued expression.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_pointer_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = least_bit_hwi (bitpos);

  return align;
}

/* Return the number of leading non-zero elements in the sequence
   [ PTR, PTR + MAXELTS ) where each element's size is ELTSIZE bytes.
   ELTSIZE must be a power of 2 less than 8.  Used by c_strlen.  */

unsigned
string_length (const void *ptr, unsigned eltsize, unsigned maxelts)
{
  gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);

  unsigned n;

  if (eltsize == 1)
    {
      /* Optimize the common case of plain char.  */
      for (n = 0; n < maxelts; n++)
	{
	  const char *elt = (const char*) ptr + n;
	  if (!*elt)
	    break;
	}
    }
  else
    {
      for (n = 0; n < maxelts; n++)
	{
	  const char *elt = (const char*) ptr + n * eltsize;
	  if (!memcmp (elt, "\0\0\0\0", eltsize))
	    break;
	}
    }
  return n;
}
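
/* For example, string_length ("ab\0cd", 1, 5) returns 2: the scan
   stops at the embedded NUL even though more non-NUL bytes follow.  */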

/* Compute the length of a null-terminated character string or wide
   character string handling character sizes of 1, 2, and 4 bytes.
   TREE_STRING_LENGTH is not the right way because it evaluates to
   the size of the character array in bytes (as opposed to characters)
   and because it can contain a zero byte in the middle.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (ARG, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   If ONLY_VALUE is two then we do not emit warnings about out-of-bound
   accesses.  Note that this implies the result is not going to be emitted
   into the instruction stream.

   Additional information about the string accessed may be recorded
   in DATA.  For example, if ARG references an unterminated string,
   then the declaration will be stored in the DECL field.  If the
   length of the unterminated string can be determined, it'll be
   stored in the LEN field.  Note this length could well be different
   than what a C strlen call would return.

   ELTSIZE is 1 for normal single byte character strings, and 2 or
   4 for wide character strings.  ELTSIZE is by default 1.

   The value returned is of type `ssizetype'.  */

tree
c_strlen (tree arg, int only_value, c_strlen_data *data, unsigned eltsize)
{
  /* If we were not passed a DATA pointer, then get one to a local
     structure.  That avoids having to check DATA for NULL before
     each time we want to use it.  */
  c_strlen_data local_strlen_data = { };
  if (!data)
    data = &local_strlen_data;

  gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);

  tree src = STRIP_NOPS (arg);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value, data, eltsize);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);

  location_t loc = EXPR_LOC_OR_LOC (src, input_location);

  /* Offset from the beginning of the string in bytes.  */
  tree byteoff;
  tree memsize;
  tree decl;
  src = string_constant (src, &byteoff, &memsize, &decl);
  if (src == 0)
    return NULL_TREE;

  /* Determine the size of the string element.  */
  if (eltsize != tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (src)))))
    return NULL_TREE;

  /* Set MAXELTS to ARRAY_SIZE (SRC) - 1, the maximum possible
     length of SRC.  Prefer TYPE_SIZE() to TREE_STRING_LENGTH() if possible
     in case the latter is less than the size of the array, such as when
     SRC refers to a short string literal used to initialize a large array.
     In that case, the elements of the array after the terminating NUL are
     all NUL.  */
  HOST_WIDE_INT strelts = TREE_STRING_LENGTH (src);
  strelts = strelts / eltsize;

  if (!tree_fits_uhwi_p (memsize))
    return NULL_TREE;

  HOST_WIDE_INT maxelts = tree_to_uhwi (memsize) / eltsize;

  /* PTR can point to the byte representation of any string type, including
     char* and wchar_t*.  */
  const char *ptr = TREE_STRING_POINTER (src);

  if (byteoff && TREE_CODE (byteoff) != INTEGER_CST)
    {
      /* The code below works only for single byte character types.  */
      if (eltsize != 1)
	return NULL_TREE;

      /* If the string has an internal NUL character followed by any
	 non-NUL characters (e.g., "foo\0bar"), we can't compute
	 the offset to the following NUL if we don't know where to
	 start searching for it.  */
      unsigned len = string_length (ptr, eltsize, strelts);

      /* Return when an embedded null character is found or none at all.
	 In the latter case, set the DECL/LEN field in the DATA structure
	 so that callers may examine them.  */
      if (len + 1 < strelts)
	return NULL_TREE;
      else if (len >= maxelts)
	{
	  data->decl = decl;
	  data->off = byteoff;
	  data->minlen = ssize_int (len);
	  return NULL_TREE;
	}

      /* For empty strings the result should be zero.  */
      if (len == 0)
	return ssize_int (0);

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  If the offset falls within the bounds
	 of the string subtract the offset from the length of the string,
	 and return that.  Otherwise the length is zero.  Take care to
	 use SAVE_EXPR in case the OFFSET has side-effects.  */
      tree offsave = TREE_SIDE_EFFECTS (byteoff) ? save_expr (byteoff)
						 : byteoff;
      offsave = fold_convert_loc (loc, sizetype, offsave);
      tree condexp = fold_build2_loc (loc, LE_EXPR, boolean_type_node, offsave,
				      size_int (len));
      tree lenexp = fold_build2_loc (loc, MINUS_EXPR, sizetype, size_int (len),
				     offsave);
      lenexp = fold_convert_loc (loc, ssizetype, lenexp);
      return fold_build3_loc (loc, COND_EXPR, ssizetype, condexp, lenexp,
			      build_zero_cst (ssizetype));
    }

  /* Offset from the beginning of the string in elements.  */
  HOST_WIDE_INT eltoff;

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (byteoff == 0)
    eltoff = 0;
  else if (! tree_fits_uhwi_p (byteoff) || tree_to_uhwi (byteoff) % eltsize)
    eltoff = -1;
  else
    eltoff = tree_to_uhwi (byteoff) / eltsize;

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (eltoff < 0 || eltoff >= maxelts)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (only_value != 2
	  && !warning_suppressed_p (arg, OPT_Warray_bounds_)
	  && warning_at (loc, OPT_Warray_bounds_,
			 "offset %qwi outside bounds of constant string",
			 eltoff))
	{
	  if (decl)
	    inform (DECL_SOURCE_LOCATION (decl), "%qE declared here", decl);
	  suppress_warning (arg, OPT_Warray_bounds_);
	}
      return NULL_TREE;
    }

  /* If eltoff is larger than strelts but less than maxelts the
     string length is zero, since the excess memory will be zero.  */
  if (eltoff > strelts)
    return ssize_int (0);

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since ELTOFF is our starting index into the string, no further
     calculation is needed.  */
  unsigned len = string_length (ptr + eltoff * eltsize, eltsize,
				strelts - eltoff);

  /* Don't know what to return if there was no zero termination.
     Ideally this would turn into a gcc_checking_assert over time.
     Set DECL/LEN so callers can examine them.  */
  if (len >= maxelts - eltoff)
    {
      data->decl = decl;
      data->off = byteoff;
      data->minlen = ssize_int (len);
      return NULL_TREE;
    }

  return ssize_int (len);
}
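
/* For example, for ARG referring to the literal "foo" this returns
   ssize_int (3); for "foo\0bar" with a non-constant offset it returns
   NULL_TREE, since the length then depends on where the scan starts.  */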

/* Return a constant integer corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  If
   NULL_TERMINATED_P, reading stops after '\0' character, all further ones
   are assumed to be zero, otherwise it reads as many characters
   as needed.  */

rtx
c_readstr (const char *str, scalar_int_mode mode,
	   bool null_terminated_p/*=true*/)
{
  HOST_WIDE_INT ch;
  unsigned int i, j;
  HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
  unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
    / HOST_BITS_PER_WIDE_INT;

  gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
  for (i = 0; i < len; i++)
    tmp[i] = 0;

  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;

      if (ch || !null_terminated_p)
	ch = (unsigned char) str[i];
      tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }

  wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
  return immed_wide_int_const (c, mode);
}
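
/* For example, on a little-endian target c_readstr ("ab", SImode)
   yields the constant 0x6261: 'a' in the low byte, 'b' in the next
   byte, and zeros beyond the terminating NUL.  */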

/* Cast a target constant CST to target CHAR and if that value fits into
   host char type, return zero and put that value into variable pointed to by
   P.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (TREE_CODE (cst) != INTEGER_CST
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  /* Do not care if it fits or not right here.  */
  val = TREE_INT_CST_LOW (cst);

  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}
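
/* Apart from non-constant arguments, failure is only possible when the
   target character is wider than the host one: e.g. a 16-bit target
   char holding 0x1234 cannot be represented in an 8-bit host char, so
   1 is returned.  */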

/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

static tree
builtin_save_expr (tree exp)
{
  if (TREE_CODE (exp) == SSA_NAME
      || (TREE_ADDRESSABLE (exp) == 0
	  && (TREE_CODE (exp) == PARM_DECL
	      || (VAR_P (exp) && !TREE_STATIC (exp)))))
    return exp;

  return save_expr (exp);
}

/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
  if (tem == NULL_RTX)
    {
      /* For a zero count with __builtin_return_address, we don't care what
	 frame address we return, because target-specific definitions will
	 override us.  Therefore frame pointer elimination is OK, and using
	 the soft frame pointer is OK.

	 For a nonzero count, or a zero count with __builtin_frame_address,
	 we require a stable offset from the current frame pointer to the
	 previous one, so we must use the hard frame pointer, and
	 we must disable frame pointer elimination.  */
      if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
	tem = frame_pointer_rtx;
      else
	{
	  tem = hard_frame_pointer_rtx;

	  /* Tell reload not to eliminate the frame pointer.  */
	  crtl->accesses_prior_frames = 1;
	}
    }

  if (count > 0)
    SETUP_FRAME_ADDRESSES ();

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
  if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
    return FRAME_ADDR_RTX (tem);

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
			plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}
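
/* For example, __builtin_return_address (0) reads the return address
   of the current frame, while __builtin_frame_address (2) follows the
   dynamic chain twice and returns the caller's caller's frame
   address.  */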

/* Alias set used for setjmp buffer.  */
static alias_set_type setjmp_alias_set = -1;

/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.  */

void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save;
  rtx mem;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */

  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, hard_frame_pointer_rtx);

  mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					   GET_MODE_SIZE (Pmode))),
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
		  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
			    plus_constant (Pmode, buf_addr,
					   2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save);

  /* If there is further processing to do, do it.  */
  if (targetm.have_builtin_setjmp_setup ())
    emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));

  /* We have a nonlocal label.   */
  cfun->has_nonlocal_label = 1;
}
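
/* The buffer laid out above therefore looks like:
     word 0: the hard frame pointer
     word 1: the address of RECEIVER_LABEL
     word 2 and up: the machine-dependent stack save area.  */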

/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label)
{
  rtx chain;

  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = rtx_for_static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
    {
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.
	 This is the case on all known machines; if we don't make this
	 assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }

  if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
    emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
  else if (targetm.have_nonlocal_goto_receiver ())
    emit_insn (targetm.gen_nonlocal_goto_receiver ());
  else
    { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}

/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack;
  rtx_insn *insn, *last;
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded to current
     function  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user must pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
  if (targetm.have_builtin_longjmp ())
    emit_insn (targetm.gen_builtin_longjmp (buf_addr));
  else
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.cc; see there for detailed comments.  */
      if (targetm.have_nonlocal_goto ())
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
      else
	{
	  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
	  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

	  lab = copy_to_reg (lab);

	  /* Restore the frame pointer and stack pointer.  We must use a
	     temporary since the setjmp buffer may be a local.  */
	  fp = copy_to_reg (fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack);

	  /* Ensure the frame pointer move is not optimized.  */
	  emit_insn (gen_blockage ());
	  emit_clobber (hard_frame_pointer_rtx);
	  emit_clobber (frame_pointer_rtx);
	  emit_move_insn (hard_frame_pointer_rtx, fp);

	  emit_use (hard_frame_pointer_rtx);
	  emit_use (stack_pointer_rtx);
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }
}
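
/* Illustrative sketch (not part of the GCC sources): how the two builtins
   expanded above pair up in user code.  The buffer is the five-word block
   described in the comment above, and the second argument to
   __builtin_longjmp must be the constant 1, as enforced by the gcc_assert
   in expand_builtin_longjmp:

     void *buf[5];
     if (__builtin_setjmp (buf) == 0)
       __builtin_longjmp (buf, 1);   // control re-enters setjmp, returning 1
 */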

static inline bool
more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
{
  return (iter->i < iter->n);
}

/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipsis, otherwise the last specifier must be a
   VOID_TYPE.  */

static bool
validate_arglist (const_tree callexpr, ...)
{
  enum tree_code code;
  bool res = 0;
  va_list ap;
  const_call_expr_arg_iterator iter;
  const_tree arg;

  va_start (ap, callexpr);
  init_const_call_expr_arg_iterator (callexpr, &iter);

  /* Get a bitmap of pointer argument numbers declared attribute nonnull.  */
  tree fn = CALL_EXPR_FN (callexpr);
  bitmap argmap = get_nonnull_args (TREE_TYPE (TREE_TYPE (fn)));

  for (unsigned argno = 1; ; ++argno)
    {
      code = (enum tree_code) va_arg (ap, int);

      switch (code)
	{
	case 0:
	  /* This signifies an ellipsis, any further arguments are all ok.  */
	  res = true;
	  goto end;
	case VOID_TYPE:
	  /* This signifies an endlink, if no arguments remain, return
	     true, otherwise return false.  */
	  res = !more_const_call_expr_args_p (&iter);
	  goto end;
	case POINTER_TYPE:
	  /* The actual argument must be nonnull when either the whole
	     called function has been declared nonnull, or when the formal
	     argument corresponding to the actual argument has been.  */
	  if (argmap
	      && (bitmap_empty_p (argmap) || bitmap_bit_p (argmap, argno)))
	    {
	      arg = next_const_call_expr_arg (&iter);
	      if (!validate_arg (arg, code) || integer_zerop (arg))
		goto end;
	      break;
	    }
	  /* FALLTHRU */
	default:
	  /* If no parameters remain or the parameter's code does not
	     match the specified code, return false.  Otherwise continue
	     checking any remaining arguments.  */
	  arg = next_const_call_expr_arg (&iter);
	  if (!validate_arg (arg, code))
	    goto end;
	  break;
	}
    }

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  BITMAP_FREE (argmap);

  return res;
}
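
/* Illustrative sketch (not part of the GCC sources): typical caller-side
   uses of validate_arglist, mirroring calls that appear later in this
   file.  A VOID_TYPE specifier terminates an exact list; a trailing 0
   allows any further arguments:

     if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
       return NULL_RTX;   // exactly two pointer arguments required
     if (!validate_arglist (exp, POINTER_TYPE, 0))
       return;            // one pointer argument, anything may follow
 */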

/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp;
  rtx_insn *insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.  */
  r_save_area = copy_to_reg (r_save_area);
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
		      plus_constant (Pmode, r_save_area,
				     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (targetm.have_nonlocal_goto ())
    emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
    {
      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      r_label = copy_to_reg (r_label);

      /* Restore the frame pointer and stack pointer.  We must use a
	 temporary since the setjmp buffer may be a local.  */
      r_fp = copy_to_reg (r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);

      /* Ensure the frame pointer move is not optimized.  */
      emit_insn (gen_blockage ());
      emit_clobber (hard_frame_pointer_rtx);
      emit_clobber (frame_pointer_rtx);
      emit_move_insn (hard_frame_pointer_rtx, r_fp);

      /* USE of hard_frame_pointer_rtx added for consistency;
	 not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
	 conservatively assume that the target function makes use of it.
	 The prologue of functions with nonlocal gotos must therefore
	 initialize the GP register to the appropriate value, and we
	 must then make sure that this value is live at the point
	 of the jump.  (Note that this doesn't necessarily apply
	 to targets with a nonlocal_goto pattern; they are free
	 to implement it in their own way.  Note also that this is
	 a no-op if the GP register is a global invariant.)  */
      unsigned regnum = PIC_OFFSET_TABLE_REGNUM;
      if (regnum != INVALID_REGNUM && fixed_regs[regnum])
	emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }

  return const0_rtx;
}
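
/* Illustrative sketch (not part of the GCC sources): the usual front-end
   source of a nonlocal goto is a GNU C nested function jumping to a label
   of its containing function, declared with the __label__ extension; the
   containing function's frame and stack pointers are then restored from a
   save area as above:

     int
     outer (void)
     {
       __label__ out;
       void inner (void) { goto out; }   // nonlocal goto
       inner ();
       return 0;
      out:
       return 1;
     }
 */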

/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to the current value.  This is
   also called directly by the SJLJ exception handling code.  */

void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  buf_addr = convert_memory_address (Pmode, buf_addr);
  rtx stack_save
    = gen_rtx_MEM (sa_mode,
		   memory_address
		   (sa_mode,
		    plus_constant (Pmode, buf_addr,
				   2 * GET_MODE_SIZE (Pmode))));

  emit_stack_save (SAVE_NONLOCAL, &stack_save);
}

/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = integer_three_node;

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
	       " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

  if (targetm.have_prefetch ())
    {
      class expand_operand ops[3];

      create_address_operand (&ops[0], op0);
      create_integer_operand (&ops[1], INTVAL (op1));
      create_integer_operand (&ops[2], INTVAL (op2));
      if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
	return;
    }

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}
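
/* Illustrative sketch (not part of the GCC sources): the argument forms the
   expander above accepts.  The second argument is 0 (read, the default) or
   1 (write); the third is a locality hint from 0 to 3 (default 3):

     __builtin_prefetch (&a[i + 8]);        // read, locality 3
     __builtin_prefetch (&a[i + 8], 1, 0);  // write, no temporal locality
 */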

/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, cpymemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp, base;
  rtx addr, mem;

  /* When EXP is an unresolved SAVE_EXPR, MEM_ATTRS can still be derived
     from its expression; for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Build a MEM_REF representing the whole accessed area as a byte blob,
     (as builtin stringops may alias with anything).  */
  exp = fold_build2 (MEM_REF,
		     build_array_type (char_type_node,
				       build_range_type (sizetype,
							 size_one_node, len)),
		     exp, build_int_cst (ptr_type_node, 0));

  /* If the MEM_REF has no acceptable address, try to get the base object
     from the original address we got, and build an all-aliasing
     unknown-sized access to that one.  */
  if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
    set_mem_attributes (mem, exp, 0);
  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	   && (base = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
						      0))))
    {
      unsigned int align = get_pointer_alignment (TREE_OPERAND (exp, 0));
      exp = build_fold_addr_expr (base);
      exp = fold_build2 (MEM_REF,
			 build_array_type (char_type_node,
					   build_range_type (sizetype,
							     size_zero_node,
							     NULL)),
			 exp, build_int_cst (ptr_type_node, 0));
      set_mem_attributes (mem, exp, 0);
      /* Since we stripped parts make sure the offset is unknown and the
	 alignment is computed from the original address.  */
      clear_mem_offset (mem);
      set_mem_align (mem, align);
    }
  set_mem_alias_set (mem, 0);
  return mem;
}

/* Built-in functions to perform an untyped call and return.  */

#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)

/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */

static int
apply_args_size (void)
{
  static int size = -1;
  int align;
  unsigned int regno;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
	 passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
	size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_ARG_REGNO_P (regno))
	  {
	    fixed_size_mode mode = targetm.calls.get_raw_arg_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_args_mode[regno] = mode;
	  }
	else
	  {
	    apply_args_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
	  }
    }
  return size;
}
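
/* Worked example (illustration only, not part of the GCC sources) of the
   rounding used above and in the other layout loops in this file: with
   size == 4 and a register mode requiring 8-byte alignment,
   CEIL (4, 8) == 1, so size is bumped to 1 * 8 == 8 before the register's
   bytes are added.  */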

/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */

static int
apply_result_size (void)
{
  static int size = -1;
  int align, regno;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (targetm.calls.function_value_regno_p (regno))
	  {
	    fixed_size_mode mode = targetm.calls.get_raw_result_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_result_mode[regno] = mode;
	  }
	else
	  apply_result_mode[regno] = as_a <fixed_size_mode> (VOIDmode);

      /* Allow targets that use untyped_call and untyped_return to override
	 the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}

/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  fixed_size_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
	mem = adjust_address (result, mode, size);
	savevec[nelts++] = (savep
			    ? gen_rtx_SET (mem, reg)
			    : gen_rtx_SET (reg, mem));
	size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}

/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  fixed_size_mode mode;
  rtx struct_incoming_value
    = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;

	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

	emit_move_insn (adjust_address (registers, mode, size), tem);
	size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
  /* We need the pointer as the caller actually passed it to us, not
     as we might have pretended it was passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  if (STACK_GROWS_DOWNWARD)
    tem
      = force_operand (plus_constant (Pmode, tem,
				      crtl->args.pretend_args_size),
		       NULL_RTX);
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    emit_move_insn (adjust_address (registers, Pmode, size),
		    copy_to_reg (struct_incoming_value));

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}

/* __builtin_apply_args returns a block of memory allocated on
   the stack into which is stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   saved.  */

static rtx
expand_builtin_apply_args (void)
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;
  {
    /* When this function is called, it means that registers must be
       saved on entry to this function.  So we migrate the
       call to the first insn of this function.  */
    rtx temp;

    start_sequence ();
    temp = expand_builtin_apply_args_1 ();
    rtx_insn *seq = get_insns ();
    end_sequence ();

    apply_args_value = temp;

    /* Put the insns after the NOTE that starts the function.
       If this is inside a start_sequence, make the outer-level insn
       chain current, so the code is placed at the start of the
       function.  If internal_arg_pointer is a non-virtual pseudo,
       it needs to be placed after the function that initializes
       that pseudo.  */
    push_topmost_sequence ();
    if (REG_P (crtl->args.internal_arg_pointer)
	&& REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
      emit_insn_before (seq, parm_birth_insn);
    else
      emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
    pop_topmost_sequence ();
    return temp;
  }
}

/* Perform an untyped call and save the state required to perform an
   untyped return of whatever value was returned by the given function.  */

static rtx
expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
{
  int size, align, regno;
  fixed_size_mode mode;
  rtx incoming_args, result, reg, dest, src;
  rtx_call_insn *call_insn;
  rtx old_stack_level = 0;
  rtx call_fusage = 0;
  rtx struct_value
    = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);

  arguments = convert_memory_address (Pmode, arguments);

  /* Create a block where the return registers can be saved.  */
  result = assign_stack_local (BLKmode, apply_result_size (), -1);

  /* Fetch the arg pointer from the ARGUMENTS block.  */
  incoming_args = gen_reg_rtx (Pmode);
  emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
  if (!STACK_GROWS_DOWNWARD)
    incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
					 incoming_args, 0, OPTAB_LIB_WIDEN);

  /* Push a new argument block and copy the arguments.  Do not allow
     the (potential) memcpy call below to interfere with our stack
     manipulations.  */
  do_pending_stack_adjust ();
  NO_DEFER_POP;

  /* Save the stack with nonlocal if available.  */
  if (targetm.have_save_stack_nonlocal ())
    emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
  else
    emit_stack_save (SAVE_BLOCK, &old_stack_level);

  /* Allocate a block of memory onto the stack and copy the memory
     arguments to the outgoing arguments address.  We can pass TRUE
     as the 4th argument because we just saved the stack pointer
     and will restore it right after the call.  */
  allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, -1, true);

  /* Set DRAP flag to true, even though allocate_dynamic_stack_space
     may have already set current_function_calls_alloca to true.
     current_function_calls_alloca won't be set if argsize is zero,
     so we have to guarantee need_drap is true here.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  dest = virtual_outgoing_args_rtx;
  if (!STACK_GROWS_DOWNWARD)
    {
      if (CONST_INT_P (argsize))
	dest = plus_constant (Pmode, dest, -INTVAL (argsize));
      else
	dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
    }
  dest = gen_rtx_MEM (BLKmode, dest);
  set_mem_align (dest, PARM_BOUNDARY);
  src = gen_rtx_MEM (BLKmode, incoming_args);
  set_mem_align (src, PARM_BOUNDARY);
  emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);

  /* Refer to the argument block.  */
  apply_args_size ();
  arguments = gen_rtx_MEM (BLKmode, arguments);
  set_mem_align (arguments, PARM_BOUNDARY);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    size += GET_MODE_SIZE (Pmode);

  /* Restore each of the registers previously saved.  Make USE insns
     for each of these registers for use in making the call.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, regno);
	emit_move_insn (reg, adjust_address (arguments, mode, size));
	use_reg (&call_fusage, reg);
	size += GET_MODE_SIZE (mode);
      }

  /* Restore the structure value address unless this is passed as an
     "invisible" first argument.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    {
      rtx value = gen_reg_rtx (Pmode);
      emit_move_insn (value, adjust_address (arguments, Pmode, size));
      emit_move_insn (struct_value, value);
      if (REG_P (struct_value))
	use_reg (&call_fusage, struct_value);
    }

  /* All arguments and registers used for the call are set up by now!  */
  function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (function) != SYMBOL_REF)
    function = memory_address (FUNCTION_MODE, function);

  /* Generate the actual call instruction and save the return value.  */
  if (targetm.have_untyped_call ())
    {
      rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
      rtx_insn *seq = targetm.gen_untyped_call (mem, result,
						result_vector (1, result));
      for (rtx_insn *insn = seq; insn; insn = NEXT_INSN (insn))
	if (CALL_P (insn))
	  add_reg_note (insn, REG_UNTYPED_CALL, NULL_RTX);
      emit_insn (seq);
    }
  else if (targetm.have_call_value ())
    {
      rtx valreg = 0;

      /* Locate the unique return register.  It is not possible to
	 express a call that sets more than one return register using
	 call_value; use untyped_call for that.  In fact, untyped_call
	 only needs to save the return registers in the given block.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if ((mode = apply_result_mode[regno]) != VOIDmode)
	  {
	    gcc_assert (!valreg); /* have_untyped_call required.  */

	    valreg = gen_rtx_REG (mode, regno);
	  }

      emit_insn (targetm.gen_call_value (valreg,
					 gen_rtx_MEM (FUNCTION_MODE, function),
					 const0_rtx, NULL_RTX, const0_rtx));

      emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
    }
  else
    gcc_unreachable ();

  /* Find the CALL insn we just emitted, and attach the register usage
     information.  */
  call_insn = last_call_insn ();
  add_function_usage_to (call_insn, call_fusage);

  /* Restore the stack.  */
  if (targetm.have_save_stack_nonlocal ())
    emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
  else
    emit_stack_restore (SAVE_BLOCK, old_stack_level);
  fixup_args_size_notes (call_insn, get_last_insn (), 0);

  OK_DEFER_POP;

  /* Return the address of the result block.  */
  result = copy_addr_to_reg (XEXP (result, 0));
  return convert_memory_address (ptr_mode, result);
}
1799 | |
1800 | /* Perform an untyped return. */ |
1801 | |
1802 | static void |
1803 | expand_builtin_return (rtx result) |
1804 | { |
1805 | int size, align, regno; |
1806 | fixed_size_mode mode; |
1807 | rtx reg; |
1808 | rtx_insn *call_fusage = 0; |
1809 | |
1810 | result = convert_memory_address (Pmode, result)convert_memory_address_addr_space (((global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode ((scalar_int_mode::from_int) E_DImode )) : (scalar_int_mode ((scalar_int_mode::from_int) E_SImode)) )), (result), 0); |
1811 | |
1812 | apply_result_size (); |
1813 | result = gen_rtx_MEM (BLKmode((void) 0, E_BLKmode), result); |
1814 | |
1815 | if (targetm.have_untyped_return ()) |
1816 | { |
1817 | rtx vector = result_vector (0, result); |
1818 | emit_jump_insn (targetm.gen_untyped_return (result, vector)); |
1819 | emit_barrier (); |
1820 | return; |
1821 | } |
1822 | |
1823 | /* Restore the return value and note that each value is used. */ |
1824 | size = 0; |
1825 | for (regno = 0; regno < FIRST_PSEUDO_REGISTER76; regno++) |
1826 | if ((mode = apply_result_mode(this_target_builtins->x_apply_result_mode)[regno]) != VOIDmode((void) 0, E_VOIDmode)) |
1827 | { |
1828 | align = GET_MODE_ALIGNMENT (mode)get_mode_alignment (mode) / BITS_PER_UNIT(8); |
1829 | if (size % align != 0) |
1830 | size = CEIL (size, align)(((size) + (align) - 1) / (align)) * align; |
1831 | reg = gen_rtx_REG (mode, INCOMING_REGNO (regno)(regno)); |
1832 | emit_move_insn (reg, adjust_address (result, mode, size)adjust_address_1 (result, mode, size, 1, 1, 0, 0)); |
1833 | |
1834 | push_to_sequence (call_fusage); |
1835 | emit_use (reg); |
1836 | call_fusage = get_insns (); |
1837 | end_sequence (); |
1838 | size += GET_MODE_SIZE (mode); |
1839 | } |
1840 | |
1841 | /* Put the USE insns before the return. */ |
1842 | emit_insn (call_fusage); |
1843 | |
1844 | /* Return whatever values was restored by jumping directly to the end |
1845 | of the function. */ |
1846 | expand_naked_return (); |
1847 | } |
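
/* A sketch of the source-level machinery the two functions above serve
   (not code from this file):

     void *args = __builtin_apply_args ();
     void *res = __builtin_apply (fn, args, 64);
     __builtin_return (res);

   The result block RES produced by the untyped call is handed back to
   __builtin_return, which reloads the potential return registers from it
   and jumps straight to the function's end; the 64-byte argument size is
   just an illustrative value.  */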

/* Used by expand_builtin_classify_type and fold_builtin_classify_type.  */

static enum type_class
type_to_class (tree type)
{
  switch (TREE_CODE (type))
    {
    case VOID_TYPE:	   return void_type_class;
    case INTEGER_TYPE:	   return integer_type_class;
    case ENUMERAL_TYPE:	   return enumeral_type_class;
    case BOOLEAN_TYPE:	   return boolean_type_class;
    case POINTER_TYPE:	   return pointer_type_class;
    case REFERENCE_TYPE:   return reference_type_class;
    case OFFSET_TYPE:	   return offset_type_class;
    case REAL_TYPE:	   return real_type_class;
    case COMPLEX_TYPE:	   return complex_type_class;
    case FUNCTION_TYPE:	   return function_type_class;
    case METHOD_TYPE:	   return method_type_class;
    case RECORD_TYPE:	   return record_type_class;
    case UNION_TYPE:
    case QUAL_UNION_TYPE:  return union_type_class;
    case ARRAY_TYPE:	   return (TYPE_STRING_FLAG (type)
				   ? string_type_class : array_type_class);
    case LANG_TYPE:	   return lang_type_class;
    case OPAQUE_TYPE:	   return opaque_type_class;
    default:		   return no_type_class;
    }
}

/* Expand a call EXP to __builtin_classify_type.  */

static rtx
expand_builtin_classify_type (tree exp)
{
  if (call_expr_nargs (exp))
    return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
  return GEN_INT (no_type_class);
}
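
/* For example, __builtin_classify_type (42) expands to the constant
   integer_type_class, __builtin_classify_type (3.14) to real_type_class,
   and a call with no arguments yields no_type_class.  */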

/* This helper macro, meant to be used in mathfn_built_in below, determines
   which among a set of builtin math functions is appropriate for a given type
   mode.  The 'F' (float) and 'L' (long double) variants are automatically
   generated from the 'double' case.  If a function supports the _Float<N> and
   _Float<N>X types, there are additional types that are considered with 'F32',
   'F64', 'F128', etc. suffixes.  */
#define CASE_MATHFN(MATHFN) \
  CASE_CFN_##MATHFN: \
  fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
  fcodel = BUILT_IN_##MATHFN##L ; break;
/* Similar to the above, but also add support for the _Float<N> and _Float<N>X
   types.  */
#define CASE_MATHFN_FLOATN(MATHFN) \
  CASE_CFN_##MATHFN: \
  fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
  fcodel = BUILT_IN_##MATHFN##L ; fcodef16 = BUILT_IN_##MATHFN##F16 ; \
  fcodef32 = BUILT_IN_##MATHFN##F32; fcodef64 = BUILT_IN_##MATHFN##F64 ; \
  fcodef128 = BUILT_IN_##MATHFN##F128 ; fcodef32x = BUILT_IN_##MATHFN##F32X ; \
  fcodef64x = BUILT_IN_##MATHFN##F64X ; fcodef128x = BUILT_IN_##MATHFN##F128X ;\
  break;
/* Similar to above, but appends _R after any F/L suffix.  */
#define CASE_MATHFN_REENT(MATHFN) \
  case CFN_BUILT_IN_##MATHFN##_R: \
  case CFN_BUILT_IN_##MATHFN##F_R: \
  case CFN_BUILT_IN_##MATHFN##L_R: \
  fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
  fcodel = BUILT_IN_##MATHFN##L_R ; break;

/* Return a function equivalent to FN but operating on floating-point
   values of type TYPE, or END_BUILTINS if no such function exists.
   This is purely an operation on function codes; it does not guarantee
   that the target actually has an implementation of the function.  */

static built_in_function
mathfn_built_in_2 (tree type, combined_fn fn)
{
  tree mtype;
  built_in_function fcode, fcodef, fcodel;
  built_in_function fcodef16 = END_BUILTINS;
  built_in_function fcodef32 = END_BUILTINS;
  built_in_function fcodef64 = END_BUILTINS;
  built_in_function fcodef128 = END_BUILTINS;
  built_in_function fcodef32x = END_BUILTINS;
  built_in_function fcodef64x = END_BUILTINS;
  built_in_function fcodef128x = END_BUILTINS;

  /* If <math.h> has been included somehow, HUGE_VAL and NAN definitions
     break the uses below.  */
#undef HUGE_VAL
#undef NAN

  switch (fn)
    {
#define SEQ_OF_CASE_MATHFN \
    CASE_MATHFN_FLOATN (ACOS) \
    CASE_MATHFN_FLOATN (ACOSH) \
    CASE_MATHFN_FLOATN (ASIN) \
    CASE_MATHFN_FLOATN (ASINH) \
    CASE_MATHFN_FLOATN (ATAN) \
    CASE_MATHFN_FLOATN (ATAN2) \
    CASE_MATHFN_FLOATN (ATANH) \
    CASE_MATHFN_FLOATN (CBRT) \
    CASE_MATHFN_FLOATN (CEIL) \
    CASE_MATHFN (CEXPI) \
    CASE_MATHFN_FLOATN (COPYSIGN) \
    CASE_MATHFN_FLOATN (COS) \
    CASE_MATHFN_FLOATN (COSH) \
    CASE_MATHFN (DREM) \
    CASE_MATHFN_FLOATN (ERF) \
    CASE_MATHFN_FLOATN (ERFC) \
    CASE_MATHFN_FLOATN (EXP) \
    CASE_MATHFN (EXP10) \
    CASE_MATHFN_FLOATN (EXP2) \
    CASE_MATHFN_FLOATN (EXPM1) \
    CASE_MATHFN_FLOATN (FABS) \
    CASE_MATHFN_FLOATN (FDIM) \
    CASE_MATHFN_FLOATN (FLOOR) \
    CASE_MATHFN_FLOATN (FMA) \
    CASE_MATHFN_FLOATN (FMAX) \
    CASE_MATHFN_FLOATN (FMIN) \
    CASE_MATHFN_FLOATN (FMOD) \
    CASE_MATHFN_FLOATN (FREXP) \
    CASE_MATHFN (GAMMA) \
    CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */ \
    CASE_MATHFN_FLOATN (HUGE_VAL) \
    CASE_MATHFN_FLOATN (HYPOT) \
    CASE_MATHFN_FLOATN (ILOGB) \
    CASE_MATHFN (ICEIL) \
    CASE_MATHFN (IFLOOR) \
    CASE_MATHFN_FLOATN (INF) \
    CASE_MATHFN (IRINT) \
    CASE_MATHFN (IROUND) \
    CASE_MATHFN (ISINF) \
    CASE_MATHFN (J0) \
    CASE_MATHFN (J1) \
    CASE_MATHFN (JN) \
    CASE_MATHFN (LCEIL) \
    CASE_MATHFN_FLOATN (LDEXP) \
    CASE_MATHFN (LFLOOR) \
    CASE_MATHFN_FLOATN (LGAMMA) \
    CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */ \
    CASE_MATHFN (LLCEIL) \
    CASE_MATHFN (LLFLOOR) \
    CASE_MATHFN_FLOATN (LLRINT) \
    CASE_MATHFN_FLOATN (LLROUND) \
    CASE_MATHFN_FLOATN (LOG) \
    CASE_MATHFN_FLOATN (LOG10) \
    CASE_MATHFN_FLOATN (LOG1P) \
    CASE_MATHFN_FLOATN (LOG2) \
    CASE_MATHFN_FLOATN (LOGB) \
    CASE_MATHFN_FLOATN (LRINT) \
    CASE_MATHFN_FLOATN (LROUND) \
    CASE_MATHFN_FLOATN (MODF) \
    CASE_MATHFN_FLOATN (NAN) \
    CASE_MATHFN_FLOATN (NANS) \
    CASE_MATHFN_FLOATN (NEARBYINT) \
    CASE_MATHFN_FLOATN (NEXTAFTER) \
    CASE_MATHFN (NEXTTOWARD) \
    CASE_MATHFN_FLOATN (POW) \
    CASE_MATHFN (POWI) \
    CASE_MATHFN (POW10) \
    CASE_MATHFN_FLOATN (REMAINDER) \
    CASE_MATHFN_FLOATN (REMQUO) \
    CASE_MATHFN_FLOATN (RINT) \
    CASE_MATHFN_FLOATN (ROUND) \
    CASE_MATHFN_FLOATN (ROUNDEVEN) \
    CASE_MATHFN (SCALB) \
    CASE_MATHFN_FLOATN (SCALBLN) \
    CASE_MATHFN_FLOATN (SCALBN) \
    CASE_MATHFN (SIGNBIT) \
    CASE_MATHFN (SIGNIFICAND) \
    CASE_MATHFN_FLOATN (SIN) \
    CASE_MATHFN (SINCOS) \
    CASE_MATHFN_FLOATN (SINH) \
    CASE_MATHFN_FLOATN (SQRT) \
    CASE_MATHFN_FLOATN (TAN) \
    CASE_MATHFN_FLOATN (TANH) \
    CASE_MATHFN_FLOATN (TGAMMA) \
    CASE_MATHFN_FLOATN (TRUNC) \
    CASE_MATHFN (Y0) \
    CASE_MATHFN (Y1) \
    CASE_MATHFN (YN)

    SEQ_OF_CASE_MATHFN

    default:
      return END_BUILTINS;
    }

  mtype = TYPE_MAIN_VARIANT (type);
  if (mtype == double_type_node)
    return fcode;
  else if (mtype == float_type_node)
    return fcodef;
  else if (mtype == long_double_type_node)
    return fcodel;
  else if (mtype == float16_type_node)
    return fcodef16;
  else if (mtype == float32_type_node)
    return fcodef32;
  else if (mtype == float64_type_node)
    return fcodef64;
  else if (mtype == float128_type_node)
    return fcodef128;
  else if (mtype == float32x_type_node)
    return fcodef32x;
  else if (mtype == float64x_type_node)
    return fcodef64x;
  else if (mtype == float128x_type_node)
    return fcodef128x;
  else
    return END_BUILTINS;
}

#undef CASE_MATHFN
#undef CASE_MATHFN_FLOATN
#undef CASE_MATHFN_REENT

/* Return the mathematical function equivalent to FN but operating directly
   on TYPE, if available.  If IMPLICIT_P is true use the implicit builtin
   declaration, otherwise use the explicit declaration.  If we can't do the
   conversion, return null.  */

static tree
mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
{
  built_in_function fcode2 = mathfn_built_in_2 (type, fn);
  if (fcode2 == END_BUILTINS)
    return NULL_TREE;

  if (implicit_p && !builtin_decl_implicit_p (fcode2))
    return NULL_TREE;

  return builtin_decl_explicit (fcode2);
}

/* Like mathfn_built_in_1, but always use the implicit array.  */

tree
mathfn_built_in (tree type, combined_fn fn)
{
  return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
}

/* Like mathfn_built_in_1, but always use the explicit array.  */

tree
mathfn_built_in_explicit (tree type, combined_fn fn)
{
  return mathfn_built_in_1 (type, fn, /*implicit=*/ 0);
}

/* Like mathfn_built_in_1, but take a built_in_function and
   always use the implicit array.  */

tree
mathfn_built_in (tree type, enum built_in_function fn)
{
  return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
}
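
/* For instance, mathfn_built_in (float_type_node, BUILT_IN_SQRT) maps the
   double code to BUILT_IN_SQRTF and returns the decl of __builtin_sqrtf,
   or NULL_TREE when that builtin is not implicitly available.  */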

/* Return the type associated with a built in function, i.e., the one
   to be passed to mathfn_built_in to get the type-specific
   function.  */

tree
mathfn_built_in_type (combined_fn fn)
{
#define CASE_MATHFN(MATHFN) \
  case CFN_BUILT_IN_##MATHFN: \
    return double_type_node; \
  case CFN_BUILT_IN_##MATHFN##F: \
    return float_type_node; \
  case CFN_BUILT_IN_##MATHFN##L: \
    return long_double_type_node;

#define CASE_MATHFN_FLOATN(MATHFN) \
  CASE_MATHFN(MATHFN) \
  case CFN_BUILT_IN_##MATHFN##F16: \
    return float16_type_node; \
  case CFN_BUILT_IN_##MATHFN##F32: \
    return float32_type_node; \
  case CFN_BUILT_IN_##MATHFN##F64: \
    return float64_type_node; \
  case CFN_BUILT_IN_##MATHFN##F128: \
    return float128_type_node; \
  case CFN_BUILT_IN_##MATHFN##F32X: \
    return float32x_type_node; \
  case CFN_BUILT_IN_##MATHFN##F64X: \
    return float64x_type_node; \
  case CFN_BUILT_IN_##MATHFN##F128X: \
    return float128x_type_node;

/* Similar to above, but appends _R after any F/L suffix.  */
#define CASE_MATHFN_REENT(MATHFN) \
  case CFN_BUILT_IN_##MATHFN##_R: \
    return double_type_node; \
  case CFN_BUILT_IN_##MATHFN##F_R: \
    return float_type_node; \
  case CFN_BUILT_IN_##MATHFN##L_R: \
    return long_double_type_node;

  switch (fn)
    {
    SEQ_OF_CASE_MATHFN

    default:
      return NULL_TREE;
    }

#undef CASE_MATHFN
#undef CASE_MATHFN_FLOATN
#undef CASE_MATHFN_REENT
#undef SEQ_OF_CASE_MATHFN
}
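
/* As an illustration, mathfn_built_in_type (CFN_BUILT_IN_SQRTF) is
   float_type_node, and mathfn_built_in_type (CFN_BUILT_IN_LGAMMAL_R) is
   long_double_type_node, per the _R handling above.  */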

/* Check whether there is an internal function associated with function FN
   and return type RETURN_TYPE.  Return the function if so, otherwise return
   IFN_LAST.

   Note that this function only tests whether the function is defined in
   internal-fn.def, not whether it is actually available on the target.  */

static internal_fn
associated_internal_fn (built_in_function fn, tree return_type)
{
  switch (fn)
    {
#define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
    CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
#define DEF_INTERNAL_FLT_FLOATN_FN(NAME, FLAGS, OPTAB, TYPE) \
    CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME; \
    CASE_FLT_FN_FLOATN_NX (BUILT_IN_##NAME): return IFN_##NAME;
#define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
    CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
#include "internal-fn.def"

    CASE_FLT_FN (BUILT_IN_POW10):
      return IFN_EXP10;

    CASE_FLT_FN (BUILT_IN_DREM):
      return IFN_REMAINDER;

    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
      if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
	return IFN_LDEXP;
      return IFN_LAST;

    default:
      return IFN_LAST;
    }
}

/* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
   return its code, otherwise return IFN_LAST.  Note that this function
   only tests whether the function is defined in internal-fn.def, not whether
   it is actually available on the target.  */

internal_fn
associated_internal_fn (tree fndecl)
{
  gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
  return associated_internal_fn (DECL_FUNCTION_CODE (fndecl),
				 TREE_TYPE (TREE_TYPE (fndecl)));
}

/* Check whether there is an internal function associated with function CFN
   and return type RETURN_TYPE.  Return the function if so, otherwise return
   IFN_LAST.

   Note that this function only tests whether the function is defined in
   internal-fn.def, not whether it is actually available on the target.  */

internal_fn
associated_internal_fn (combined_fn cfn, tree return_type)
{
  if (internal_fn_p (cfn))
    return as_internal_fn (cfn);
  return associated_internal_fn (as_builtin_fn (cfn), return_type);
}

/* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
   on the current target by a call to an internal function, return the
   code of that internal function, otherwise return IFN_LAST.  The caller
   is responsible for ensuring that any side-effects of the built-in
   call are dealt with correctly.  E.g. if CALL sets errno, the caller
   must decide that the errno result isn't needed or make it available
   in some other way.  */

internal_fn
replacement_internal_fn (gcall *call)
{
  if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
    {
      internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
      if (ifn != IFN_LAST)
	{
	  tree_pair types = direct_internal_fn_types (ifn, call);
	  optimization_type opt_type = bb_optimization_type (gimple_bb (call));
	  if (direct_internal_fn_supported_p (ifn, types, opt_type))
	    return ifn;
	}
    }
  return IFN_LAST;
}
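
/* A typical caller pattern, sketched here rather than taken from any one
   pass, replaces a supported builtin call with the internal function
   (single-argument case shown for brevity):

     internal_fn ifn = replacement_internal_fn (call);
     if (ifn != IFN_LAST)
       {
	 gcall *repl = gimple_build_call_internal (ifn, 1,
						   gimple_call_arg (call, 0));
	 gimple_call_set_lhs (repl, gimple_call_lhs (call));
       }

   after which the new statement still has to be inserted in place of
   CALL.  */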

/* Expand a call to one of the builtin ternary math functions (fma).
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, op1, op2, result;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  tree arg0, arg1, arg2;
  machine_mode mode;

  if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);
  arg2 = CALL_EXPR_ARG (exp, 2);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_FMA):
    CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
      builtin_optab = fma_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
    return NULL_RTX;

  result = gen_reg_rtx (mode);

  /* Always stabilize the argument list.  */
  CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
  CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
  CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);

  op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
  op1 = expand_normal (arg1);
  op2 = expand_normal (arg2);

  start_sequence ();

  /* Compute into RESULT.
     Set RESULT to wherever the result comes back.  */
  result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
			      result, 0);

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns) and call the library function
     with the stabilized argument list.  */
  if (result == 0)
    {
      end_sequence ();
      return expand_call (exp, target, target == const0_rtx);
    }

  /* Output the entire sequence.  */
  insns = get_insns ();
  end_sequence ();
  emit_insn (insns);

  return result;
}
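
/* As an example, on a target whose fma_optab handler is present for DFmode,
   __builtin_fma (a, b, c) goes through expand_ternary_op above and becomes
   a single fused multiply-add insn instead of a libm call.  */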

/* Expand a call to the builtin sin and cos math functions.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_COS):
      builtin_optab = sincos_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Check if the sincos insn is available; otherwise fall back
     to the sin or cos insn.  */
  if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      CASE_FLT_FN (BUILT_IN_SIN):
	builtin_optab = sin_optab; break;
      CASE_FLT_FN (BUILT_IN_COS):
	builtin_optab = cos_optab; break;
      default:
	gcc_unreachable ();
      }

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
    {
      rtx result = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      /* Compute into RESULT.
	 Set RESULT to wherever the result comes back.  */
      if (builtin_optab == sincos_optab)
	{
	  int ok;

	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    CASE_FLT_FN (BUILT_IN_SIN):
	      ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_COS):
	      ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
	      break;
	    default:
	      gcc_unreachable ();
	    }
	  gcc_assert (ok);
	}
      else
	result = expand_unop (mode, builtin_optab, op0, result, 0);

      if (result != 0)
	{
	  /* Output the entire sequence.  */
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn (insns);
	  return result;
	}

      /* If we were unable to expand via the builtin, stop the sequence
	 (without outputting the insns) and call the library function
	 with the stabilized argument list.  */
      end_sequence ();
    }

  return expand_call (exp, target, target == const0_rtx);
}

/* Given an interclass math builtin decl FNDECL and its argument ARG,
   return an RTL instruction code that implements the functionality.
   If that isn't possible or available return CODE_FOR_nothing.  */

static enum insn_code
interclass_mathfn_icode (tree arg, tree fndecl)
{
  bool errno_set = false;
  optab builtin_optab = unknown_optab;
  machine_mode mode;

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_ILOGB):
      errno_set = true; builtin_optab = ilogb_optab; break;
    CASE_FLT_FN (BUILT_IN_ISINF):
      builtin_optab = isinf_optab; break;
    case BUILT_IN_ISNORMAL:
    case BUILT_IN_ISFINITE:
    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_FINITED32:
    case BUILT_IN_FINITED64:
    case BUILT_IN_FINITED128:
    case BUILT_IN_ISINFD32:
    case BUILT_IN_ISINFD64:
    case BUILT_IN_ISINFD128:
      /* These builtins have no optabs (yet).  */
      break;
    default:
      gcc_unreachable ();
    }

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (flag_errno_math && errno_set)
    return CODE_FOR_nothing;

  /* Optab mode depends on the mode of the input argument.  */
  mode = TYPE_MODE (TREE_TYPE (arg));

  if (builtin_optab)
    return optab_handler (builtin_optab, mode);
  return CODE_FOR_nothing;
}

/* Expand a call to one of the builtin math functions that operate on
   a floating point argument and output an integer result (ilogb, isinf,
   isnan, etc).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_interclass_mathfn (tree exp, rtx target)
{
  enum insn_code icode = CODE_FOR_nothing;
  rtx op0;
  tree fndecl = get_callee_fndecl (exp);
  machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  icode = interclass_mathfn_icode (arg, fndecl);
  mode = TYPE_MODE (TREE_TYPE (arg));

  if (icode != CODE_FOR_nothing)
    {
      class expand_operand ops[1];
      rtx_insn *last = get_last_insn ();
      tree orig_arg = arg;

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);

      if (mode != GET_MODE (op0))
	op0 = convert_to_mode (mode, op0, 0);

      create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
      if (maybe_legitimize_operands (icode, 0, 1, ops)
	  && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
	return ops[0].value;

      delete_insns_since (last);
      CALL_EXPR_ARG (exp, 0) = orig_arg;
    }

  return NULL_RTX;
}

/* Expand a call to the builtin sincos math function.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function.  */

static rtx
expand_builtin_sincos (tree exp)
{
  rtx op0, op1, op2, target1, target2;
  machine_mode mode;
  tree arg, sinp, cosp;
  int result;
  location_t loc = EXPR_LOCATION (exp);
  tree alias_type, alias_off;

  if (!validate_arglist (exp, REAL_TYPE,
			 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  sinp = CALL_EXPR_ARG (exp, 1);
  cosp = CALL_EXPR_ARG (exp, 2);

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* Check if sincos insn is available, otherwise emit the call.  */
  if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
    return NULL_RTX;

  target1 = gen_reg_rtx (mode);
  target2 = gen_reg_rtx (mode);

  op0 = expand_normal (arg);
  alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
  alias_off = build_int_cst (alias_type, 0);
  op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
					sinp, alias_off));
  op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
					cosp, alias_off));

  /* Compute into target1 and target2.
     Set TARGET to wherever the result comes back.  */
  result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
  gcc_assert (result);

  /* Move target1 and target2 to the memory locations indicated
     by op1 and op2.  */
  emit_move_insn (op1, target1);
  emit_move_insn (op2, target2);

  return const0_rtx;
}
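
/* Note the output operand order in the expand_twoval_unop call above: the
   sincos optab's first output receives the cos value and its second the
   sin value (compare the two uses in expand_builtin_mathfn_3), so TARGET1
   ends up holding sin and TARGET2 cos before they are stored through the
   SINP and COSP pointers of a call such as sincos (x, &s, &c).  */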

/* Expand call EXP to the fegetround builtin (from C99 fenv.h), returning the
   result and setting it in TARGET.  Otherwise return NULL_RTX on failure.  */
static rtx
expand_builtin_fegetround (tree exp, rtx target, machine_mode target_mode)
{
  if (!validate_arglist (exp, VOID_TYPE))
    return NULL_RTX;

  insn_code icode = direct_optab_handler (fegetround_optab, SImode);
  if (icode == CODE_FOR_nothing)
    return NULL_RTX;

  if (target == 0
      || GET_MODE (target) != target_mode
      || !(*insn_data[icode].operand[0].predicate) (target, target_mode))
    target = gen_reg_rtx (target_mode);

  rtx pat = GEN_FCN (icode) (target);
  if (!pat)
    return NULL_RTX;
  emit_insn (pat);

  return target;
}

/* Expand call EXP to either the feclearexcept or the feraiseexcept builtin
   (from C99 fenv.h), returning the result and setting it in TARGET.
   Otherwise return NULL_RTX on failure.  */
static rtx
expand_builtin_feclear_feraise_except (tree exp, rtx target,
				       machine_mode target_mode, optab op_optab)
{
  if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;
  rtx op0 = expand_normal (CALL_EXPR_ARG (exp, 0));

  insn_code icode = direct_optab_handler (op_optab, SImode);
  if (icode == CODE_FOR_nothing)
    return NULL_RTX;

  if (!(*insn_data[icode].operand[1].predicate) (op0, GET_MODE (op0)))
    return NULL_RTX;

  if (target == 0
      || GET_MODE (target) != target_mode
      || !(*insn_data[icode].operand[0].predicate) (target, target_mode))
    target = gen_reg_rtx (target_mode);

  rtx pat = GEN_FCN (icode) (target, op0);
  if (!pat)
    return NULL_RTX;
  emit_insn (pat);

  return target;
}

/* Expand a call to the internal cexpi builtin to the sincos math function.
   EXP is the expression that is a call to the builtin function; if convenient,
   the result should be placed in TARGET.  */

static rtx
expand_builtin_cexpi (tree exp, rtx target)
{
  tree fndecl = get_callee_fndecl (exp);
  tree arg, type;
  machine_mode mode;
  rtx op0, op1, op2;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  type = TREE_TYPE (arg);
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* Try expanding via a sincos optab, and fall back to emitting a libcall
     to sincos or cexp.  One of them is certain to be available, because
     cexpi is only ever generated when sincos or cexp is.  */
  if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
    {
      op1 = gen_reg_rtx (mode);
      op2 = gen_reg_rtx (mode);

      op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);

      /* Compute into op1 and op2.  */
      expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
    }
  else if (targetm.libc_has_function (function_sincos, type))
    {
      tree call, fn = NULL_TREE;
      tree top1, top2;
      rtx op1a, op2a;

      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	fn = builtin_decl_explicit (BUILT_IN_SINCOS);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
      else
	gcc_unreachable ();

      op1 = assign_temp (TREE_TYPE (arg), 1, 1);
      op2 = assign_temp (TREE_TYPE (arg), 1, 1);
      op1a = copy_addr_to_reg (XEXP (op1, 0));
      op2a = copy_addr_to_reg (XEXP (op2, 0));
      top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
      top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);

      /* Make sure not to fold the sincos call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
				      call, 3, arg, top1, top2));
    }
  else
    {
      tree call, fn = NULL_TREE, narg;
      tree ctype = build_complex_type (type);

      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	fn = builtin_decl_explicit (BUILT_IN_CEXPF);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	fn = builtin_decl_explicit (BUILT_IN_CEXP);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	fn = builtin_decl_explicit (BUILT_IN_CEXPL);
      else
	gcc_unreachable ();

      /* If we don't have a decl for cexp create one.  This is the
	 friendliest fallback if the user calls __builtin_cexpi
	 without full target C99 function support.  */
      if (fn == NULL_TREE)
	{
	  tree fntype;
	  const char *name = NULL;

	  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	    name = "cexpf";
	  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	    name = "cexp";
	  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	    name = "cexpl";

	  fntype = build_function_type_list (ctype, ctype, NULL_TREE);
	  fn = build_fn_decl (name, fntype);
	}

      narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
			      build_real (type, dconst0), arg);

      /* Make sure not to fold the cexp call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      return expand_expr (build_call_nary (ctype, call, 1, narg),
			  target, VOIDmode, EXPAND_NORMAL);
    }

  /* Now build the proper return type.  */
  return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
			      make_tree (TREE_TYPE (arg), op2),
			      make_tree (TREE_TYPE (arg), op1)),
		      target, VOIDmode, EXPAND_NORMAL);
}

/* Conveniently construct a function call expression.  FNDECL names the
   function to be called, N is the number of arguments, and the "..."
   parameters are the argument expressions.  Unlike build_call_expr
   this doesn't fold the call, hence it will always return a CALL_EXPR.  */

static tree
build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
{
  va_list ap;
  tree fntype = TREE_TYPE (fndecl);
  tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);

  va_start (ap, n);
  fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
  va_end (ap);
  SET_EXPR_LOCATION (fn, loc);
  return fn;
}
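
/* For example, an expander needing an explicit libc call can write
   something along the lines of

     tree call = build_call_nofold_loc (loc, fndecl, 2, dst, src);
     expand_call (call, target, false);

   where FNDECL, DST and SRC stand for whatever decl and arguments the
   caller has at hand; the point is that the returned CALL_EXPR is never
   re-folded.  */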

/* Expand the __builtin_issignaling builtin.  This needs to handle
   all floating point formats that do support NaNs (for those that
   don't it just sets target to 0).  */

static rtx
expand_builtin_issignaling (tree exp, rtx target)
{
  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree arg = CALL_EXPR_ARG (exp, 0);
  scalar_float_mode fmode = SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (arg));
  const struct real_format *fmt = REAL_MODE_FORMAT (fmode);

  /* Expand the argument yielding an RTX expression.  */
  rtx temp = expand_normal (arg);

  /* If the mode doesn't support NaN, always return 0.
     Don't use !HONOR_SNANS (fmode) here, so there is some possibility of
     __builtin_issignaling working without -fsignaling-nans.  Especially
     when -fno-signaling-nans is the default.
     On the other hand, MODE_HAS_NANS (fmode) is unnecessary: with
     -ffinite-math-only even __builtin_isnan or __builtin_fpclassify
     fold to 0 or non-NaN/Inf classification.  */
  if (!HONOR_NANS (fmode))
    {
      emit_move_insn (target, const0_rtx);
      return target;
    }

  /* Check if the back end provides an insn that handles issignaling for the
     argument's mode.  */
  enum insn_code icode = optab_handler (issignaling_optab, fmode);
  if (icode != CODE_FOR_nothing)
    {
      rtx_insn *last = get_last_insn ();
      rtx this_target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
      if (maybe_emit_unop_insn (icode, this_target, temp, UNKNOWN))
	return this_target;
      delete_insns_since (last);
    }

  if (DECIMAL_FLOAT_MODE_P (fmode))
    {
      scalar_int_mode imode;
      rtx hi;
      switch (fmt->ieee_bits)
	{
	case 32:
	case 64:
	  imode = int_mode_for_mode (fmode).require ();
	  temp = gen_lowpart (imode, temp);
	  break;
	case 128:
	  imode = int_mode_for_size (64, 1).require ();
	  hi = NULL_RTX;
	  /* For decimal128, TImode support isn't always there and even when
	     it is, working on the DImode high part is usually better.  */
	  if (!MEM_P (temp))
	    {
	      if (rtx t = simplify_gen_subreg (imode, temp, fmode,
					       subreg_highpart_offset (imode,
								       fmode)))
		hi = t;
	      else
		{
		  scalar_int_mode imode2;
		  if (int_mode_for_mode (fmode).exists (&imode2))
		    {
		      rtx temp2 = gen_lowpart (imode2, temp);
		      poly_uint64 off = subreg_highpart_offset (imode, imode2);
		      if (rtx t = simplify_gen_subreg (imode, temp2,
						       imode2, off))
			hi = t;
		    }
		}
	      if (!hi)
		{
		  rtx mem = assign_stack_temp (fmode, GET_MODE_SIZE (fmode));
		  emit_move_insn (mem, temp);
		  temp = mem;
		}
	    }
	  if (!hi)
	    {
	      poly_int64 offset
		= subreg_highpart_offset (imode, GET_MODE (temp));
	      hi = adjust_address (temp, imode, offset);
	    }
	  temp = hi;
	  break;
	default:
	  gcc_unreachable ();
	}
      /* In all of decimal{32,64,128}, the MSB is the sign bit and an
	 sNaN has the 6 bits below it all set.  */
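      /* Illustrative constants (an assumption, for decimal32 where IMODE
	 is 32 bits wide): GET_MODE_BITSIZE (imode) - 7 == 25, so VAL is
	 0x7e000000, the six combination-field bits just below the sign.
	 An sNaN such as 0x7e000123 satisfies (temp & val) == val, while
	 a qNaN (top bits 0x7c/0x7d) and every finite value do not.  */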
      rtx val
	= GEN_INT (HOST_WIDE_INT_C (0x3f) << (GET_MODE_BITSIZE (imode) - 7));
      temp = expand_binop (imode, and_optab, temp, val,
			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
      temp = emit_store_flag_force (target, EQ, temp, val, imode, 1, 1);
      return temp;
    }

  /* Only PDP11 has these defined differently but doesn't support NaNs.  */
  gcc_assert (FLOAT_WORDS_BIG_ENDIAN == WORDS_BIG_ENDIAN);
  gcc_assert (fmt->signbit_ro > 0 && fmt->b == 2);
  gcc_assert (MODE_COMPOSITE_P (fmode)
	      || (fmt->pnan == fmt->p
		  && fmt->signbit_ro == fmt->signbit_rw));

  switch (fmt->p)
    {
    case 106:	/* IBM double double */
      /* For IBM double double, recurse on the most significant double.  */
      gcc_assert (MODE_COMPOSITE_P (fmode));
      temp = convert_modes (DFmode, fmode, temp, 0);
      fmode = DFmode;
      fmt = REAL_MODE_FORMAT (DFmode);
      /* FALLTHRU */
    case 8:	/* bfloat */
    case 11:	/* IEEE half */
    case 24:	/* IEEE single */
    case 53:	/* IEEE double or Intel extended with rounding to double */
      if (fmt->p == 53 && fmt->signbit_ro == 79)
	goto extended;
      {
	scalar_int_mode imode = int_mode_for_mode (fmode).require ();
	temp = gen_lowpart (imode, temp);
	rtx val = GEN_INT ((HOST_WIDE_INT_M1U << (fmt->p - 2))
			   & ~(HOST_WIDE_INT_M1U << fmt->signbit_ro));
	if (fmt->qnan_msb_set)
	  {
	    rtx mask = GEN_INT (~(HOST_WIDE_INT_M1U << fmt->signbit_ro));
	    rtx bit = GEN_INT (HOST_WIDE_INT_1U << (fmt->p - 2));
	    /* For non-MIPS/PA IEEE single/double/half or bfloat, expand to:
	       ((temp ^ bit) & mask) > val.  */
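	    /* Illustrative constants (assuming IEEE single, fmt->p == 24,
	       fmt->signbit_ro == 31): BIT == 0x00400000 (the quiet bit),
	       MASK == 0x7fffffff and VAL == 0x7fc00000.  An sNaN such as
	       0x7f800001 XORs to 0x7fc00001 > VAL; a qNaN 0x7fc00000
	       XORs to 0x7f800000 and +Inf 0x7f800000 XORs to exactly
	       VAL, so neither compares above it.  */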
	    temp = expand_binop (imode, xor_optab, temp, bit,
				 NULL_RTX, 1, OPTAB_LIB_WIDEN);
	    temp = expand_binop (imode, and_optab, temp, mask,
				 NULL_RTX, 1, OPTAB_LIB_WIDEN);
	    temp = emit_store_flag_force (target, GTU, temp, val, imode,
					  1, 1);
	  }
	else
	  {
	    /* For MIPS/PA IEEE single/double, expand to:
	       (temp & val) == val.  */
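	    /* Illustrative constants (IEEE single again): VAL has all the
	       exponent bits plus the mantissa MSB set, 0x7fc00000.  On
	       these targets a NaN is signaling when the mantissa MSB is
	       set, so (temp & 0x7fc00000) == 0x7fc00000 holds exactly
	       for sNaNs.  */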
	    temp = expand_binop (imode, and_optab, temp, val,
				 NULL_RTX, 1, OPTAB_LIB_WIDEN);
	    temp = emit_store_flag_force (target, EQ, temp, val, imode,
					  1, 1);
	  }
      }
      break;
    case 113:	/* IEEE quad */
      {
	rtx hi = NULL_RTX, lo = NULL_RTX;
	scalar_int_mode imode = int_mode_for_size (64, 1).require ();
	/* For IEEE quad, TImode support isn't always there and even when
	   it is, working on DImode parts is usually better.  */
	if (!MEM_P (temp))
	  {
	    hi = simplify_gen_subreg (imode, temp, fmode,
				      subreg_highpart_offset (imode, fmode));
	    lo = simplify_gen_subreg (imode, temp, fmode,
				      subreg_lowpart_offset (imode, fmode));
	    if (!hi || !lo)
	      {
		scalar_int_mode imode2;
		if (int_mode_for_mode (fmode).exists (&imode2))
		  {
		    rtx temp2 = gen_lowpart (imode2, temp);
		    hi = simplify_gen_subreg (imode, temp2, imode2,
					      subreg_highpart_offset (imode,
								      imode2));
		    lo = simplify_gen_subreg (imode, temp2, imode2,
					      subreg_lowpart_offset (imode,
								     imode2));
		  }
	      }
	    if (!hi || !lo)
	      {
		rtx mem = assign_stack_temp (fmode, GET_MODE_SIZE (fmode));
		emit_move_insn (mem, temp);
		temp = mem;
	      }
	  }
	if (!hi || !lo)
	  {
	    poly_int64 offset
	      = subreg_highpart_offset (imode, GET_MODE (temp));
	    hi = adjust_address (temp, imode, offset);
	    offset = subreg_lowpart_offset (imode, GET_MODE (temp));
	    lo = adjust_address (temp, imode, offset);
	  }
	rtx val = GEN_INT ((HOST_WIDE_INT_M1U << (fmt->p - 2 - 64))
			   & ~(HOST_WIDE_INT_M1U << (fmt->signbit_ro - 64)));
	if (fmt->qnan_msb_set)
	  {
	    rtx mask = GEN_INT (~(HOST_WIDE_INT_M1U << (fmt->signbit_ro
							- 64)));
	    rtx bit = GEN_INT (HOST_WIDE_INT_1U << (fmt->p - 2 - 64));
	    /* For non-MIPS/PA IEEE quad, expand to:
	       (((hi ^ bit) | ((lo | -lo) >> 63)) & mask) > val.  */
	    rtx nlo = expand_unop (imode, neg_optab, lo, NULL_RTX, 0);
	    lo = expand_binop (imode, ior_optab, lo, nlo,
			       NULL_RTX, 1, OPTAB_LIB_WIDEN);
	    lo = expand_shift (RSHIFT_EXPR, imode, lo, 63, NULL_RTX, 1);
	    temp = expand_binop (imode, xor_optab, hi, bit,
				 NULL_RTX, 1, OPTAB_LIB_WIDEN);
	    temp = expand_binop (imode, ior_optab, temp, lo,
				 NULL_RTX, 1, OPTAB_LIB_WIDEN);
	    temp = expand_binop (imode, and_optab, temp, mask,
				 NULL_RTX, 1, OPTAB_LIB_WIDEN);
	    temp = emit_store_flag_force (target, GTU, temp, val, imode,
					  1, 1);
	  }
	else
	  {
	    /* For MIPS/PA IEEE quad, expand to:
	       (hi & val) == val.  */
	    temp = expand_binop (imode, and_optab, hi, val,
				 NULL_RTX, 1, OPTAB_LIB_WIDEN);
	    temp = emit_store_flag_force (target, EQ, temp, val, imode,
					  1, 1);
	  }
      }
      break;
    case 64:	/* Intel or Motorola extended */
    extended:
      {
	rtx ex, hi, lo;
	scalar_int_mode imode = int_mode_for_size (32, 1).require ();
	scalar_int_mode iemode = int_mode_for_size (16, 1).require ();
	if (!MEM_P (temp))
	  {
	    rtx mem = assign_stack_temp (fmode, GET_MODE_SIZE (fmode));
	    emit_move_insn (mem, temp);
	    temp = mem;
	  }
	if (fmt->signbit_ro == 95)
	  {
	    /* Motorola, always big endian, with 16-bit gap in between
	       16-bit sign+exponent and 64-bit mantissa.  */
	    ex = adjust_address (temp, iemode, 0);
	    hi = adjust_address (temp, imode, 4);
	    lo = adjust_address (temp, imode, 8);
	  }
	else if (!WORDS_BIG_ENDIAN)
	  {
	    /* Intel little endian, 64-bit mantissa followed by 16-bit
	       sign+exponent and then either 16 or 48 bits of gap.  */
	    ex = adjust_address (temp, iemode, 8);
	    hi = adjust_address (temp, imode, 4);
	    lo = adjust_address (temp, imode, 0);
	  }
	else
	  {
	    /* Big endian Itanium.  */
	    ex = adjust_address (temp, iemode, 0);
	    hi = adjust_address (temp, imode, 2);
	    lo = adjust_address (temp, imode, 6);
	  }
	rtx val = GEN_INT (HOST_WIDE_INT_M1U << 30);
	gcc_assert (fmt->qnan_msb_set);
	rtx mask = GEN_INT (0x7fff);
	rtx bit = GEN_INT (HOST_WIDE_INT_1U << 30);
	/* For Intel/Motorola extended format, expand to:
	   (ex & mask) == mask && ((hi ^ bit) | ((lo | -lo) >> 31)) > val.  */
	rtx nlo = expand_unop (imode, neg_optab, lo, NULL_RTX, 0);
	lo = expand_binop (imode, ior_optab, lo, nlo,
			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
	lo = expand_shift (RSHIFT_EXPR, imode, lo, 31, NULL_RTX, 1);
	temp = expand_binop (imode, xor_optab, hi, bit,
			     NULL_RTX, 1, OPTAB_LIB_WIDEN);
	temp = expand_binop (imode, ior_optab, temp, lo,
			     NULL_RTX, 1, OPTAB_LIB_WIDEN);
	temp = emit_store_flag_force (target, GTU, temp, val, imode, 1, 1);
	ex = expand_binop (iemode, and_optab, ex, mask,
			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
	ex = emit_store_flag_force (gen_reg_rtx (GET_MODE (temp)), EQ,
				    ex, mask, iemode, 1, 1);
	temp = expand_binop (GET_MODE (temp), and_optab, temp, ex,
			     NULL_RTX, 1, OPTAB_LIB_WIDEN);
      }
      break;
    default:
      gcc_unreachable ();
    }

  return temp;
}
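
/* A minimal C sketch (illustrative only, not part of the expansion) of
   the qnan_msb_set test emitted above for IEEE single:

     static int
     issignaling_sf (float f)
     {
       unsigned int i;
       memcpy (&i, &f, sizeof i);	/* needs <string.h> */
       return ((i ^ 0x00400000u) & 0x7fffffffu) > 0x7fc00000u;
     }

   The RTL above computes the same value with expand_binop and
   emit_store_flag_force instead.  */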

/* Expand a call to one of the builtin rounding functions gcc defines
   as an extension (lfloor and lceil).  As these are gcc extensions we
   do not need to worry about setting errno to EDOM.
   If expanding via optab fails, lower expression to (int)(floor(x)).
   EXP is the expression that is a call to the builtin function;
   if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_int_roundingfn (tree exp, rtx target)
{
  convert_optab builtin_optab;
  rtx op0, tmp;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  enum built_in_function fallback_fn;
  tree fallback_fndecl;
  machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_ICEIL):
    CASE_FLT_FN (BUILT_IN_LCEIL):
    CASE_FLT_FN (BUILT_IN_LLCEIL):
      builtin_optab = lceil_optab;
      fallback_fn = BUILT_IN_CEIL;
      break;

    CASE_FLT_FN (BUILT_IN_IFLOOR):
    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
      builtin_optab = lfloor_optab;
      fallback_fn = BUILT_IN_FLOOR;
      break;

    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  target = gen_reg_rtx (mode);

  /* Wrap the computation of the argument in a SAVE_EXPR, as we may
     need to expand the argument again.  This way, we will not perform
     side-effects more than once.  */
  CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

  op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);

  start_sequence ();

  /* Compute into TARGET.  */
  if (expand_sfix_optab (target, op0, builtin_optab))
    {
      /* Output the entire sequence.  */
      insns = get_insns ();
      end_sequence ();
      emit_insn (insns);
      return target;
    }

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns).  */
  end_sequence ();

  /* Fall back to floating point rounding optab.  */
  fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);

  /* For non-C99 targets we may end up without a fallback fndecl here
     if the user called __builtin_lfloor directly.  In this case emit
     a call to the floor/ceil variants nevertheless.  This should result
     in the best user experience for targets lacking full C99 support.  */
  if (fallback_fndecl == NULL_TREE)
    {
      tree fntype;
      const char *name = NULL;

      switch (DECL_FUNCTION_CODE (fndecl))
	{
	case BUILT_IN_ICEIL:
	case BUILT_IN_LCEIL:
	case BUILT_IN_LLCEIL:
	  name = "ceil";
	  break;
	case BUILT_IN_ICEILF:
	case BUILT_IN_LCEILF:
	case BUILT_IN_LLCEILF:
	  name = "ceilf";
	  break;
	case BUILT_IN_ICEILL:
	case BUILT_IN_LCEILL:
	case BUILT_IN_LLCEILL:
	  name = "ceill";
	  break;
	case BUILT_IN_IFLOOR:
	case BUILT_IN_LFLOOR:
	case BUILT_IN_LLFLOOR:
	  name = "floor";
	  break;
	case BUILT_IN_IFLOORF:
	case BUILT_IN_LFLOORF:
	case BUILT_IN_LLFLOORF:
	  name = "floorf";
	  break;
	case BUILT_IN_IFLOORL:
	case BUILT_IN_LFLOORL:
	case BUILT_IN_LLFLOORL:
	  name = "floorl";
	  break;
	default:
	  gcc_unreachable ();
	}

      fntype = build_function_type_list (TREE_TYPE (arg),
					 TREE_TYPE (arg), NULL_TREE);
      fallback_fndecl = build_fn_decl (name, fntype);
    }

  exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);

  tmp = expand_normal (exp);
  tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));

  /* Truncate the result of floating point optab to integer
     via expand_fix ().  */
  target = gen_reg_rtx (mode);
  expand_fix (target, tmp, 0);

  return target;
}
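
/* Illustrative lowering (assuming no lfloor optab and a non-C99 target):
   __builtin_lfloor (x) is expanded by the fallback path above as

     long l = (long) floor (x);

   i.e. a call to floor followed by expand_fix on its result.  */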

/* Expand a call to one of the builtin math functions doing integer
   conversion (lrint).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_int_roundingfn_2 (tree exp, rtx target)
{
  convert_optab builtin_optab;
  rtx op0;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  tree arg;
  machine_mode mode;
  enum built_in_function fallback_fn = BUILT_IN_NONE;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_IRINT):
      fallback_fn = BUILT_IN_LRINT;
      gcc_fallthrough ();
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_LLRINT):
      builtin_optab = lrint_optab;
      break;

    CASE_FLT_FN (BUILT_IN_IROUND):
      fallback_fn = BUILT_IN_LROUND;
      gcc_fallthrough ();
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_LLROUND):
      builtin_optab = lround_optab;
      break;

    default:
      gcc_unreachable ();
    }

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
    return NULL_RTX;

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (!flag_errno_math)
    {
      rtx result = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      if (expand_sfix_optab (result, op0, builtin_optab))
	{
	  /* Output the entire sequence.  */
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn (insns);
	  return result;
	}

      /* If we were unable to expand via the builtin, stop the sequence
	 (without outputting the insns) and call the library function
	 with the stabilized argument list.  */
      end_sequence ();
    }

  if (fallback_fn != BUILT_IN_NONE)
    {
      /* Fall back to rounding to long int.  Use implicit_p 0 - for non-C99
	 targets, (int) round (x) should never be transformed into
	 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
	 a call to lround in the hope that the target provides at least some
	 C99 functions.  This should result in the best user experience for
	 targets lacking full C99 support.
	 As scalar float conversions with same mode are useless in GIMPLE,
	 we can end up e.g. with _Float32 argument passed to float builtin,
	 try to get the type from the builtin prototype first.  */
      tree fallback_fndecl = NULL_TREE;
      if (tree argtypes = TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
	fallback_fndecl
	  = mathfn_built_in_1 (TREE_VALUE (argtypes),
			       as_combined_fn (fallback_fn), 0);
      if (fallback_fndecl == NULL_TREE)
	fallback_fndecl
	  = mathfn_built_in_1 (TREE_TYPE (arg),
			       as_combined_fn (fallback_fn), 0);
      if (fallback_fndecl)
	{
	  exp = build_call_nofold_loc (EXPR_LOCATION (exp),
				       fallback_fndecl, 1, arg);

	  target = expand_call (exp, NULL_RTX, target == const0_rtx);
	  target = maybe_emit_group_store (target, TREE_TYPE (exp));
	  return convert_to_mode (mode, target, 0);
	}
    }

  return expand_call (exp, target, target == const0_rtx);
}
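
/* Illustrative fallback (assuming no irint/iround optab): a call
   __builtin_iround (x) is re-expanded by the path above as

     int i = (int) lround (x);

   i.e. a call to lround followed by convert_to_mode on the result.  */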

/* Expand a call to the powi built-in mathematical function.  Return NULL_RTX
   if a normal call should be emitted rather than expanding the function
   in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_powi (tree exp, rtx target)
{
  tree arg0, arg1;
  rtx op0, op1;
  machine_mode mode;
  machine_mode mode2;

  if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Emit a libcall to libgcc.  */

  /* Mode of the 2nd argument must match that of an int.  */
  mode2 = int_mode_for_size (INT_TYPE_SIZE, 0).require ();

  if (target == NULL_RTX)
    target = gen_reg_rtx (mode);

  op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
  if (GET_MODE (op0) != mode)
    op0 = convert_to_mode (mode, op0, 0);
  op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
  if (GET_MODE (op1) != mode2)
    op1 = convert_to_mode (mode2, op1, 0);

  target = emit_library_call_value (optab_libfunc (powi_optab, mode),
				    target, LCT_CONST, mode,
				    op0, mode, op1, mode2);

  return target;
}
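
/* Illustrative libcall (assuming DFmode): __builtin_powi (x, n) becomes
   a call to libgcc's

     double __powidf2 (double x, int n);

   emitted via emit_library_call_value with LCT_CONST, since the result
   depends only on the operands.  */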

/* Expand expression EXP which is a call to the strlen builtin.  Return
   NULL_RTX if we failed and the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_strlen (tree exp, rtx target,
		       machine_mode target_mode)
{
  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree src = CALL_EXPR_ARG (exp, 0);

  /* If the length can be computed at compile-time, return it.  */
  if (tree len = c_strlen (src, 0))
    return expand_expr (len, target, target_mode, EXPAND_NORMAL);

  /* If the length can be computed at compile-time and is a constant
     integer, but there are side-effects in src, evaluate
     src for side-effects, then return len.
     E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
     can be optimized into: i++; x = 3;  */
  tree len = c_strlen (src, 1);
  if (len && TREE_CODE (len) == INTEGER_CST)
    {
      expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
      return expand_expr (len, target, target_mode, EXPAND_NORMAL);
    }

  unsigned int align = get_pointer_alignment (src) / BITS_PER_UNIT;

  /* If SRC is not a pointer type, don't do this operation inline.  */
  if (align == 0)
    return NULL_RTX;

  /* Bail out if we can't compute strlen in the right mode.  */
  machine_mode insn_mode;
  enum insn_code icode = CODE_FOR_nothing;
  FOR_EACH_MODE_FROM (insn_mode, target_mode)
    {
      icode = optab_handler (strlen_optab, insn_mode);
      if (icode != CODE_FOR_nothing)
	break;
    }
  if (insn_mode == VOIDmode)
    return NULL_RTX;

  /* Make a place to hold the source address.  We will not expand
     the actual source until we are sure that the expansion will
     not fail -- there are trees that cannot be expanded twice.  */
  rtx src_reg = gen_reg_rtx (Pmode);

  /* Mark the beginning of the strlen sequence so we can emit the
     source operand later.  */
  rtx_insn *before_strlen = get_last_insn ();

  class expand_operand ops[4];
  create_output_operand (&ops[0], target, insn_mode);
  create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
  create_integer_operand (&ops[2], 0);
  create_integer_operand (&ops[3], align);
  if (!maybe_expand_insn (icode, 4, ops))
    return NULL_RTX;

  /* Check to see if the argument was declared attribute nonstring
     and if so, issue a warning since at this point it's not known
     to be nul-terminated.  */
  maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);

  /* Now that we are assured of success, expand the source.  */
  start_sequence ();
  rtx pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
  if (pat != src_reg)
    {
#ifdef POINTERS_EXTEND_UNSIGNED
      if (GET_MODE (pat) != Pmode)
	pat = convert_to_mode (Pmode, pat,
			       POINTERS_EXTEND_UNSIGNED);
#endif
      emit_move_insn (src_reg, pat);
    }
  pat = get_insns ();
  end_sequence ();

  if (before_strlen)
    emit_insn_after (pat, before_strlen);
  else
    emit_insn_before (pat, get_insns ());

  /* Return the value in the proper mode for this function.  */
  if (GET_MODE (ops[0].value) == target_mode)
    target = ops[0].value;
  else if (target != 0)
    convert_move (target, ops[0].value, 0);
  else
    target = convert_to_mode (target_mode, ops[0].value, 0);

  return target;
}

/* Expand call EXP to the strnlen built-in, returning the result
   and setting it in TARGET.  Otherwise return NULL_RTX on failure.  */

static rtx
expand_builtin_strnlen (tree exp, rtx target, machine_mode target_mode)
{
  if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree src = CALL_EXPR_ARG (exp, 0);
  tree bound = CALL_EXPR_ARG (exp, 1);

  if (!bound)
    return NULL_RTX;

  location_t loc = UNKNOWN_LOCATION;
  if (EXPR_HAS_LOCATION (exp))
    loc = EXPR_LOCATION (exp);

  /* FIXME: Change c_strlen () to return sizetype instead of ssizetype
     so these conversions aren't necessary.  */
  c_strlen_data lendata = { };
  tree len = c_strlen (src, 0, &lendata, 1);
  if (len)
    len = fold_convert_loc (loc, TREE_TYPE (bound), len);

  if (TREE_CODE (bound) == INTEGER_CST)
    {
      if (!len)
	return NULL_RTX;

      len = fold_build2_loc (loc, MIN_EXPR, size_type_node, len, bound);
      return expand_expr (len, target, target_mode, EXPAND_NORMAL);
    }

  if (TREE_CODE (bound) != SSA_NAME)
    return NULL_RTX;

  wide_int min, max;
  value_range r;
  get_global_range_query ()->range_of_expr (r, bound);
  if (r.kind () != VR_RANGE)
    return NULL_RTX;
  min = r.lower_bound ();
  max = r.upper_bound ();

  if (!len || TREE_CODE (len) != INTEGER_CST)
    {
      bool exact;
      lendata.decl = unterminated_array (src, &len, &exact);
      if (!lendata.decl)
	return NULL_RTX;
    }

  if (lendata.decl)
    return NULL_RTX;

  if (wi::gtu_p (min, wi::to_wide (len)))
    return expand_expr (len, target, target_mode, EXPAND_NORMAL);

  len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, bound);
  return expand_expr (len, target, target_mode, EXPAND_NORMAL);
}

/* Callback routine for store_by_pieces.  Read GET_MODE_BITSIZE (MODE)
   bits from the bytes at DATA + OFFSET and return them reinterpreted as
   a target constant.  */

static rtx
builtin_memcpy_read_str (void *data, void *, HOST_WIDE_INT offset,
			 fixed_size_mode mode)
{
  /* The REPresentation pointed to by DATA need not be a nul-terminated
     string but the caller guarantees it's large enough for MODE.  */
  const char *rep = (const char *) data;

  /* The by-pieces infrastructure does not try to pick a vector mode
     for memcpy expansion.  */
  return c_readstr (rep + offset, as_a <scalar_int_mode> (mode),
		    /*nul_terminated=*/false);
}

/* LEN specifies the length of the block of the memcpy/memset operation.
   Figure out its range and store it in MIN_SIZE/MAX_SIZE.
   In some cases we can make a very likely guess on the max size, in
   which case we store it in PROBABLE_MAX_SIZE.  */

static void
determine_block_size (tree len, rtx len_rtx,
		      unsigned HOST_WIDE_INT *min_size,
		      unsigned HOST_WIDE_INT *max_size,
		      unsigned HOST_WIDE_INT *probable_max_size)
{
  if (CONST_INT_P (len_rtx))
    {
      *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
      return;
    }
  else
    {
      wide_int min, max;
      enum value_range_kind range_type = VR_UNDEFINED;

      /* Determine bounds from the type.  */
      if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
	*min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
      else
	*min_size = 0;
      if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
	*probable_max_size = *max_size
	  = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
      else
	*probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));

      if (TREE_CODE (len) == SSA_NAME)
	{
	  value_range r;
	  get_global_range_query ()->range_of_expr (r, len);
	  range_type = r.kind ();
	  if (range_type != VR_UNDEFINED)
	    {
	      min = wi::to_wide (r.min ());
	      max = wi::to_wide (r.max ());
	    }
	}
      if (range_type == VR_RANGE)
	{
	  if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
	    *min_size = min.to_uhwi ();
	  if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
	    *probable_max_size = *max_size = max.to_uhwi ();
	}
      else if (range_type == VR_ANTI_RANGE)
	{
	  /* Code like

	     int n;
	     if (n < 100)
	       memcpy (a, b, n)

	     produces an anti-range allowing negative values of N.  We
	     can still use the information and make a guess that N is
	     not negative.  */
	  if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
	    *probable_max_size = min.to_uhwi () - 1;
	}
    }
  gcc_checking_assert (*max_size <=
		       (unsigned HOST_WIDE_INT)
		       GET_MODE_MASK (GET_MODE (len_rtx)));
}
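
/* Illustrative outcome (a hypothetical call, not from the sources): for
   memcpy (a, b, n) where N is an SSA name of type size_t with a recorded
   global range of [16, 256], the code above yields *MIN_SIZE == 16 and
   *MAX_SIZE == *PROBABLE_MAX_SIZE == 256; with no useful range, the
   bounds degrade to the type's [0, GET_MODE_MASK] limits.  */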
3603 | |
3604 | /* Expand a call EXP to the memcpy builtin. |
3605 | Return NULL_RTX if we failed, the caller should emit a normal call, |
3606 | otherwise try to get the result in TARGET, if convenient (and in |
3607 | mode MODE if that's convenient). */ |
3608 | |
3609 | static rtx |
3610 | expand_builtin_memcpy (tree exp, rtx target) |
3611 | { |
3612 | if (!validate_arglist (exp, |
3613 | POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)) |
3614 | return NULL_RTX(rtx) 0; |
3615 | |
3616 | tree dest = CALL_EXPR_ARG (exp, 0)(*((const_cast<tree*> (tree_operand_check (((tree_check ((exp), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.cc" , 3616, __FUNCTION__, (CALL_EXPR)))), ((0) + 3), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.cc" , 3616, __FUNCTION__))))); |
3617 | tree src = CALL_EXPR_ARG (exp, 1)(*((const_cast<tree*> (tree_operand_check (((tree_check ((exp), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.cc" , 3617, __FUNCTION__, (CALL_EXPR)))), ((1) + 3), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.cc" , 3617, __FUNCTION__))))); |
3618 | tree len = CALL_EXPR_ARG (exp, 2)(*((const_cast<tree*> (tree_operand_check (((tree_check ((exp), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.cc" , 3618, __FUNCTION__, (CALL_EXPR)))), ((2) + 3), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.cc" , 3618, __FUNCTION__))))); |
3619 | |
3620 | return expand_builtin_memory_copy_args (dest, src, len, target, exp, |
3621 | /*retmode=*/ RETURN_BEGIN, false); |
3622 | } |
3623 | |
3624 | /* Check a call EXP to the memmove built-in for validity. |
3625 | Return NULL_RTX on both success and failure. */ |
3626 | |
3627 | static rtx |
3628 | expand_builtin_memmove (tree exp, rtx target) |
3629 | { |
3630 | if (!validate_arglist (exp, |
3631 | POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)) |
3632 | return NULL_RTX(rtx) 0; |
3633 | |
3634 | tree dest = CALL_EXPR_ARG (exp, 0)(*((const_cast<tree*> (tree_operand_check (((tree_check ((exp), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.cc" , 3634, __FUNCTION__, (CALL_EXPR)))), ((0) + 3), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.cc" , 3634, __FUNCTION__))))); |
3635 | tree src = CALL_EXPR_ARG (exp, 1)(*((const_cast<tree*> (tree_operand_check (((tree_check ((exp), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.cc" , 3635, __FUNCTION__, (CALL_EXPR)))), ((1) + 3), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.cc" , 3635, __FUNCTION__))))); |
3636 | tree len = CALL_EXPR_ARG (exp, 2)(*((const_cast<tree*> (tree_operand_check (((tree_check ((exp), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.cc" , 3636, __FUNCTION__, (CALL_EXPR)))), ((2) + 3), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.cc" , 3636, __FUNCTION__))))); |
3637 | |
3638 | return expand_builtin_memory_copy_args (dest, src, len, target, exp, |
3639 | /*retmode=*/ RETURN_BEGIN, true); |
3640 | } |

/* Expand a call EXP to the mempcpy builtin.
   Return NULL_RTX if we failed; the caller should emit a normal call.
   Otherwise try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_mempcpy (tree exp, rtx target)
{
  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);
  tree len = CALL_EXPR_ARG (exp, 2);

  /* Policy does not generally allow using compute_objsize (which
     is used internally by check_memop_size) to change code generation
     or drive optimization decisions.

     In this instance it is safe because the code we generate has
     the same semantics regardless of the return value of
     check_memop_size.  Exactly the same amount of data is copied
     and the return value is exactly the same in both cases.

     Furthermore, check_memop_size always uses mode 0 for the call to
     compute_objsize, so the imprecise nature of compute_objsize is
     avoided.  */

  /* Avoid expanding mempcpy into memcpy when the call is determined
     to overflow the buffer.  This also prevents the same overflow
     from being diagnosed again when expanding memcpy.  */

  return expand_builtin_mempcpy_args (dest, src, len,
				      target, exp, /*retmode=*/ RETURN_END);
}
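
/* For illustration (editorial sketch): unlike memcpy, which returns its
   first argument, mempcpy returns the first byte past the copied block,
   so

     void *r = __builtin_mempcpy (d, s, 5);

   leaves R == (char *) d + 5.  That is what retmode RETURN_END requests
   from the common expansion path below.  */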

/* Helper function to do the actual work for expand of memory copy family
   functions (memcpy, mempcpy, stpcpy).  Expansion should assign LEN bytes
   of memory from SRC to DEST and assign to TARGET if convenient.  Return
   value is based on RETMODE argument.  */

static rtx
expand_builtin_memory_copy_args (tree dest, tree src, tree len,
				 rtx target, tree exp, memop_ret retmode,
				 bool might_overlap)
{
  unsigned int src_align = get_pointer_alignment (src);
  unsigned int dest_align = get_pointer_alignment (dest);
  rtx dest_mem, src_mem, dest_addr, len_rtx;
  HOST_WIDE_INT expected_size = -1;
  unsigned int expected_align = 0;
  unsigned HOST_WIDE_INT min_size;
  unsigned HOST_WIDE_INT max_size;
  unsigned HOST_WIDE_INT probable_max_size;

  bool is_move_done;

  /* If DEST is not a pointer type, call the normal function.  */
  if (dest_align == 0)
    return NULL_RTX;

  /* If SRC is not a pointer type, don't do this
     operation in-line.  */
  if (src_align == 0)
    return NULL_RTX;

  if (currently_expanding_gimple_stmt)
    stringop_block_profile (currently_expanding_gimple_stmt,
			    &expected_align, &expected_size);

  if (expected_align < dest_align)
    expected_align = dest_align;
  dest_mem = get_memory_rtx (dest, len);
  set_mem_align (dest_mem, dest_align);
  len_rtx = expand_normal (len);
  determine_block_size (len, len_rtx, &min_size, &max_size,
			&probable_max_size);

  /* Try to get the byte representation of the constant SRC points to,
     with its byte size in NBYTES.  */
  unsigned HOST_WIDE_INT nbytes;
  const char *rep = getbyterep (src, &nbytes);

  /* If the function's constant bound LEN_RTX is less than or equal
     to the byte size of the representation of the constant argument,
     and if block move would be done by pieces, we can avoid loading
     the bytes from memory and only store the computed constant.
     This works in the overlap (memmove) case as well because
     store_by_pieces just generates a series of stores of constants
     from the representation returned by getbyterep().  */
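
  /* For illustration (editorial sketch): given

       memcpy (d, "abcd", 4);

     getbyterep returns REP = "abcd" with NBYTES = 5 (the terminating
     NUL is part of the representation), and LEN_RTX is the constant
     4 <= 5, so on a little-endian target store_by_pieces can emit a
     single SImode store of 0x64636261 instead of loading the literal
     from memory.  */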
  if (rep
      && CONST_INT_P (len_rtx)
      && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= nbytes
      && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
			      CONST_CAST (char *, rep),
			      dest_align, false))
    {
      dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
				  builtin_memcpy_read_str,
				  CONST_CAST (char *, rep),
				  dest_align, false, retmode);
      dest_mem = force_operand (XEXP (dest_mem, 0), target);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  src_mem = get_memory_rtx (src, len);
  set_mem_align (src_mem, src_align);

  /* Copy word part most expediently.  */
  enum block_op_methods method = BLOCK_OP_NORMAL;
  if (CALL_EXPR_TAILCALL (exp)
      && (retmode == RETURN_BEGIN || target == const0_rtx))
    method = BLOCK_OP_TAILCALL;
  bool use_mempcpy_call = (targetm.libc_has_fast_function (BUILT_IN_MEMPCPY)
			   && retmode == RETURN_END
			   && !might_overlap
			   && target != const0_rtx);
  if (use_mempcpy_call)
    method = BLOCK_OP_NO_LIBCALL_RET;
  dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx, method,
				     expected_align, expected_size,
				     min_size, max_size, probable_max_size,
				     use_mempcpy_call, &is_move_done,
				     might_overlap);

  /* Bail out when a mempcpy call would be expanded as libcall and when
     we have a target that provides a fast implementation
     of mempcpy routine.  */
  if (!is_move_done)
    return NULL_RTX;

  if (dest_addr == pc_rtx)
    return NULL_RTX;

  if (dest_addr == 0)
    {
      dest_addr = force_operand (XEXP (dest_mem, 0), target);
      dest_addr = convert_memory_address (ptr_mode, dest_addr);
    }

  if (retmode != RETURN_BEGIN && target != const0_rtx)
    {
      dest_addr = gen_rtx_PLUS (ptr_mode, dest_addr, len_rtx);
      /* stpcpy returns a pointer to the last byte written (the NUL),
	 not one past it, so subtract one.  */
      if (retmode == RETURN_END_MINUS_ONE)
	dest_addr = gen_rtx_MINUS (ptr_mode, dest_addr, const1_rtx);
    }

  return dest_addr;
}
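
/* For illustration (editorial sketch): the retmode adjustment above
   computes, for a copy of LEN bytes starting at DEST:

     RETURN_BEGIN          -> DEST                 (memcpy)
     RETURN_END            -> DEST + LEN           (mempcpy)
     RETURN_END_MINUS_ONE  -> DEST + LEN - 1       (stpcpy)

   E.g. for stpcpy of "hi", LEN is 3 counting the NUL, and the result
   DEST + 2 is the address of the terminating NUL.  */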

/* Helper for expand_builtin_mempcpy: expand the already broken-out
   arguments DEST, SRC and LEN with return mode RETMODE.  Overlapping
   arguments need not be handled here.  */

static rtx
expand_builtin_mempcpy_args (tree dest, tree src, tree len,
			     rtx target, tree orig_exp, memop_ret retmode)
{
  return expand_builtin_memory_copy_args (dest, src, len, target, orig_exp,
					  retmode, false);
}

/* Expand into a movstr instruction, if one is available.  Return NULL_RTX
   if we failed; the caller should emit a normal call.  Otherwise try to
   get the result in TARGET, if convenient.
   Return value is based on RETMODE argument.  */

static rtx
expand_movstr (tree dest, tree src, rtx target, memop_ret retmode)
{
  class expand_operand ops[3];
  rtx dest_mem;
  rtx src_mem;

  if (!targetm.have_movstr ())
    return NULL_RTX;

  dest_mem = get_memory_rtx (dest, NULL);
  src_mem = get_memory_rtx (src, NULL);
  if (retmode == RETURN_BEGIN)
    {
      target = force_reg (Pmode, XEXP (dest_mem, 0));
      dest_mem = replace_equiv_address (dest_mem, target);
    }

  create_output_operand (&ops[0],
			 retmode != RETURN_BEGIN ? target : NULL_RTX, Pmode);
  create_fixed_operand (&ops[1], dest_mem);
  create_fixed_operand (&ops[2], src_mem);
  if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
    return NULL_RTX;

  if (retmode != RETURN_BEGIN && target != const0_rtx)
    {
      target = ops[0].value;
      /* movstr is supposed to set end to the address of the NUL
	 terminator.  If the caller requested a mempcpy-like return value,
	 adjust it.  */
      if (retmode == RETURN_END)
	{
	  rtx tem = plus_constant (GET_MODE (target),
				   gen_lowpart (GET_MODE (target), target), 1);
	  emit_move_insn (target, force_operand (tem, NULL_RTX));
	}
    }
  return target;
}
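
/* For illustration (editorial sketch): on a target whose movstr pattern
   copies "ab" to DEST, the insn sets its output operand to DEST + 2,
   the address of the NUL.  A mempcpy-style caller (RETURN_END) expects
   DEST + 3, one past the NUL, hence the plus_constant (..., 1)
   adjustment above.  */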

/* Expand expression EXP, which is a call to the strcpy builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call.  Otherwise
   try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_strcpy (tree exp, rtx target)
{
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);

  return expand_builtin_strcpy_args (exp, dest, src, target);
}

/* Helper function to do the actual work for expand_builtin_strcpy.  The
   arguments to the builtin_strcpy call DEST and SRC are broken out
   so that this can also be called without constructing an actual CALL_EXPR.
   The other arguments and return value are the same as for
   expand_builtin_strcpy.  */

static rtx
expand_builtin_strcpy_args (tree, tree dest, tree src, rtx target)
{
  return expand_movstr (dest, src, target, /*retmode=*/ RETURN_BEGIN);
}

/* Expand a call EXP to the stpcpy builtin.
   Return NULL_RTX if we failed; the caller should emit a normal call.
   Otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_stpcpy_1 (tree exp, rtx target, machine_mode mode)
{
  tree dst, src;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  dst = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);

  /* If return value is ignored, transform stpcpy into strcpy.  */
  if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
      return expand_expr (result, target, mode, EXPAND_NORMAL);
    }
  else
    {
      tree len, lenp1;
      rtx ret;

      /* Ensure we get an actual string whose length can be evaluated at
	 compile-time, not an expression containing a string.  This is
	 because the latter will potentially produce pessimized code
	 when used to produce the return value.  */
      c_strlen_data lendata = { };
      if (!c_getstr (src)
	  || !(len = c_strlen (src, 0, &lendata, 1)))
	return expand_movstr (dst, src, target,
			      /*retmode=*/ RETURN_END_MINUS_ONE);

      lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
      ret = expand_builtin_mempcpy_args (dst, src, lenp1,
					 target, exp,
					 /*retmode=*/ RETURN_END_MINUS_ONE);

      if (ret)
	return ret;

      if (TREE_CODE (len) == INTEGER_CST)
	{
	  rtx len_rtx = expand_normal (len);

	  if (CONST_INT_P (len_rtx))
	    {
	      ret = expand_builtin_strcpy_args (exp, dst, src, target);

	      if (ret)
		{
		  if (! target)
		    {
		      if (mode != VOIDmode)
			target = gen_reg_rtx (mode);
		      else
			target = gen_reg_rtx (GET_MODE (ret));
		    }
		  if (GET_MODE (target) != GET_MODE (ret))
		    ret = gen_lowpart (GET_MODE (target), ret);

		  ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
		  ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
		  gcc_assert (ret);

		  return target;
		}
	    }
	}

      return expand_movstr (dst, src, target,
			    /*retmode=*/ RETURN_END_MINUS_ONE);
    }
}
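
/* For illustration (editorial sketch): when SRC is the literal "ab",
   the path above computes LEN = 2 and LENP1 = 3, and expands

     stpcpy (d, "ab");

   as mempcpy (d, "ab", 3) - 1, i.e. a 3-byte constant copy whose
   result d + 2 points at the NUL.  When the result is unused, the call
   is instead rewritten to plain strcpy.  */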

/* Expand a call EXP to the stpcpy builtin and diagnose uses of nonstring
   arguments, while being careful to avoid duplicate warnings (which could
   be issued if the expander were to expand the call, resulting in it
   being emitted in expand_call ()).  */

static rtx
expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
{
  if (rtx ret = expand_builtin_stpcpy_1 (exp, target, mode))
    {
      /* The call has been successfully expanded.  Check for nonstring
	 arguments and issue warnings as appropriate.  */
      maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
      return ret;
    }

  return NULL_RTX;
}

/* Callback routine for store_by_pieces.  Read GET_MODE_SIZE (MODE)
   bytes from constant string DATA + OFFSET and return it as target
   constant.  */

rtx
builtin_strncpy_read_str (void *data, void *, HOST_WIDE_INT offset,
			  fixed_size_mode mode)
{
  const char *str = (const char *) data;

  if ((unsigned HOST_WIDE_INT) offset > strlen (str))
    return const0_rtx;

  /* The by-pieces infrastructure does not try to pick a vector mode
     for strncpy expansion.  */
  return c_readstr (str + offset, as_a <scalar_int_mode> (mode));
}
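
/* For illustration (editorial sketch): with DATA = "ab" and a
   word-sized MODE, an OFFSET of 3 or more lies past the string
   (strlen = 2), so the callback returns const0_rtx; this is what
   produces strncpy's mandatory zero padding when LEN exceeds
   strlen (SRC) + 1.  */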

/* Helper to check the sizes of sequences and the destination of calls
   to __builtin_strncat and __builtin___strncat_chk.  Returns true on
   success (no overflow or invalid sizes), false otherwise.  */

static bool
check_strncat_sizes (tree exp, tree objsize)
{
  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);
  tree maxread = CALL_EXPR_ARG (exp, 2);

  /* Try to determine the range of lengths that the source expression
     refers to.  */
  c_strlen_data lendata = { };
  get_range_strlen (src, &lendata, /* eltsize = */ 1);

  /* Try to verify that the destination is big enough for the shortest
     string.  */

  access_data data (nullptr, exp, access_read_write, maxread, true);
  if (!objsize && warn_stringop_overflow)
    {
      /* If it hasn't been provided by __strncat_chk, try to determine
	 the size of the destination object into which the source is
	 being copied.  */
      objsize = compute_objsize (dest, warn_stringop_overflow - 1, &data.dst);
    }

  /* Add one for the terminating nul.  */
  tree srclen = (lendata.minlen
		 ? fold_build2 (PLUS_EXPR, size_type_node, lendata.minlen,
				size_one_node)
		 : NULL_TREE);

  /* The strncat function copies at most MAXREAD bytes and always appends
     the terminating nul so the specified upper bound should never be equal
     to (or greater than) the size of the destination.  */
  if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (objsize)
      && tree_int_cst_equal (objsize, maxread))
    {
      location_t loc = EXPR_LOCATION (exp);
      warning_at (loc, OPT_Wstringop_overflow_,
		  "%qD specified bound %E equals destination size",
		  get_callee_fndecl (exp), maxread);

      return false;
    }

  if (!srclen
      || (maxread && tree_fits_uhwi_p (maxread)
	  && tree_fits_uhwi_p (srclen)
	  && tree_int_cst_lt (maxread, srclen)))
    srclen = maxread;

  /* The number of bytes to write is LEN but check_access will also
     check SRCLEN if LEN's value isn't known.  */
  return check_access (exp, /*dstwrite=*/NULL_TREE, maxread, srclen,
		       objsize, data.mode, &data);
}
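
/* For illustration (editorial sketch): the equal-bound check above
   fires on code such as

     char d[8];
     strncat (d, s, sizeof d);   // bound 8 equals destination size

   because strncat writes up to MAXREAD bytes plus a NUL, so a bound
   equal to the destination size can overflow by one byte; the usual
   correct idiom is sizeof d - strlen (d) - 1.  */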

/* Expand expression EXP, which is a call to the strncpy builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call.  */

static rtx
expand_builtin_strncpy (tree exp, rtx target)
{
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;
  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);
  /* The number of bytes to write (not the maximum).  */
  tree len = CALL_EXPR_ARG (exp, 2);

  /* The length of the source sequence.  */
  tree slen = c_strlen (src, 1);

  /* We must be passed a constant len and src parameter.  */
  if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
    return NULL_RTX;

  slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

  /* We're required to pad with trailing zeros if the requested
     len is greater than strlen(s2)+1.  In that case try to
     use store_by_pieces, if it fails, punt.  */
  if (tree_int_cst_lt (slen, len))
    {
      unsigned int dest_align = get_pointer_alignment (dest);
      const char *p = c_getstr (src);
      rtx dest_mem;

      if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
	  || !can_store_by_pieces (tree_to_uhwi (len),
				   builtin_strncpy_read_str,
				   CONST_CAST (char *, p),
				   dest_align, false))
	return NULL_RTX;

      dest_mem = get_memory_rtx (dest, len);
      store_by_pieces (dest_mem, tree_to_uhwi (len),
		       builtin_strncpy_read_str,
		       CONST_CAST (char *, p), dest_align, false,
		       RETURN_BEGIN);
      dest_mem = force_operand (XEXP (dest_mem, 0), target);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  return NULL_RTX;
}
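
/* For illustration (editorial sketch): for

     strncpy (d, "ab", 5);

   SLEN is 2 + 1 = 3 < LEN = 5, so the branch above emits a 5-byte
   store_by_pieces producing 'a', 'b', '\0', '\0', '\0'; the callback's
   zero return past the string supplies the required padding.  */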

/* Return the RTL of a register in MODE generated from PREV in the
   previous iteration.  */

static rtx
gen_memset_value_from_prev (by_pieces_prev *prev, fixed_size_mode mode)
{
  rtx target = nullptr;
  if (prev != nullptr && prev->data != nullptr)
    {
      /* Use the previous data in the same mode.  */
      if (prev->mode == mode)
	return prev->data;

      fixed_size_mode prev_mode = prev->mode;

      /* Don't use the previous data to write QImode if it is in a
	 vector mode.  */
      if (VECTOR_MODE_P (prev_mode) && mode == QImode)
	return target;

      rtx prev_rtx = prev->data;

      if (REG_P (prev_rtx)
	  && HARD_REGISTER_P (prev_rtx)
	  && lowpart_subreg_regno (REGNO (prev_rtx), prev_mode, mode) < 0)
	{
	  /* This case occurs when PREV_MODE is a vector and when
	     MODE is too small to store using vector operations.
	     After register allocation, the code will need to move the
	     lowpart of the vector register into a non-vector register.

	     Also, the target has chosen to use a hard register
	     instead of going with the default choice of using a
	     pseudo register.  We should respect that choice and try to
	     avoid creating a pseudo register with the same mode as the
	     current hard register.

	     In principle, we could just use a lowpart MODE subreg of
	     the vector register.  However, the vector register mode might
	     be too wide for non-vector registers, and we already know
	     that the non-vector mode is too small for vector registers.
	     It's therefore likely that we'd need to spill to memory in
	     the vector mode and reload the non-vector value from there.

	     Try to avoid that by reducing the vector register to the
	     smallest size that it can hold.  This should increase the
	     chances that non-vector registers can hold both the inner
	     and outer modes of the subreg that we generate later.  */
	  machine_mode m;
	  fixed_size_mode candidate;
	  FOR_EACH_MODE_IN_CLASS (m, GET_MODE_CLASS (mode))
	    if (is_a <fixed_size_mode> (m, &candidate))
	      {
		if (GET_MODE_SIZE (candidate)
		    >= GET_MODE_SIZE (prev_mode))
		  break;
		if (GET_MODE_SIZE (candidate) >= GET_MODE_SIZE (mode)
		    && lowpart_subreg_regno (REGNO (prev_rtx),
					     prev_mode, candidate) >= 0)
		  {
		    target = lowpart_subreg (candidate, prev_rtx,
					     prev_mode);
		    prev_rtx = target;
		    prev_mode = candidate;
		    break;
		  }
	      }
	  if (target == nullptr)
	    prev_rtx = copy_to_reg (prev_rtx);
	}

      target = lowpart_subreg (mode, prev_rtx, prev_mode);
    }
  return target;
}
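
/* For illustration (editorial sketch): if the previous memset iteration
   built the fill value in a DImode pseudo (eight copies of the byte)
   and the current piece only needs SImode, the function returns the
   SImode lowpart subreg of that pseudo instead of recomputing the
   value.  */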

/* Callback routine for store_by_pieces.  Read GET_MODE_SIZE (MODE)
   bytes from constant string DATA + OFFSET and return it as target
   constant.  If PREV isn't nullptr, it has the RTL info from the
   previous iteration.  */

rtx
builtin_memset_read_str (void *data, void *prev,
			 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
			 fixed_size_mode mode)
{
  const char *c = (const char *) data;
  unsigned int size = GET_MODE_SIZE (mode);

  rtx target = gen_memset_value_from_prev ((by_pieces_prev *) prev,
					   mode);
  if (target != nullptr)
    return target;
  rtx src = gen_int_mode (*c, QImode);

  if (VECTOR_MODE_P (mode))
    {
      gcc_assert (GET_MODE_INNER (mode) == QImode);

      rtx const_vec = gen_const_vec_duplicate (mode, src);
      if (prev == NULL)
	/* Return CONST_VECTOR when called by a query function.  */
	return const_vec;

      /* Use the move expander with CONST_VECTOR.  */
      target = gen_reg_rtx (mode);
      emit_move_insn (target, const_vec);
      return target;
    }

  char *p = XALLOCAVEC (char, size);

  memset (p, *c, size);

  /* Vector modes should be handled above.  */
  return c_readstr (p, as_a <scalar_int_mode> (mode));
}
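
/* For illustration (editorial sketch): filling with C = 'x' (0x78) in
   a 4-byte integer mode builds the buffer "xxxx", which c_readstr
   turns into the constant 0x78787878, i.e. the fill byte replicated
   into every byte position of the mode.  */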

/* Callback routine for store_by_pieces.  Return the RTL of a register
   containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
   char value given in the RTL register data.  For example, if mode is
   4 bytes wide, return the RTL for 0x01010101*data.  If PREV isn't
   nullptr, it has the RTL info from the previous iteration.  */

static rtx
builtin_memset_gen_str (void *data, void *prev,
			HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
			fixed_size_mode mode)
{
  rtx target, coeff;
  size_t size;
  char *p;

  size = GET_MODE_SIZE (mode);
  if (size == 1)
    return (rtx) data;

  target = gen_memset_value_from_prev ((by_pieces_prev *) prev, mode);
  if (target != nullptr)
    return target;

  if (VECTOR_MODE_P (mode))
    {
      gcc_assert (GET_MODE_INNER (mode) == QImode);

      /* vec_duplicate_optab is a precondition to pick a vector mode for
	 the memset expander.  */
      insn_code icode = optab_handler (vec_duplicate_optab, mode);

      target = gen_reg_rtx (mode);
      class expand_operand ops[2];
      create_output_operand (&ops[0], target, mode);
      create_input_operand (&ops[1], (rtx) data, QImode);
      expand_insn (icode, 2, ops);
      if (!rtx_equal_p (target, ops[0].value))
	emit_move_insn (target, ops[0].value);

      return target;
    }

  p = XALLOCAVEC (char, size);
  memset (p, 1, size);
  /* Vector modes should be handled above.  */
  coeff = c_readstr (p, as_a <scalar_int_mode> (mode));

  target = convert_to_mode (mode, (rtx) data, 1);
  target = expand_mult (mode, target, coeff, NULL_RTX, 1);
  return force_reg (mode, target);
}
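
/* For illustration (editorial sketch): with a run-time fill byte V in
   SImode, the multiplication above computes V * 0x01010101; e.g.
   V = 0xab yields 0xab * 0x01010101 = 0xabababab, broadcasting the
   byte to all four positions without a loop.  */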

/* Expand expression EXP, which is a call to the memset builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call.  Otherwise
   try to get the result in TARGET, if convenient (and in mode MODE if
   that's convenient).  */

rtx
expand_builtin_memset (tree exp, rtx target, machine_mode mode)
{
  if (!validate_arglist (exp,
			 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree dest = CALL_EXPR_ARG (exp, 0);
  tree val = CALL_EXPR_ARG (exp, 1);
  tree len = CALL_EXPR_ARG (exp, 2);

  return expand_builtin_memset_args (dest, val, len, target, mode, exp);
}

/* Try to store VAL (or, if NULL_RTX, VALC) in LEN bytes starting at TO.
   Return TRUE if successful, FALSE otherwise.  TO is assumed to be
   aligned at an ALIGN-bits boundary.  LEN must be a multiple of
   1<<CTZ_LEN between MIN_LEN and MAX_LEN.

   The strategy is to issue one store_by_pieces for each power of two,
   from most to least significant, guarded by a test on whether there
   are at least that many bytes left to copy in LEN.

   ??? Should we skip some powers of two in favor of loops?  Maybe start
   at the max of TO/LEN/word alignment, at least when optimizing for
   size, instead of ensuring O(log len) dynamic compares?  */

bool
try_store_by_multiple_pieces (rtx to, rtx len, unsigned int ctz_len,
			      unsigned HOST_WIDE_INT min_len,
			      unsigned HOST_WIDE_INT max_len,
			      rtx val, char valc, unsigned int align)
{
  int max_bits = floor_log2 (max_len);
  int min_bits = floor_log2 (min_len);
  int sctz_len = ctz_len;

  gcc_checking_assert (sctz_len >= 0);

  if (val)
    valc = 1;

  /* Bits more significant than TST_BITS are part of the shared prefix
     in the binary representation of both min_len and max_len.  Since
     they're identical, we don't need to test them in the loop.  */
  int tst_bits = (max_bits != min_bits ? max_bits
		  : floor_log2 (max_len ^ min_len));
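
  /* For illustration (editorial sketch): with MIN_LEN = 10 (0b1010)
     and MAX_LEN = 12 (0b1100), MAX_BITS == MIN_BITS == 3, so TST_BITS
     = floor_log2 (10 ^ 12) = floor_log2 (0b0110) = 2: only the two low
     power-of-two blocks need a run-time test, since bit 3 is known to
     be set in every possible length.  */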

  /* Check whether it's profitable to start by storing a fixed BLKSIZE
     bytes, to lower max_bits.  In the unlikely case of a constant LEN
     (implied by identical MAX_LEN and MIN_LEN), we want to issue a
     single store_by_pieces, but otherwise, select the minimum multiple
     of the ALIGN (in bytes) and of the GCD of the possible LENs, that
     brings MAX_LEN below TST_BITS, if that's lower than min_len.  */
  unsigned HOST_WIDE_INT blksize;
  if (max_len > min_len)
    {
      unsigned HOST_WIDE_INT alrng = MAX (HOST_WIDE_INT_1U << ctz_len,
					  align / BITS_PER_UNIT);
      blksize = max_len - (HOST_WIDE_INT_1U << tst_bits) + alrng;
      blksize &= ~(alrng - 1);
    }
  else if (max_len == min_len)
    blksize = max_len;
  else
    /* Huh, max_len < min_len?  Punt.  See pr100843.c.  */
    return false;
  if (min_len >= blksize)
    {
      min_len -= blksize;
      min_bits = floor_log2 (min_len);
      max_len -= blksize;
      max_bits = floor_log2 (max_len);

      tst_bits = (max_bits != min_bits ? max_bits
		  : floor_log2 (max_len ^ min_len));
    }
  else
    blksize = 0;

  /* Check that we can use store by pieces for the maximum store count
     we may issue (initial fixed-size block, plus conditional
     power-of-two-sized stores from max_bits to ctz_len).  */
  unsigned HOST_WIDE_INT xlenest = blksize;
  if (max_bits >= 0)
    xlenest += ((HOST_WIDE_INT_1U << max_bits) * 2
		- (HOST_WIDE_INT_1U << ctz_len));
  if (!can_store_by_pieces (xlenest, builtin_memset_read_str,
			    &valc, align, true))
    return false;

  by_pieces_constfn constfun;
  void *constfundata;
  if (val)
    {
      constfun = builtin_memset_gen_str;
      constfundata = val = force_reg (TYPE_MODE (unsigned_char_type_node),
Although the value stored to 'val' is used in the enclosing expression, the value is never actually read from 'val'
				      val);
    }
  else
    {
      constfun = builtin_memset_read_str;
      constfundata = &valc;
    }
  rtx ptr = copy_addr_to_reg (XEXP (to, 0));
  rtx rem = copy_to_mode_reg (ptr_mode, convert_to_mode (ptr_mode, len, 0));
  to = replace_equiv_address (to, ptr);
  set_mem_align (to, align);

  if (blksize)
    {
      to = store_by_pieces (to, blksize,
			    constfun, constfundata,
			    align, true,
			    max_len != 0 ? RETURN_END : RETURN_BEGIN);
      if (max_len == 0)
	return true;

      /* Adjust PTR, TO and REM.  Since TO's address is likely
	 PTR+offset, we have to replace it.  */
      emit_move_insn (ptr, force_operand (XEXP (to, 0), NULL_RTX));
      to = replace_equiv_address (to, ptr);
      rtx rem_minus_blksize = plus_constant (ptr_mode, rem, -blksize);
      emit_move_insn (rem, force_operand (rem_minus_blksize, NULL_RTX));
    }

  /* Iterate over power-of-two block sizes from the maximum length to
     the least significant bit possibly set in the length.  */
  for (int i = max_bits; i >= sctz_len; i--)
    {
      rtx_code_label *label = NULL;
      blksize = HOST_WIDE_INT_1U << i;

      /* If we're past the bits shared between min_ and max_len, expand
	 a test on the dynamic length, comparing it with the
	 BLKSIZE.  */
      if (i <= tst_bits)
	{
	  label = gen_label_rtx ();
	  emit_cmp_and_jump_insns (rem, GEN_INT (blksize), LT, NULL,
				   ptr_mode, 1, label,
				   profile_probability::even ());
	}
      /* If we are at a bit that is in the prefix shared by min_ and
	 max_len, skip this BLKSIZE if the bit is clear.  */
      else if ((max_len & blksize) == 0)
	continue;

      /* Issue a store of BLKSIZE bytes.  */
      to = store_by_pieces (to, blksize,
			    constfun, constfundata,
			    align, true,
			    i != sctz_len ? RETURN_END : RETURN_BEGIN);

      /* Adjust REM and PTR, unless this is the last iteration.  */
      if (i != sctz_len)
	{
	  emit_move_insn (ptr, force_operand (XEXP (to, 0), NULL_RTX));
	  to = replace_equiv_address (to, ptr);
	  rtx rem_minus_blksize = plus_constant (ptr_mode, rem, -blksize);
	  emit_move_insn (rem, force_operand (rem_minus_blksize, NULL_RTX));
	}

      if (label)
	{
	  emit_label (label);

	  /* Given conditional stores, the offset can no longer be
	     known, so clear it.  */
	  clear_mem_offset (to);
	}
    }

  return true;
}
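
/* For illustration (editorial sketch): for a memset whose length is
   known to lie in [8, 15], with CTZ_LEN = 0 and only byte alignment,
   BLKSIZE stays 0, MAX_BITS = 3, and the loop emits an unconditional
   8-byte store followed by stores of 4, 2 and 1 bytes, each guarded by
   a comparison against the remaining count: O(log LEN) tests instead
   of a byte loop.  */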

/* Helper function to do the actual work for expand_builtin_memset.  The
   arguments to the builtin_memset call DEST, VAL, and LEN are broken out
   so that this can also be called without constructing an actual CALL_EXPR.
   The other arguments and return value are the same as for
   expand_builtin_memset.  */

static rtx
expand_builtin_memset_args (tree dest, tree val, tree len,
			    rtx target, machine_mode mode, tree orig_exp)
{
  tree fndecl, fn;
  enum built_in_function fcode;
  machine_mode val_mode;
  char c;
  unsigned int dest_align;
  rtx dest_mem, dest_addr, len_rtx;
  HOST_WIDE_INT expected_size = -1;
  unsigned int expected_align = 0;
  unsigned HOST_WIDE_INT min_size;
  unsigned HOST_WIDE_INT max_size;
  unsigned HOST_WIDE_INT probable_max_size;

  dest_align = get_pointer_alignment (dest);

  /* If DEST is not a pointer type, don't do this operation in-line.  */
  if (dest_align == 0)
    return NULL_RTX;

  if (currently_expanding_gimple_stmt)
    stringop_block_profile (currently_expanding_gimple_stmt,
			    &expected_align, &expected_size);

  if (expected_align < dest_align)
    expected_align = dest_align;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      /* Evaluate and ignore VAL in case it has side-effects.  */
      expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
      return expand_expr (dest, target, mode, EXPAND_NORMAL);
    }

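  /* For illustration (editorial sketch): the zero-length case above
     still evaluates VAL, so memset (d, f (), 0) calls f () for its
     side effects and the whole expression reduces to D.  */
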
  /* Stabilize the arguments in case we fail.  */
  dest = builtin_save_expr (dest);
  val = builtin_save_expr (val);
  len = builtin_save_expr (len);

  len_rtx = expand_normal (len);
  determine_block_size (len, len_rtx, &min_size, &max_size,
			&probable_max_size);
  dest_mem = get_memory_rtx (dest, len);
  val_mode = TYPE_MODE (unsigned_char_type_node);

  if (TREE_CODE (val) != INTEGER_CST
      || target_char_cast (val, &c))
    {
      rtx val_rtx;

      val_rtx = expand_normal (val);
      val_rtx = convert_to_mode (val_mode, val_rtx, 0);

      /* Assume that we can memset by pieces if we can store
       * the coefficients by pieces (in the required modes).
       * We can't pass builtin_memset_gen_str as that emits RTL.  */
      c = 1;
      if (tree_fits_uhwi_p (len)
	  && can_store_by_pieces (tree_to_uhwi (len),
				  builtin_memset_read_str, &c, dest_align,
				  true))
	{
	  val_rtx = force_reg (val_mode, val_rtx);
	  store_by_pieces (dest_mem, tree_to_uhwi (len),
			   builtin_memset_gen_str, val_rtx, dest_align,
			   true, RETURN_BEGIN);
	}
      else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
					dest_align, expected_align,
					expected_size, min_size, max_size,
					probable_max_size)
	       && !try_store_by_multiple_pieces (dest_mem, len_rtx,
						 tree_ctz (len),
						 min_size, max_size,
						 val_rtx, 0,
						 dest_align))
	goto do_libcall;

      dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  if (c)
    {
      if (tree_fits_uhwi_p (len)
	  && can_store_by_pieces (tree_to_uhwi (len),
				  builtin_memset_read_str, &c, dest_align,
				  true))
	store_by_pieces (dest_mem, tree_to_uhwi (len),
			 builtin_memset_read_str, &c, dest_align, true,
			 RETURN_BEGIN);
      else if (!set_storage_via_setmem (dest_mem, len_rtx,
					gen_int_mode (c, val_mode),
					dest_align, expected_align,
					expected_size, min_size, max_size,
					probable_max_size)
	       && !try_store_by_multiple_pieces (dest_mem, len_rtx,
						 tree_ctz (len),
						 min_size, max_size,
						 NULL_RTX, c,
						 dest_align))
	goto do_libcall;

      dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  set_mem_align (dest_mem, dest_align);
  dest_addr = clear_storage_hints (dest_mem, len_rtx,
				   CALL_EXPR_TAILCALL (orig_exp)
				   ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
				   expected_align, expected_size,
				   min_size, max_size,
				   probable_max_size, tree_ctz (len));

  if (dest_addr == 0)
    {
      dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_addr = convert_memory_address (ptr_mode, dest_addr);
    }

  return dest_addr;

 do_libcall:
  fndecl = get_callee_fndecl (orig_exp);
  fcode = DECL_FUNCTION_CODE (fndecl);
  if (fcode == BUILT_IN_MEMSET)
    fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
				dest, val, len);
  else if (fcode == BUILT_IN_BZERO)
    fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
				dest, len);
  else
    gcc_unreachable ();
  gcc_assert (TREE_CODE (fn) == CALL_EXPR);
  CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
  return expand_call (fn, target, target == const0_rtx);
}

/* Expand expression EXP, which is a call to the bzero builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call.  */

static rtx
expand_builtin_bzero (tree exp)
{
  if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree dest = CALL_EXPR_ARG (exp, 0);
  tree size = CALL_EXPR_ARG (exp, 1);

  /* New argument list transforming bzero(ptr x, int y) to
     memset(ptr x, int 0, size_t y).  This is done this way
     so that if it isn't expanded inline, we fall back to
     calling bzero instead of memset.  */

  location_t loc = EXPR_LOCATION (exp);

  return expand_builtin_memset_args (dest, integer_zero_node,
				     fold_convert_loc (loc,
						       size_type_node, size),
				     const0_rtx, VOIDmode, exp);
}
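
/* For illustration (editorial sketch): bzero (p, n) is expanded exactly
   as memset (p, 0, (size_t) n) would be, but ORIG_EXP still names
   bzero, so when inline expansion fails, do_libcall above rebuilds a
   two-argument bzero call rather than a memset call.  */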

/* Try to expand cmpstr operation ICODE with the given operands.
   Return the result rtx on success, otherwise return null.  */

static rtx
expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
	       HOST_WIDE_INT align)
{
  machine_mode insn_mode = insn_data[icode].operand[0].mode;

  if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
    target = NULL_RTX;

  class expand_operand ops[4];
  create_output_operand (&ops[0], target, insn_mode);
  create_fixed_operand (&ops[1], arg1_rtx);
  create_fixed_operand (&ops[2], arg2_rtx);
  create_integer_operand (&ops[3], align);
  if (maybe_expand_insn (icode, 4, ops))
    return ops[0].value;
  return NULL_RTX;
}
4658 | |
4659 | /* Expand expression EXP, which is a call to the memcmp built-in function. |
4660 | Return NULL_RTX if we failed and the caller should emit a normal call, |
4661 | otherwise try to get the result in TARGET, if convenient. |
4662 | RESULT_EQ is true if we can relax the returned value to be either zero |
4663 | or nonzero, without caring about the sign. */ |
4664 | |
4665 | static rtx |
4666 | expand_builtin_memcmp (tree exp, rtx target, bool result_eq) |
4667 | { |
4668 | if (!validate_arglist (exp, |
4669 | POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)) |
4670 | return NULL_RTX(rtx) 0; |
4671 | |
4672 | tree arg1 = CALL_EXPR_ARG (exp, 0)(*((const_cast<tree*> (tree_operand_check (((tree_check ((exp), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.cc" , 4672, __FUNCTION__, (CALL_EXPR)))), ((0) + 3), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.cc" , 4672, __FUNCTION__))))); |
4673 | tree arg2 = CALL_EXPR_ARG (exp, 1)(*((const_cast<tree*> (tree_operand_check (((tree_check ((exp), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.cc" , 4673, __FUNCTION__, (CALL_EXPR)))), ((1) + 3), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.cc" , 4673, __FUNCTION__))))); |
4674 | tree len = CALL_EXPR_ARG (exp, 2)(*((const_cast<tree*> (tree_operand_check (((tree_check ((exp), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.cc" , 4674, __FUNCTION__, (CALL_EXPR)))), ((2) + 3), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.cc" , 4674, __FUNCTION__))))); |
4675 | |
4676 | /* Due to the performance benefit, always inline the calls first |
4677 | when result_eq is false. */ |
4678 | rtx result = NULL_RTX(rtx) 0; |
4679 | enum built_in_function fcode = DECL_FUNCTION_CODE (get_callee_fndecl (exp)); |
4680 | if (!result_eq && fcode != BUILT_IN_BCMP) |
4681 | { |
4682 | result = inline_expand_builtin_bytecmp (exp, target); |
4683 | if (result) |
4684 | return result; |
4685 | } |
4686 | |
4687 | machine_mode mode = TYPE_MODE (TREE_TYPE (exp))((((enum tree_code) ((tree_class_check ((((contains_struct_check ((exp), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.cc" , 4687, __FUNCTION__))->typed.type)), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.cc" , 4687, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode (((contains_struct_check ((exp), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.cc" , 4687, __FUNCTION__))->typed.type)) : (((contains_struct_check ((exp), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.cc" , 4687, __FUNCTION__))->typed.type))->type_common.mode); |
4688 | location_t loc = EXPR_LOCATION (exp)((((exp)) && ((tree_code_type_tmpl <0>::tree_code_type [(int) (((enum tree_code) ((exp))->base.code))]) >= tcc_reference && (tree_code_type_tmpl <0>::tree_code_type[(int ) (((enum tree_code) ((exp))->base.code))]) <= tcc_expression )) ? (exp)->exp.locus : ((location_t) 0)); |
4689 | |
4690 | unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT(8); |
4691 | unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT(8); |
4692 | |
4693 | /* If we don't have POINTER_TYPE, call the function. */ |
4694 | if (arg1_align == 0 || arg2_align == 0) |
4695 | return NULL_RTX(rtx) 0; |
4696 | |
4697 | rtx arg1_rtx = get_memory_rtx (arg1, len); |
4698 | rtx arg2_rtx = get_memory_rtx (arg2, len); |
4699 | rtx len_rtx = expand_normal (fold_convert_loc (loc, sizetypesizetype_tab[(int) stk_sizetype], len)); |
4700 | |
4701 | /* Set MEM_SIZE as appropriate. */ |
4702 | if (CONST_INT_P (len_rtx)(((enum rtx_code) (len_rtx)->code) == CONST_INT)) |
4703 | { |
4704 | set_mem_size (arg1_rtx, INTVAL (len_rtx)((len_rtx)->u.hwint[0])); |
4705 | set_mem_size (arg2_rtx, INTVAL (len_rtx)((len_rtx)->u.hwint[0])); |
4706 | } |
4707 | |
4708 | by_pieces_constfn constfn = NULL__null; |
4709 | |
4710 | /* Try to get the byte representation of the constant ARG2 (or, only |
4711 | when the function's result is used for equality to zero, ARG1) |
4712 | points to, with its byte size in NBYTES. */ |
4713 | unsigned HOST_WIDE_INT nbytes;
4714 | const char *rep = getbyterep (arg2, &nbytes); |
4715 | if (result_eq && rep == NULL)
4716 | { |
4717 | /* For equality to zero the arguments are interchangeable. */ |
4718 | rep = getbyterep (arg1, &nbytes); |
4719 | if (rep != NULL)
4720 | std::swap (arg1_rtx, arg2_rtx); |
4721 | } |
4722 | |
4723 | /* If the function's constant bound LEN_RTX is less than or equal |
4724 | to the byte size of the representation of the constant argument, |
4725 | and if block move would be done by pieces, we can avoid loading |
4726 | the bytes from memory and only store the computed constant result. */ |
4727 | if (rep |
4728 | && CONST_INT_P (len_rtx)
4729 | && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= nbytes)
4730 | constfn = builtin_memcpy_read_str; |
4731 | |
4732 | result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx, |
4733 | TREE_TYPE (len), target,
4734 | result_eq, constfn,
4735 | CONST_CAST (char *, rep));
4736 | |
4737 | if (result) |
4738 | { |
4739 | /* Return the value in the proper mode for this function. */ |
4740 | if (GET_MODE (result) == mode)
4741 | return result; |
4742 | |
4743 | if (target != 0) |
4744 | { |
4745 | convert_move (target, result, 0); |
4746 | return target; |
4747 | } |
4748 | |
4749 | return convert_to_mode (mode, result, 0); |
4750 | } |
4751 | |
4752 | return NULL_RTX;
4753 | } |
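
/* Illustrative sketch (not part of GCC; names are hypothetical): with a
   constant second argument and a constant length, e.g.

     int is_gif (const char *p)
     {
       return __builtin_memcmp (p, "GIF8", 4) == 0;
     }

   getbyterep yields the bytes of "GIF8" with nbytes == 5, the bound 4 is
   <= nbytes, and the by-pieces comparison can materialize those bytes as
   immediates instead of loading the second buffer from memory.  */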
4754 | |
4755 | /* Expand expression EXP, which is a call to the strcmp builtin. Return
4756 | NULL_RTX if we failed; the caller should then emit a normal call.
4757 | Otherwise try to get the result in TARGET, if convenient. */
4758 | |
4759 | static rtx |
4760 | expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
4761 | { |
4762 | if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)) |
4763 | return NULL_RTX;
4764 | |
4765 | tree arg1 = CALL_EXPR_ARG (exp, 0);
4766 | tree arg2 = CALL_EXPR_ARG (exp, 1);
4767 | |
4768 | /* Due to the performance benefit, always inline the calls first. */ |
4769 | rtx result = NULL_RTX;
4770 | result = inline_expand_builtin_bytecmp (exp, target); |
4771 | if (result) |
4772 | return result; |
4773 | |
4774 | insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
4775 | insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4776 | if (cmpstr_icode == CODE_FOR_nothing && cmpstrn_icode == CODE_FOR_nothing) |
4777 | return NULL_RTX;
4778 | |
4779 | unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4780 | unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4781 | |
4782 | /* If we don't have POINTER_TYPE, call the function. */ |
4783 | if (arg1_align == 0 || arg2_align == 0) |
4784 | return NULL_RTX;
4785 | |
4786 | /* Stabilize the arguments in case gen_cmpstr(n)si fail. */ |
4787 | arg1 = builtin_save_expr (arg1); |
4788 | arg2 = builtin_save_expr (arg2); |
4789 | |
4790 | rtx arg1_rtx = get_memory_rtx (arg1, NULL);
4791 | rtx arg2_rtx = get_memory_rtx (arg2, NULL);
4792 | |
4793 | /* Try to call cmpstrsi. */ |
4794 | if (cmpstr_icode != CODE_FOR_nothing) |
4795 | result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx, |
4796 | MIN (arg1_align, arg2_align));
4797 | |
4798 | /* Try to determine at least one length and call cmpstrnsi. */ |
4799 | if (!result && cmpstrn_icode != CODE_FOR_nothing) |
4800 | { |
4801 | tree len; |
4802 | rtx arg3_rtx; |
4803 | |
4804 | tree len1 = c_strlen (arg1, 1); |
4805 | tree len2 = c_strlen (arg2, 1); |
4806 | |
4807 | if (len1) |
4808 | len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4809 | if (len2)
4810 | len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4811 | |
4812 | /* If we don't have a constant length for the first, use the length |
4813 | of the second, if we know it. We don't require a constant for |
4814 | this case; some cost analysis could be done if both are available |
4815 | but neither is constant. For now, assume they're equally cheap, |
4816 | unless one has side effects. If both strings have constant lengths, |
4817 | use the smaller. */ |
4818 | |
4819 | if (!len1) |
4820 | len = len2; |
4821 | else if (!len2) |
4822 | len = len1; |
4823 | else if (TREE_SIDE_EFFECTS (len1))
4824 | len = len2;
4825 | else if (TREE_SIDE_EFFECTS (len2))
4826 | len = len1;
4827 | else if (TREE_CODE (len1) != INTEGER_CST)
4828 | len = len2;
4829 | else if (TREE_CODE (len2) != INTEGER_CST)
4830 | len = len1;
4831 | else if (tree_int_cst_lt (len1, len2)) |
4832 | len = len1; |
4833 | else |
4834 | len = len2; |
4835 | |
4836 | /* If both arguments have side effects, we cannot optimize. */ |
4837 | if (len && !TREE_SIDE_EFFECTS (len))
4838 | { |
4839 | arg3_rtx = expand_normal (len); |
4840 | result = expand_cmpstrn_or_cmpmem |
4841 | (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
4842 | arg3_rtx, MIN (arg1_align, arg2_align));
4843 | } |
4844 | } |
4845 | |
4846 | tree fndecl = get_callee_fndecl (exp); |
4847 | if (result) |
4848 | { |
4849 | /* Return the value in the proper mode for this function. */ |
4850 | machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4851 | if (GET_MODE (result) == mode)
4852 | return result; |
4853 | if (target == 0) |
4854 | return convert_to_mode (mode, result, 0); |
4855 | convert_move (target, result, 0); |
4856 | return target; |
4857 | } |
4858 | |
4859 | /* Expand the library call ourselves using a stabilized argument |
4860 | list to avoid re-evaluating the function's arguments twice. */ |
4861 | tree fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4862 | copy_warning (fn, exp);
4863 | gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4864 | CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4865 | return expand_call (fn, target, target == const0_rtx);
4866 | } |
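
/* Illustrative sketch (not part of GCC): given

     int f (const char *p) { return __builtin_strcmp (p, "abc"); }

   c_strlen resolves the second argument's length to 3, so the cmpstrnsi
   path above compares at most 3 + 1 bytes; the extra byte covers the
   terminating NUL, which is what makes the bounded comparison
   equivalent to strcmp here.  */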
4867 | |
4868 | /* Expand expression EXP, which is a call to the strncmp builtin. Return
4869 | NULL_RTX if we failed; the caller should then emit a normal call.
4870 | Otherwise try to get the result in TARGET, if convenient. */
4871 | |
4872 | static rtx |
4873 | expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4874 | ATTRIBUTE_UNUSED machine_mode mode)
4875 | { |
4876 | if (!validate_arglist (exp, |
4877 | POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)) |
4878 | return NULL_RTX;
4879 | |
4880 | tree arg1 = CALL_EXPR_ARG (exp, 0);
4881 | tree arg2 = CALL_EXPR_ARG (exp, 1);
4882 | tree arg3 = CALL_EXPR_ARG (exp, 2);
4883 | |
4884 | location_t loc = EXPR_LOCATION (exp);
4885 | tree len1 = c_strlen (arg1, 1); |
4886 | tree len2 = c_strlen (arg2, 1); |
4887 | |
4888 | /* Due to the performance benefit, always inline the calls first. */ |
4889 | rtx result = NULL_RTX;
4890 | result = inline_expand_builtin_bytecmp (exp, target); |
4891 | if (result) |
4892 | return result; |
4893 | |
4894 | /* If c_strlen can determine an expression for one of the string |
4895 | lengths, and it doesn't have side effects, then emit cmpstrnsi |
4896 | using length MIN(strlen(string)+1, arg3). */ |
4897 | insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4898 | if (cmpstrn_icode == CODE_FOR_nothing)
4899 | return NULL_RTX;
4900 | |
4901 | tree len; |
4902 | |
4903 | unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4904 | unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4905 | |
4906 | if (len1) |
4907 | len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4908 | if (len2)
4909 | len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4910 |
4911 | tree len3 = fold_convert_loc (loc, sizetype, arg3);
4912 | |
4913 | /* If we don't have a constant length for the first, use the length |
4914 | of the second, if we know it. If neither string is constant length, |
4915 | use the given length argument. We don't require a constant for |
4916 | this case; some cost analysis could be done if both are available |
4917 | but neither is constant. For now, assume they're equally cheap, |
4918 | unless one has side effects. If both strings have constant lengths, |
4919 | use the smaller. */ |
4920 | |
4921 | if (!len1 && !len2) |
4922 | len = len3; |
4923 | else if (!len1) |
4924 | len = len2; |
4925 | else if (!len2) |
4926 | len = len1; |
4927 | else if (TREE_SIDE_EFFECTS (len1))
4928 | len = len2;
4929 | else if (TREE_SIDE_EFFECTS (len2))
4930 | len = len1;
4931 | else if (TREE_CODE (len1) != INTEGER_CST)
4932 | len = len2;
4933 | else if (TREE_CODE (len2) != INTEGER_CST)
4934 | len = len1;
4935 | else if (tree_int_cst_lt (len1, len2)) |
4936 | len = len1; |
4937 | else |
4938 | len = len2; |
4939 | |
4940 | /* If we are not using the given length, we must incorporate it here. |
4941 | The actual new length parameter will be MIN(len,arg3) in this case. */ |
4942 | if (len != len3) |
4943 | { |
4944 | len = fold_convert_loc (loc, sizetype, len);
4945 | len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, len3);
4946 | } |
4947 | rtx arg1_rtx = get_memory_rtx (arg1, len); |
4948 | rtx arg2_rtx = get_memory_rtx (arg2, len); |
4949 | rtx arg3_rtx = expand_normal (len); |
4950 | result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx, |
4951 | arg2_rtx, TREE_TYPE (len), arg3_rtx,
4952 | MIN (arg1_align, arg2_align));
4953 | |
4954 | tree fndecl = get_callee_fndecl (exp); |
4955 | if (result) |
4956 | { |
4957 | /* Return the value in the proper mode for this function. */ |
4958 | mode = TYPE_MODE (TREE_TYPE (exp));
4959 | if (GET_MODE (result) == mode)
4960 | return result; |
4961 | if (target == 0) |
4962 | return convert_to_mode (mode, result, 0); |
4963 | convert_move (target, result, 0); |
4964 | return target; |
4965 | } |
4966 | |
4967 | /* Expand the library call ourselves using a stabilized argument |
4968 | list to avoid re-evaluating the function's arguments twice. */ |
4969 | tree call = build_call_nofold_loc (loc, fndecl, 3, arg1, arg2, len); |
4970 | copy_warning (call, exp); |
4971 | gcc_assert (TREE_CODE (call) == CALL_EXPR);
4972 | CALL_EXPR_TAILCALL (call) = CALL_EXPR_TAILCALL (exp);
4973 | return expand_call (call, target, target == const0_rtx);
4974 | } |
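
/* Illustrative sketch (not part of GCC): for

     int f (const char *p) { return __builtin_strncmp (p, "abcdef", 32); }

   len2 becomes 6 + 1 and the emitted bound is MIN (7, 32) == 7, so at
   most 7 bytes are ever compared even though the caller passed 32.  */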
4975 | |
4976 | /* Expand a call to __builtin_saveregs, generating the result in TARGET, |
4977 | if that's convenient. */ |
4978 | |
4979 | rtx |
4980 | expand_builtin_saveregs (void) |
4981 | { |
4982 | rtx val; |
4983 | rtx_insn *seq; |
4984 | |
4985 | /* Don't do __builtin_saveregs more than once in a function. |
4986 | Save the result of the first call and reuse it. */ |
4987 | if (saveregs_value != 0)
4988 | return saveregs_value;
4989 | |
4990 | /* When this function is called, it means that registers must be |
4991 | saved on entry to this function. So we migrate the call to the |
4992 | first insn of this function. */ |
4993 | |
4994 | start_sequence (); |
4995 | |
4996 | /* Do whatever the machine needs done in this case. */ |
4997 | val = targetm.calls.expand_builtin_saveregs (); |
4998 | |
4999 | seq = get_insns (); |
5000 | end_sequence (); |
5001 | |
5002 | saveregs_value = val;
5003 | |
5004 | /* Put the insns after the NOTE that starts the function. If this |
5005 | is inside a start_sequence, make the outer-level insn chain current, so |
5006 | the code is placed at the start of the function. */ |
5007 | push_topmost_sequence (); |
5008 | emit_insn_after (seq, entry_of_function ()); |
5009 | pop_topmost_sequence (); |
5010 | |
5011 | return val; |
5012 | } |
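
/* Illustrative note (not part of GCC): because the result is cached in
   saveregs_value and the generated insns are hoisted to
   entry_of_function, a body containing two __builtin_saveregs calls
   saves the registers once and reuses the same rtx for both uses.  */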
5013 | |
5014 | /* Expand a call to __builtin_next_arg. */ |
5015 | |
5016 | static rtx |
5017 | expand_builtin_next_arg (void) |
5018 | { |
5019 | /* Checking arguments is already done in fold_builtin_next_arg,
5020 | which must be called before this function. */
5021 | return expand_binop (ptr_mode, add_optab,
5022 | crtl->args.internal_arg_pointer,
5023 | crtl->args.arg_offset_rtx,
5024 | NULL_RTX, 0, OPTAB_LIB_WIDEN);
5025 | } |
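
/* Illustrative sketch (not part of GCC): in

     void f (int last, ...) { void *p = __builtin_next_arg (last); }

   the expansion is simply internal_arg_pointer + arg_offset_rtx, i.e.
   the address of the first anonymous argument.  */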
5026 | |
5027 | /* Make it easier for the backends by protecting the valist argument |
5028 | from multiple evaluations. */ |
5029 | |
5030 | static tree |
5031 | stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue) |
5032 | { |
5033 | tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
5034 | |
5035 | /* The current way of determining the type of valist is completely |
5036 | bogus. We should have the information on the va builtin instead. */ |
5037 | if (!vatype) |
5038 | vatype = targetm.fn_abi_va_list (cfun->decl);
5039 | |
5040 | if (TREE_CODE (vatype) == ARRAY_TYPE)
5041 | { |
5042 | if (TREE_SIDE_EFFECTS (valist))
5043 | valist = save_expr (valist); |
5044 | |
5045 | /* For this case, the backends will be expecting a pointer to |
5046 | vatype, but it's possible we've actually been given an array |
5047 | (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)). |
5048 | So fix it. */ |
5049 | if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
5050 | { |
5051 | tree p1 = build_pointer_type (TREE_TYPE (vatype));
5052 | valist = build_fold_addr_expr_with_type_loc (loc, valist, p1); |
5053 | } |
5054 | } |
5055 | else |
5056 | { |
5057 | tree pt = build_pointer_type (vatype); |
5058 | |
5059 | if (! needs_lvalue) |
5060 | { |
5061 | if (! TREE_SIDE_EFFECTS (valist))
5062 | return valist; |
5063 | |
5064 | valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist); |
5065 | TREE_SIDE_EFFECTS (valist) = 1;
5066 | } |
5067 | |
5068 | if (TREE_SIDE_EFFECTS (valist))
5069 | valist = save_expr (valist); |
5070 | valist = fold_build2_loc (loc, MEM_REF, |
5071 | vatype, valist, build_int_cst (pt, 0)); |
5072 | } |
5073 | |
5074 | return valist; |
5075 | } |
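
/* Illustrative sketch (not part of GCC): on targets where va_list is an
   array type, e.g. the x86-64 declaration

     typedef struct __va_list_tag __builtin_va_list[1];

   a va_list object decays to a pointer in argument positions, and the
   ADDR_EXPR fixup above hands the backend the pointer-to-record form it
   expects.  */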
5076 | |
5077 | /* The "standard" definition of va_list is void*. */ |
5078 | |
5079 | tree |
5080 | std_build_builtin_va_list (void) |
5081 | { |
5082 | return ptr_type_node;
5083 | } |
5084 | |
5085 | /* The "standard" abi va_list is va_list_type_node. */ |
5086 | |
5087 | tree |
5088 | std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
5089 | { |
5090 | return va_list_type_node;
5091 | } |
5092 | |
5093 | /* The "standard" type of va_list is va_list_type_node. */ |
5094 | |
5095 | tree |
5096 | std_canonical_va_list_type (tree type) |
5097 | { |
5098 | tree wtype, htype; |
5099 | |
5100 | wtype = va_list_type_node;
5101 | htype = type; |
5102 | |
5103 | if (TREE_CODE (wtype) == ARRAY_TYPE)
5104 | { |
5105 | /* If va_list is an array type, the argument may have decayed |
5106 | to a pointer type, e.g. by being passed to another function. |
5107 | In that case, unwrap both types so that we can compare the |
5108 | underlying records. */ |
5109 | if (TREE_CODE (htype) == ARRAY_TYPE
5110 | || POINTER_TYPE_P (htype))
5111 | {
5112 | wtype = TREE_TYPE (wtype);
5113 | htype = TREE_TYPE (htype);
5114 | } |
5115 | } |
5116 | if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
5117 | return va_list_type_node;
5118 | |
5119 | return NULL_TREE;
5120 | } |
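
/* Illustrative sketch (not part of GCC): with an array-type va_list, a
   callee such as

     void g (va_list ap);   // parameter decays to __va_list_tag *

   still canonicalizes, because both the decayed parameter type and
   va_list itself unwrap to the same record before the main variants are
   compared.  */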
5121 | |
5122 | /* The "standard" implementation of va_start: just assign `nextarg' to |
5123 | the variable. */ |
5124 | |
5125 | void |
5126 | std_expand_builtin_va_start (tree valist, rtx nextarg) |
5127 | { |
5128 | rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
5129 | convert_move (va_r, nextarg, 0); |
5130 | } |
5131 | |
5132 | /* Expand EXP, a call to __builtin_va_start. */ |
5133 | |
5134 | static rtx |
5135 | expand_builtin_va_start (tree exp) |
5136 | { |
5137 | rtx nextarg; |
5138 | tree valist; |
5139 | location_t loc = EXPR_LOCATION (exp);
5140 | |
5141 | if (call_expr_nargs (exp) < 2)
5142 | { |
5143 | error_at (loc, "too few arguments to function %<va_start%>"); |
5144 | return const0_rtx;
5145 | } |
5146 | |
5147 | if (fold_builtin_next_arg (exp, true)) |
5148 | return const0_rtx;
5149 | |
5150 | nextarg = expand_builtin_next_arg (); |
5151 | valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
5152 | |
5153 | if (targetm.expand_builtin_va_start) |
5154 | targetm.expand_builtin_va_start (valist, nextarg); |
5155 | else |
5156 | std_expand_builtin_va_start (valist, nextarg); |
5157 | |
5158 | return const0_rtx;
5159 | } |
5160 | |
5161 | /* Expand EXP, a call to __builtin_va_end. */ |
5162 | |
5163 | static rtx |
5164 | expand_builtin_va_end (tree exp) |
5165 | { |
5166 | tree valist = CALL_EXPR_ARG (exp, 0);
5167 | |
5168 | /* Evaluate for side effects, if needed. I hate macros that don't |
5169 | do that. */ |
5170 | if (TREE_SIDE_EFFECTS (valist))
5171 | expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
5172 | |
5173 | return const0_rtx;
5174 | } |
5175 | |
5176 | /* Expand EXP, a call to __builtin_va_copy. We do this as a |
5177 | builtin rather than just as an assignment in stdarg.h because of the |
5178 | nastiness of array-type va_list types. */ |
5179 | |
5180 | static rtx |
5181 | expand_builtin_va_copy (tree exp) |
5182 | { |
5183 | tree dst, src, t; |
5184 | location_t loc = EXPR_LOCATION (exp);
5185 | |
5186 | dst = CALL_EXPR_ARG (exp, 0);
5187 | src = CALL_EXPR_ARG (exp, 1);
5188 | |
5189 | dst = stabilize_va_list_loc (loc, dst, 1); |
5190 | src = stabilize_va_list_loc (loc, src, 0); |
5191 | |
5192 | gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
5193 | |
5194 | if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
5195 | { |
5196 | t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
5197 | TREE_SIDE_EFFECTS (t) = 1;
5198 | expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5199 | } |
5200 | else |
5201 | { |
5202 | rtx dstb, srcb, size; |
5203 | |
5204 | /* Evaluate to pointers. */ |
5205 | dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
5206 | srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
5207 | size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
5208 | NULL_RTX, VOIDmode, EXPAND_NORMAL);
5209 | |
5210 | dstb = convert_memory_address (Pmode, dstb);
5211 | srcb = convert_memory_address (Pmode, srcb);
5212 | |
5213 | /* "Dereference" to BLKmode memories. */ |
5214 | dstb = gen_rtx_MEM (BLKmode, dstb);
5215 | set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
5216 | set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5217 | srcb = gen_rtx_MEM (BLKmode, srcb);
5218 | set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
5219 | set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5220 | |
5221 | /* Copy. */ |
5222 | emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL); |
5223 | } |
5224 | |
5225 | return const0_rtx;
5226 | } |
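
/* Illustrative sketch (not part of GCC): for an array-type va_list,

     va_copy (dst, src);

   cannot be lowered to a plain assignment, since arrays do not assign;
   that is why the else branch above emits a block move of
   sizeof (va_list) bytes between the two objects.  */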
5227 | |
5228 | /* Expand a call to one of the builtin functions __builtin_frame_address or |
5229 | __builtin_return_address. */ |
5230 | |
5231 | static rtx |
5232 | expand_builtin_frame_address (tree fndecl, tree exp) |
5233 | { |
5234 | /* The argument must be a nonnegative integer constant. |
5235 | It counts the number of frames to scan up the stack. |
5236 | The value is either the frame pointer value or the return |
5237 | address saved in that frame. */ |
5238 | if (call_expr_nargs (exp) == 0)
5239 | /* Warning about missing arg was already issued. */
5240 | return const0_rtx;
5241 | else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
5242 | { |
5243 | error ("invalid argument to %qD", fndecl); |
5244 | return const0_rtx;
5245 | } |
5246 | else |
5247 | { |
5248 | /* Number of frames to scan up the stack. */ |
5249 | unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
5250 | |
5251 | rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count); |
5252 | |
5253 | /* Some ports cannot access arbitrary stack frames. */ |
5254 | if (tem == NULL)
5255 | { |
5256 | warning (0, "unsupported argument to %qD", fndecl); |
5257 | return const0_rtx;
5258 | } |
5259 | |
5260 | if (count) |
5261 | { |
5262 | /* Warn since no effort is made to ensure that any frame |
5263 | beyond the current one exists or can be safely reached. */ |
5264 | warning (OPT_Wframe_address, "calling %qD with " |
5265 | "a nonzero argument is unsafe", fndecl); |
5266 | } |
5267 | |
5268 | /* For __builtin_frame_address, return what we've got. */ |
5269 | if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS) |
5270 | return tem; |
5271 | |
5272 | if (!REG_P (tem)
5273 | && ! CONSTANT_P (tem))
5274 | tem = copy_addr_to_reg (tem); |
5275 | return tem; |
5276 | } |
5277 | } |
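
/* Illustrative sketch (not part of GCC):

     void *fp = __builtin_frame_address (0);    // always supported
     void *ra = __builtin_return_address (1);   // -Wframe-address warns

   A nonzero count walks frames that may not exist or be safely
   reachable, which is exactly the case diagnosed above.  */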
5278 | |
5279 | /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
5280 | failed; the caller should then emit a normal call. */
5281 | |
5282 | static rtx |
5283 | expand_builtin_alloca (tree exp) |
5284 | { |
5285 | rtx op0; |
5286 | rtx result; |
5287 | unsigned int align; |
5288 | tree fndecl = get_callee_fndecl (exp); |
5289 | HOST_WIDE_INT max_size;
5290 | enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl); |
5291 | bool alloca_for_var = CALL_ALLOCA_FOR_VAR_P (exp);
5292 | bool valid_arglist |
5293 | = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX |
5294 | ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, INTEGER_TYPE, |
5295 | VOID_TYPE) |
5296 | : fcode == BUILT_IN_ALLOCA_WITH_ALIGN |
5297 | ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE) |
5298 | : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE)); |
5299 | |
5300 | if (!valid_arglist) |
5301 | return NULL_RTX;
5302 | |
5303 | /* Compute the argument. */ |
5304 | op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
5305 | |
5306 | /* Compute the alignment. */ |
5307 | align = (fcode == BUILT_IN_ALLOCA |
5308 | ? BIGGEST_ALIGNMENT
5309 | : TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1)));
5310 | |
5311 | /* Compute the maximum size. */ |
5312 | max_size = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX |
5313 | ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 2))
5314 | : -1); |
5315 | |
5316 | /* Allocate the desired space. If the allocation stems from the declaration |
5317 | of a variable-sized object, it cannot accumulate. */ |
5318 | result |
5319 | = allocate_dynamic_stack_space (op0, 0, align, max_size, alloca_for_var); |
5320 | result = convert_memory_address (ptr_mode, result);
5321 | |
5322 | /* Dynamic allocations for variables are recorded during gimplification. */ |
5323 | if (!alloca_for_var && (flag_callgraph_info & CALLGRAPH_INFO_DYNAMIC_ALLOC))
5324 | record_dynamic_alloc (exp); |
5325 | |
5326 | return result; |
5327 | } |
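
/* Illustrative sketch (not part of GCC): a variable-length array such as

     void f (int n) { char buf[n]; (void) buf; }

   is gimplified into a __builtin_alloca_with_align call with
   CALL_ALLOCA_FOR_VAR_P set, so it reaches this expander with an
   explicit alignment operand rather than the BIGGEST_ALIGNMENT default
   used for plain alloca.  */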
5328 | |
5329 | /* Emit the __asan_allocas_unpoison call in EXP. Add to its second
5330 | argument virtual_stack_dynamic_rtx - stack_pointer_rtx, which is the
5331 | STACK_DYNAMIC_OFFSET value. See the motivation for this in the comment
5332 | on the handle_builtin_stack_restore function. */
5333 | |
5334 | static rtx |
5335 | expand_asan_emit_allocas_unpoison (tree exp) |
5336 | { |
5337 | tree arg0 = CALL_EXPR_ARG (exp, 0);
5338 | tree arg1 = CALL_EXPR_ARG (exp, 1);
5339 | rtx top = expand_expr (arg0, NULL_RTX, ptr_mode, EXPAND_NORMAL);
5340 | rtx bot = expand_expr (arg1, NULL_RTX, ptr_mode, EXPAND_NORMAL);
5341 | rtx off = expand_simple_binop (Pmode