File: | build/gcc/config/i386/i386.cc |
Warning: | line 10481, column 19 Although the value stored to 'base_reg' is used in the enclosing expression, the value is never actually read from 'base_reg' |
Press '?' to see keyboard shortcuts
Keyboard shortcuts:
1 | /* Subroutines used for code generation on IA-32. |
2 | Copyright (C) 1988-2023 Free Software Foundation, Inc. |
3 | |
4 | This file is part of GCC. |
5 | |
6 | GCC is free software; you can redistribute it and/or modify |
7 | it under the terms of the GNU General Public License as published by |
8 | the Free Software Foundation; either version 3, or (at your option) |
9 | any later version. |
10 | |
11 | GCC is distributed in the hope that it will be useful, |
12 | but WITHOUT ANY WARRANTY; without even the implied warranty of |
13 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
14 | GNU General Public License for more details. |
15 | |
16 | You should have received a copy of the GNU General Public License |
17 | along with GCC; see the file COPYING3. If not see |
18 | <http://www.gnu.org/licenses/>. */ |
19 | |
/* Mark this translation unit as target-specific code.  */
#define IN_TARGET_CODE 1
21 | |
22 | #include "config.h" |
23 | #include "system.h" |
24 | #include "coretypes.h" |
25 | #include "backend.h" |
26 | #include "rtl.h" |
27 | #include "tree.h" |
28 | #include "memmodel.h" |
29 | #include "gimple.h" |
30 | #include "cfghooks.h" |
31 | #include "cfgloop.h" |
32 | #include "df.h" |
33 | #include "tm_p.h" |
34 | #include "stringpool.h" |
35 | #include "expmed.h" |
36 | #include "optabs.h" |
37 | #include "regs.h" |
38 | #include "emit-rtl.h" |
39 | #include "recog.h" |
40 | #include "cgraph.h" |
41 | #include "diagnostic.h" |
42 | #include "cfgbuild.h" |
43 | #include "alias.h" |
44 | #include "fold-const.h" |
45 | #include "attribs.h" |
46 | #include "calls.h" |
47 | #include "stor-layout.h" |
48 | #include "varasm.h" |
49 | #include "output.h" |
50 | #include "insn-attr.h" |
51 | #include "flags.h" |
52 | #include "except.h" |
53 | #include "explow.h" |
54 | #include "expr.h" |
55 | #include "cfgrtl.h" |
56 | #include "common/common-target.h" |
57 | #include "langhooks.h" |
58 | #include "reload.h" |
59 | #include "gimplify.h" |
60 | #include "dwarf2.h" |
61 | #include "tm-constrs.h" |
62 | #include "cselib.h" |
63 | #include "sched-int.h" |
64 | #include "opts.h" |
65 | #include "tree-pass.h" |
66 | #include "context.h" |
67 | #include "pass_manager.h" |
68 | #include "target-globals.h" |
69 | #include "gimple-iterator.h" |
70 | #include "gimple-fold.h" |
71 | #include "tree-vectorizer.h" |
72 | #include "shrink-wrap.h" |
73 | #include "builtins.h" |
74 | #include "rtl-iter.h" |
75 | #include "tree-iterator.h" |
76 | #include "dbgcnt.h" |
77 | #include "case-cfn-macros.h" |
78 | #include "dojump.h" |
79 | #include "fold-const-call.h" |
80 | #include "tree-vrp.h" |
81 | #include "tree-ssanames.h" |
82 | #include "selftest.h" |
83 | #include "selftest-rtl.h" |
84 | #include "print-rtl.h" |
85 | #include "intl.h" |
86 | #include "ifcvt.h" |
87 | #include "symbol-summary.h" |
88 | #include "ipa-prop.h" |
89 | #include "ipa-fnsummary.h" |
90 | #include "wide-int-bitmask.h" |
91 | #include "tree-vector-builder.h" |
92 | #include "debug.h" |
93 | #include "dwarf2out.h" |
94 | #include "i386-options.h" |
95 | #include "i386-builtins.h" |
96 | #include "i386-expand.h" |
97 | #include "i386-features.h" |
98 | #include "function-abi.h" |
99 | |
100 | /* This file should be included last. */ |
101 | #include "target-def.h" |
102 | |
103 | static rtx legitimize_dllimport_symbol (rtx, bool); |
104 | static rtx legitimize_pe_coff_extern_decl (rtx, bool); |
105 | static void ix86_print_operand_address_as (FILE *, rtx, addr_space_t, bool); |
106 | static void ix86_emit_restore_reg_using_pop (rtx); |
107 | |
108 | |
/* Default stack-probe limit; -1 means "no limit" unless the target
   configuration overrides it.  */
#ifndef CHECK_STACK_LIMIT
#define CHECK_STACK_LIMIT (-1)
#endif
112 | |
/* Return index of given mode in mult and division cost tables.  */
#define MODE_INDEX(mode)					\
  ((mode) == QImode ? 0						\
   : (mode) == HImode ? 1					\
   : (mode) == SImode ? 2					\
   : (mode) == DImode ? 3					\
   : 4)
120 | |
121 | |
122 | /* Set by -mtune. */ |
123 | const struct processor_costs *ix86_tune_cost = NULL__null; |
124 | |
125 | /* Set by -mtune or -Os. */ |
126 | const struct processor_costs *ix86_cost = NULL__null; |
127 | |
128 | /* In case the average insn count for single function invocation is |
129 | lower than this constant, emit fast (but longer) prologue and |
130 | epilogue code. */ |
131 | #define FAST_PROLOGUE_INSN_COUNT20 20 |
132 | |
133 | /* Names for 8 (low), 8 (high), and 16-bit registers, respectively. */ |
134 | static const char *const qi_reg_name[] = QI_REGISTER_NAMES{"al", "dl", "cl", "bl", "sil", "dil", "bpl", "spl"}; |
135 | static const char *const qi_high_reg_name[] = QI_HIGH_REGISTER_NAMES{"ah", "dh", "ch", "bh"}; |
136 | static const char *const hi_reg_name[] = HI_REGISTER_NAMES{"ax","dx","cx","bx","si","di","bp","sp", "st","st(1)","st(2)" ,"st(3)","st(4)","st(5)","st(6)","st(7)", "argp", "flags", "fpsr" , "frame", "xmm0","xmm1","xmm2","xmm3","xmm4","xmm5","xmm6","xmm7" , "mm0", "mm1", "mm2", "mm3", "mm4", "mm5", "mm6", "mm7", "r8" , "r9", "r10", "r11", "r12", "r13", "r14", "r15", "xmm8", "xmm9" , "xmm10", "xmm11", "xmm12", "xmm13", "xmm14", "xmm15", "xmm16" , "xmm17", "xmm18", "xmm19", "xmm20", "xmm21", "xmm22", "xmm23" , "xmm24", "xmm25", "xmm26", "xmm27", "xmm28", "xmm29", "xmm30" , "xmm31", "k0", "k1", "k2", "k3", "k4", "k5", "k6", "k7" }; |
137 | |
138 | /* Array of the smallest class containing reg number REGNO, indexed by |
139 | REGNO. Used by REGNO_REG_CLASS in i386.h. */ |
140 | |
141 | enum reg_class const regclass_map[FIRST_PSEUDO_REGISTER76] = |
142 | { |
143 | /* ax, dx, cx, bx */ |
144 | AREG, DREG, CREG, BREG, |
145 | /* si, di, bp, sp */ |
146 | SIREG, DIREG, NON_Q_REGS, NON_Q_REGS, |
147 | /* FP registers */ |
148 | FP_TOP_REG, FP_SECOND_REG, FLOAT_REGS, FLOAT_REGS, |
149 | FLOAT_REGS, FLOAT_REGS, FLOAT_REGS, FLOAT_REGS, |
150 | /* arg pointer, flags, fpsr, frame */ |
151 | NON_Q_REGS, NO_REGS, NO_REGS, NON_Q_REGS, |
152 | /* SSE registers */ |
153 | SSE_FIRST_REG, SSE_REGS, SSE_REGS, SSE_REGS, |
154 | SSE_REGS, SSE_REGS, SSE_REGS, SSE_REGS, |
155 | /* MMX registers */ |
156 | MMX_REGS, MMX_REGS, MMX_REGS, MMX_REGS, |
157 | MMX_REGS, MMX_REGS, MMX_REGS, MMX_REGS, |
158 | /* REX registers */ |
159 | GENERAL_REGS, GENERAL_REGS, GENERAL_REGS, GENERAL_REGS, |
160 | GENERAL_REGS, GENERAL_REGS, GENERAL_REGS, GENERAL_REGS, |
161 | /* SSE REX registers */ |
162 | SSE_REGS, SSE_REGS, SSE_REGS, SSE_REGS, |
163 | SSE_REGS, SSE_REGS, SSE_REGS, SSE_REGS, |
164 | /* AVX-512 SSE registers */ |
165 | ALL_SSE_REGS, ALL_SSE_REGS, ALL_SSE_REGS, ALL_SSE_REGS, |
166 | ALL_SSE_REGS, ALL_SSE_REGS, ALL_SSE_REGS, ALL_SSE_REGS, |
167 | ALL_SSE_REGS, ALL_SSE_REGS, ALL_SSE_REGS, ALL_SSE_REGS, |
168 | ALL_SSE_REGS, ALL_SSE_REGS, ALL_SSE_REGS, ALL_SSE_REGS, |
169 | /* Mask registers. */ |
170 | ALL_MASK_REGS, MASK_REGS, MASK_REGS, MASK_REGS, |
171 | MASK_REGS, MASK_REGS, MASK_REGS, MASK_REGS |
172 | }; |
173 | |
174 | /* The "default" register map used in 32bit mode. */ |
175 | |
176 | int const debugger_register_map[FIRST_PSEUDO_REGISTER76] = |
177 | { |
178 | /* general regs */ |
179 | 0, 2, 1, 3, 6, 7, 4, 5, |
180 | /* fp regs */ |
181 | 12, 13, 14, 15, 16, 17, 18, 19, |
182 | /* arg, flags, fpsr, frame */ |
183 | IGNORED_DWARF_REGNUM((~(unsigned int) 0) - 1), IGNORED_DWARF_REGNUM((~(unsigned int) 0) - 1), |
184 | IGNORED_DWARF_REGNUM((~(unsigned int) 0) - 1), IGNORED_DWARF_REGNUM((~(unsigned int) 0) - 1), |
185 | /* SSE */ |
186 | 21, 22, 23, 24, 25, 26, 27, 28, |
187 | /* MMX */ |
188 | 29, 30, 31, 32, 33, 34, 35, 36, |
189 | /* extended integer registers */ |
190 | INVALID_REGNUM(~(unsigned int) 0), INVALID_REGNUM(~(unsigned int) 0), INVALID_REGNUM(~(unsigned int) 0), INVALID_REGNUM(~(unsigned int) 0), |
191 | INVALID_REGNUM(~(unsigned int) 0), INVALID_REGNUM(~(unsigned int) 0), INVALID_REGNUM(~(unsigned int) 0), INVALID_REGNUM(~(unsigned int) 0), |
192 | /* extended sse registers */ |
193 | INVALID_REGNUM(~(unsigned int) 0), INVALID_REGNUM(~(unsigned int) 0), INVALID_REGNUM(~(unsigned int) 0), INVALID_REGNUM(~(unsigned int) 0), |
194 | INVALID_REGNUM(~(unsigned int) 0), INVALID_REGNUM(~(unsigned int) 0), INVALID_REGNUM(~(unsigned int) 0), INVALID_REGNUM(~(unsigned int) 0), |
195 | /* AVX-512 registers 16-23 */ |
196 | INVALID_REGNUM(~(unsigned int) 0), INVALID_REGNUM(~(unsigned int) 0), INVALID_REGNUM(~(unsigned int) 0), INVALID_REGNUM(~(unsigned int) 0), |
197 | INVALID_REGNUM(~(unsigned int) 0), INVALID_REGNUM(~(unsigned int) 0), INVALID_REGNUM(~(unsigned int) 0), INVALID_REGNUM(~(unsigned int) 0), |
198 | /* AVX-512 registers 24-31 */ |
199 | INVALID_REGNUM(~(unsigned int) 0), INVALID_REGNUM(~(unsigned int) 0), INVALID_REGNUM(~(unsigned int) 0), INVALID_REGNUM(~(unsigned int) 0), |
200 | INVALID_REGNUM(~(unsigned int) 0), INVALID_REGNUM(~(unsigned int) 0), INVALID_REGNUM(~(unsigned int) 0), INVALID_REGNUM(~(unsigned int) 0), |
201 | /* Mask registers */ |
202 | 93, 94, 95, 96, 97, 98, 99, 100 |
203 | }; |
204 | |
205 | /* The "default" register map used in 64bit mode. */ |
206 | |
207 | int const debugger64_register_map[FIRST_PSEUDO_REGISTER76] = |
208 | { |
209 | /* general regs */ |
210 | 0, 1, 2, 3, 4, 5, 6, 7, |
211 | /* fp regs */ |
212 | 33, 34, 35, 36, 37, 38, 39, 40, |
213 | /* arg, flags, fpsr, frame */ |
214 | IGNORED_DWARF_REGNUM((~(unsigned int) 0) - 1), IGNORED_DWARF_REGNUM((~(unsigned int) 0) - 1), |
215 | IGNORED_DWARF_REGNUM((~(unsigned int) 0) - 1), IGNORED_DWARF_REGNUM((~(unsigned int) 0) - 1), |
216 | /* SSE */ |
217 | 17, 18, 19, 20, 21, 22, 23, 24, |
218 | /* MMX */ |
219 | 41, 42, 43, 44, 45, 46, 47, 48, |
220 | /* extended integer registers */ |
221 | 8, 9, 10, 11, 12, 13, 14, 15, |
222 | /* extended SSE registers */ |
223 | 25, 26, 27, 28, 29, 30, 31, 32, |
224 | /* AVX-512 registers 16-23 */ |
225 | 67, 68, 69, 70, 71, 72, 73, 74, |
226 | /* AVX-512 registers 24-31 */ |
227 | 75, 76, 77, 78, 79, 80, 81, 82, |
228 | /* Mask registers */ |
229 | 118, 119, 120, 121, 122, 123, 124, 125 |
230 | }; |
231 | |
232 | /* Define the register numbers to be used in Dwarf debugging information. |
233 | The SVR4 reference port C compiler uses the following register numbers |
234 | in its Dwarf output code: |
235 | 0 for %eax (gcc regno = 0) |
236 | 1 for %ecx (gcc regno = 2) |
237 | 2 for %edx (gcc regno = 1) |
238 | 3 for %ebx (gcc regno = 3) |
239 | 4 for %esp (gcc regno = 7) |
240 | 5 for %ebp (gcc regno = 6) |
241 | 6 for %esi (gcc regno = 4) |
242 | 7 for %edi (gcc regno = 5) |
243 | The following three DWARF register numbers are never generated by |
244 | the SVR4 C compiler or by the GNU compilers, but SDB on x86/svr4 |
245 | believed these numbers have these meanings. |
246 | 8 for %eip (no gcc equivalent) |
247 | 9 for %eflags (gcc regno = 17) |
248 | 10 for %trapno (no gcc equivalent) |
249 | It is not at all clear how we should number the FP stack registers |
250 | for the x86 architecture. If the version of SDB on x86/svr4 were |
251 | a bit less brain dead with respect to floating-point then we would |
252 | have a precedent to follow with respect to DWARF register numbers |
253 | for x86 FP registers, but the SDB on x86/svr4 was so completely |
254 | broken with respect to FP registers that it is hardly worth thinking |
255 | of it as something to strive for compatibility with. |
256 | The version of x86/svr4 SDB I had does (partially) |
257 | seem to believe that DWARF register number 11 is associated with |
258 | the x86 register %st(0), but that's about all. Higher DWARF |
259 | register numbers don't seem to be associated with anything in |
260 | particular, and even for DWARF regno 11, SDB only seemed to under- |
261 | stand that it should say that a variable lives in %st(0) (when |
262 | asked via an `=' command) if we said it was in DWARF regno 11, |
263 | but SDB still printed garbage when asked for the value of the |
264 | variable in question (via a `/' command). |
265 | (Also note that the labels SDB printed for various FP stack regs |
266 | when doing an `x' command were all wrong.) |
267 | Note that these problems generally don't affect the native SVR4 |
268 | C compiler because it doesn't allow the use of -O with -g and |
269 | because when it is *not* optimizing, it allocates a memory |
270 | location for each floating-point variable, and the memory |
271 | location is what gets described in the DWARF AT_location |
272 | attribute for the variable in question. |
273 | Regardless of the severe mental illness of the x86/svr4 SDB, we |
274 | do something sensible here and we use the following DWARF |
275 | register numbers. Note that these are all stack-top-relative |
276 | numbers. |
277 | 11 for %st(0) (gcc regno = 8) |
278 | 12 for %st(1) (gcc regno = 9) |
279 | 13 for %st(2) (gcc regno = 10) |
280 | 14 for %st(3) (gcc regno = 11) |
281 | 15 for %st(4) (gcc regno = 12) |
282 | 16 for %st(5) (gcc regno = 13) |
283 | 17 for %st(6) (gcc regno = 14) |
284 | 18 for %st(7) (gcc regno = 15) |
285 | */ |
286 | int const svr4_debugger_register_map[FIRST_PSEUDO_REGISTER76] = |
287 | { |
288 | /* general regs */ |
289 | 0, 2, 1, 3, 6, 7, 5, 4, |
290 | /* fp regs */ |
291 | 11, 12, 13, 14, 15, 16, 17, 18, |
292 | /* arg, flags, fpsr, frame */ |
293 | IGNORED_DWARF_REGNUM((~(unsigned int) 0) - 1), 9, |
294 | IGNORED_DWARF_REGNUM((~(unsigned int) 0) - 1), IGNORED_DWARF_REGNUM((~(unsigned int) 0) - 1), |
295 | /* SSE registers */ |
296 | 21, 22, 23, 24, 25, 26, 27, 28, |
297 | /* MMX registers */ |
298 | 29, 30, 31, 32, 33, 34, 35, 36, |
299 | /* extended integer registers */ |
300 | INVALID_REGNUM(~(unsigned int) 0), INVALID_REGNUM(~(unsigned int) 0), INVALID_REGNUM(~(unsigned int) 0), INVALID_REGNUM(~(unsigned int) 0), |
301 | INVALID_REGNUM(~(unsigned int) 0), INVALID_REGNUM(~(unsigned int) 0), INVALID_REGNUM(~(unsigned int) 0), INVALID_REGNUM(~(unsigned int) 0), |
302 | /* extended sse registers */ |
303 | INVALID_REGNUM(~(unsigned int) 0), INVALID_REGNUM(~(unsigned int) 0), INVALID_REGNUM(~(unsigned int) 0), INVALID_REGNUM(~(unsigned int) 0), |
304 | INVALID_REGNUM(~(unsigned int) 0), INVALID_REGNUM(~(unsigned int) 0), INVALID_REGNUM(~(unsigned int) 0), INVALID_REGNUM(~(unsigned int) 0), |
305 | /* AVX-512 registers 16-23 */ |
306 | INVALID_REGNUM(~(unsigned int) 0), INVALID_REGNUM(~(unsigned int) 0), INVALID_REGNUM(~(unsigned int) 0), INVALID_REGNUM(~(unsigned int) 0), |
307 | INVALID_REGNUM(~(unsigned int) 0), INVALID_REGNUM(~(unsigned int) 0), INVALID_REGNUM(~(unsigned int) 0), INVALID_REGNUM(~(unsigned int) 0), |
308 | /* AVX-512 registers 24-31 */ |
309 | INVALID_REGNUM(~(unsigned int) 0), INVALID_REGNUM(~(unsigned int) 0), INVALID_REGNUM(~(unsigned int) 0), INVALID_REGNUM(~(unsigned int) 0), |
310 | INVALID_REGNUM(~(unsigned int) 0), INVALID_REGNUM(~(unsigned int) 0), INVALID_REGNUM(~(unsigned int) 0), INVALID_REGNUM(~(unsigned int) 0), |
311 | /* Mask registers */ |
312 | 93, 94, 95, 96, 97, 98, 99, 100 |
313 | }; |
314 | |
315 | /* Define parameter passing and return registers. */ |
316 | |
317 | static int const x86_64_int_parameter_registers[6] = |
318 | { |
319 | DI_REG5, SI_REG4, DX_REG1, CX_REG2, R8_REG36, R9_REG37 |
320 | }; |
321 | |
322 | static int const x86_64_ms_abi_int_parameter_registers[4] = |
323 | { |
324 | CX_REG2, DX_REG1, R8_REG36, R9_REG37 |
325 | }; |
326 | |
327 | static int const x86_64_int_return_registers[4] = |
328 | { |
329 | AX_REG0, DX_REG1, DI_REG5, SI_REG4 |
330 | }; |
331 | |
332 | /* Define the structure for the machine field in struct function. */ |
333 | |
334 | struct GTY(()) stack_local_entry { |
335 | unsigned short mode; |
336 | unsigned short n; |
337 | rtx rtl; |
338 | struct stack_local_entry *next; |
339 | }; |
340 | |
341 | /* Which cpu are we scheduling for. */ |
342 | enum attr_cpu ix86_schedule; |
343 | |
344 | /* Which cpu are we optimizing for. */ |
345 | enum processor_type ix86_tune; |
346 | |
347 | /* Which instruction set architecture to use. */ |
348 | enum processor_type ix86_arch; |
349 | |
350 | /* True if processor has SSE prefetch instruction. */ |
351 | unsigned char ix86_prefetch_sse; |
352 | |
353 | /* Preferred alignment for stack boundary in bits. */ |
354 | unsigned int ix86_preferred_stack_boundary; |
355 | |
356 | /* Alignment for incoming stack boundary in bits specified at |
357 | command line. */ |
358 | unsigned int ix86_user_incoming_stack_boundary; |
359 | |
360 | /* Default alignment for incoming stack boundary in bits. */ |
361 | unsigned int ix86_default_incoming_stack_boundary; |
362 | |
363 | /* Alignment for incoming stack boundary in bits. */ |
364 | unsigned int ix86_incoming_stack_boundary; |
365 | |
366 | /* True if there is no direct access to extern symbols. */ |
367 | bool ix86_has_no_direct_extern_access; |
368 | |
369 | /* Calling abi specific va_list type nodes. */ |
370 | tree sysv_va_list_type_node; |
371 | tree ms_va_list_type_node; |
372 | |
373 | /* Prefix built by ASM_GENERATE_INTERNAL_LABEL. */ |
374 | char internal_label_prefix[16]; |
375 | int internal_label_prefix_len; |
376 | |
377 | /* Fence to use after loop using movnt. */ |
378 | tree x86_mfence; |
379 | |
/* Register class used for passing given 64bit part of the argument.
   These represent classes as documented by the PS ABI, with the exception
   of SSESF, SSEDF classes, that are basically SSE class, just gcc will
   use SF or DFmode move instead of DImode to avoid reformatting penalties.

   Similarly we play games with INTEGERSI_CLASS to use cheaper SImode moves
   whenever possible (upper half does contain padding).  */
enum x86_64_reg_class
  {
    X86_64_NO_CLASS,
    X86_64_INTEGER_CLASS,
    X86_64_INTEGERSI_CLASS,
    X86_64_SSE_CLASS,
    X86_64_SSEHF_CLASS,
    X86_64_SSESF_CLASS,
    X86_64_SSEDF_CLASS,
    X86_64_SSEUP_CLASS,
    X86_64_X87_CLASS,
    X86_64_X87UP_CLASS,
    X86_64_COMPLEX_X87_CLASS,
    X86_64_MEMORY_CLASS
  };
402 | |
/* Maximum number of 64bit chunks (and hence classes) one argument can use.  */
#define MAX_CLASSES 8
404 | |
405 | /* Table of constants used by fldpi, fldln2, etc.... */ |
406 | static REAL_VALUE_TYPEstruct real_value ext_80387_constants_table [5]; |
407 | static bool ext_80387_constants_init; |
408 | |
409 | |
410 | static rtx ix86_function_value (const_tree, const_tree, bool); |
411 | static bool ix86_function_value_regno_p (const unsigned int); |
412 | static unsigned int ix86_function_arg_boundary (machine_mode, |
413 | const_tree); |
414 | static rtx ix86_static_chain (const_tree, bool); |
415 | static int ix86_function_regparm (const_tree, const_tree); |
416 | static void ix86_compute_frame_layout (void); |
417 | static tree ix86_canonical_va_list_type (tree); |
418 | static unsigned int split_stack_prologue_scratch_regno (void); |
419 | static bool i386_asm_output_addr_const_extra (FILE *, rtx); |
420 | |
421 | static bool ix86_can_inline_p (tree, tree); |
422 | static unsigned int ix86_minimum_incoming_stack_boundary (bool); |
423 | |
424 | |
/* Whether -mtune= or -march= were specified.  */
int ix86_tune_defaulted;
int ix86_arch_specified;
428 | |
429 | /* Return true if a red-zone is in use. We can't use red-zone when |
430 | there are local indirect jumps, like "indirect_jump" or "tablejump", |
431 | which jumps to another place in the function, since "call" in the |
432 | indirect thunk pushes the return address onto stack, destroying |
433 | red-zone. |
434 | |
435 | TODO: If we can reserve the first 2 WORDs, for PUSH and, another |
436 | for CALL, in red-zone, we can allow local indirect jumps with |
437 | indirect thunk. */ |
438 | |
439 | bool |
440 | ix86_using_red_zone (void) |
441 | { |
442 | return (TARGET_RED_ZONE((global_options.x_target_flags & (1U << 20)) == 0) |
443 | && !TARGET_64BIT_MS_ABI(((global_options.x_ix86_isa_flags & (1UL << 1)) != 0) && ix86_cfun_abi () == MS_ABI) |
444 | && (!cfun(cfun + 0)->machine->has_local_indirect_jump |
445 | || cfun(cfun + 0)->machine->indirect_branch_type == indirect_branch_keep)); |
446 | } |
447 | |
448 | /* Return true, if profiling code should be emitted before |
449 | prologue. Otherwise it returns false. |
450 | Note: For x86 with "hotfix" it is sorried. */ |
451 | static bool |
452 | ix86_profile_before_prologue (void) |
453 | { |
454 | return flag_fentryglobal_options.x_flag_fentry != 0; |
455 | } |
456 | |
457 | /* Update register usage after having seen the compiler flags. */ |
458 | |
459 | static void |
460 | ix86_conditional_register_usage (void) |
461 | { |
462 | int i, c_mask; |
463 | |
464 | /* If there are no caller-saved registers, preserve all registers. |
465 | except fixed_regs and registers used for function return value |
466 | since aggregate_value_p checks call_used_regs[regno] on return |
467 | value. */ |
468 | if (cfun(cfun + 0) && cfun(cfun + 0)->machine->no_caller_saved_registers) |
469 | for (i = 0; i < FIRST_PSEUDO_REGISTER76; i++) |
470 | if (!fixed_regs(this_target_hard_regs->x_fixed_regs)[i] && !ix86_function_value_regno_p (i)) |
471 | call_used_regs(this_target_hard_regs->x_call_used_regs)[i] = 0; |
472 | |
473 | /* For 32-bit targets, disable the REX registers. */ |
474 | if (! TARGET_64BIT((global_options.x_ix86_isa_flags & (1UL << 1)) != 0 )) |
475 | { |
476 | for (i = FIRST_REX_INT_REG36; i <= LAST_REX_INT_REG43; i++) |
477 | CLEAR_HARD_REG_BIT (accessible_reg_set(this_target_hard_regs->x_accessible_reg_set), i); |
478 | for (i = FIRST_REX_SSE_REG44; i <= LAST_REX_SSE_REG51; i++) |
479 | CLEAR_HARD_REG_BIT (accessible_reg_set(this_target_hard_regs->x_accessible_reg_set), i); |
480 | for (i = FIRST_EXT_REX_SSE_REG52; i <= LAST_EXT_REX_SSE_REG67; i++) |
481 | CLEAR_HARD_REG_BIT (accessible_reg_set(this_target_hard_regs->x_accessible_reg_set), i); |
482 | } |
483 | |
484 | /* See the definition of CALL_USED_REGISTERS in i386.h. */ |
485 | c_mask = CALL_USED_REGISTERS_MASK (TARGET_64BIT_MS_ABI)(((((global_options.x_ix86_isa_flags & (1UL << 1)) != 0) && ix86_cfun_abi () == MS_ABI)) ? (1 << 3) : ((global_options.x_ix86_isa_flags & (1UL << 1)) != 0) ? (1 << 2) : (1 << 1)); |
486 | |
487 | CLEAR_HARD_REG_SET (reg_class_contents(this_target_hard_regs->x_reg_class_contents)[(int)CLOBBERED_REGS]); |
488 | |
489 | for (i = 0; i < FIRST_PSEUDO_REGISTER76; i++) |
490 | { |
491 | /* Set/reset conditionally defined registers from |
492 | CALL_USED_REGISTERS initializer. */ |
493 | if (call_used_regs(this_target_hard_regs->x_call_used_regs)[i] > 1) |
494 | call_used_regs(this_target_hard_regs->x_call_used_regs)[i] = !!(call_used_regs(this_target_hard_regs->x_call_used_regs)[i] & c_mask); |
495 | |
496 | /* Calculate registers of CLOBBERED_REGS register set |
497 | as call used registers from GENERAL_REGS register set. */ |
498 | if (TEST_HARD_REG_BIT (reg_class_contents(this_target_hard_regs->x_reg_class_contents)[(int)GENERAL_REGS], i) |
499 | && call_used_regs(this_target_hard_regs->x_call_used_regs)[i]) |
500 | SET_HARD_REG_BIT (reg_class_contents(this_target_hard_regs->x_reg_class_contents)[(int)CLOBBERED_REGS], i); |
501 | } |
502 | |
503 | /* If MMX is disabled, disable the registers. */ |
504 | if (! TARGET_MMX((global_options.x_ix86_isa_flags & (1UL << 36)) != 0)) |
505 | accessible_reg_set(this_target_hard_regs->x_accessible_reg_set) &= ~reg_class_contents(this_target_hard_regs->x_reg_class_contents)[MMX_REGS]; |
506 | |
507 | /* If SSE is disabled, disable the registers. */ |
508 | if (! TARGET_SSE((global_options.x_ix86_isa_flags & (1UL << 50)) != 0)) |
509 | accessible_reg_set(this_target_hard_regs->x_accessible_reg_set) &= ~reg_class_contents(this_target_hard_regs->x_reg_class_contents)[ALL_SSE_REGS]; |
510 | |
511 | /* If the FPU is disabled, disable the registers. */ |
512 | if (! (TARGET_80387((global_options.x_target_flags & (1U << 1)) != 0) || TARGET_FLOAT_RETURNS_IN_80387(((global_options.x_target_flags & (1U << 11)) != 0 ) && ((global_options.x_target_flags & (1U << 1)) != 0) && !((global_options.x_target_flags & ( 1U << 12)) != 0)))) |
513 | accessible_reg_set(this_target_hard_regs->x_accessible_reg_set) &= ~reg_class_contents(this_target_hard_regs->x_reg_class_contents)[FLOAT_REGS]; |
514 | |
515 | /* If AVX512F is disabled, disable the registers. */ |
516 | if (! TARGET_AVX512F((global_options.x_ix86_isa_flags & (1UL << 15)) != 0)) |
517 | { |
518 | for (i = FIRST_EXT_REX_SSE_REG52; i <= LAST_EXT_REX_SSE_REG67; i++) |
519 | CLEAR_HARD_REG_BIT (accessible_reg_set(this_target_hard_regs->x_accessible_reg_set), i); |
520 | |
521 | accessible_reg_set(this_target_hard_regs->x_accessible_reg_set) &= ~reg_class_contents(this_target_hard_regs->x_reg_class_contents)[ALL_MASK_REGS]; |
522 | } |
523 | } |
524 | |
525 | /* Canonicalize a comparison from one we don't have to one we do have. */ |
526 | |
527 | static void |
528 | ix86_canonicalize_comparison (int *code, rtx *op0, rtx *op1, |
529 | bool op0_preserve_value) |
530 | { |
531 | /* The order of operands in x87 ficom compare is forced by combine in |
532 | simplify_comparison () function. Float operator is treated as RTX_OBJ |
533 | with a precedence over other operators and is always put in the first |
534 | place. Swap condition and operands to match ficom instruction. */ |
535 | if (!op0_preserve_value |
536 | && GET_CODE (*op0)((enum rtx_code) (*op0)->code) == FLOAT && MEM_P (XEXP (*op0, 0))(((enum rtx_code) ((((*op0)->u.fld[0]).rt_rtx))->code) == MEM) && REG_P (*op1)(((enum rtx_code) (*op1)->code) == REG)) |
537 | { |
538 | enum rtx_code scode = swap_condition ((enum rtx_code) *code); |
539 | |
540 | /* We are called only for compares that are split to SAHF instruction. |
541 | Ensure that we have setcc/jcc insn for the swapped condition. */ |
542 | if (ix86_fp_compare_code_to_integer (scode) != UNKNOWN) |
543 | { |
544 | std::swap (*op0, *op1); |
545 | *code = (int) scode; |
546 | } |
547 | } |
548 | } |
549 | |
550 | |
551 | /* Hook to determine if one function can safely inline another. */ |
552 | |
553 | static bool |
554 | ix86_can_inline_p (tree caller, tree callee) |
555 | { |
556 | tree caller_tree = DECL_FUNCTION_SPECIFIC_TARGET (caller)((tree_check ((caller), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 556, __FUNCTION__, (FUNCTION_DECL)))->function_decl.function_specific_target ); |
557 | tree callee_tree = DECL_FUNCTION_SPECIFIC_TARGET (callee)((tree_check ((callee), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 557, __FUNCTION__, (FUNCTION_DECL)))->function_decl.function_specific_target ); |
558 | |
559 | /* Changes of those flags can be tolerated for always inlines. Lets hope |
560 | user knows what he is doing. */ |
561 | unsigned HOST_WIDE_INTlong always_inline_safe_mask |
562 | = (MASK_USE_8BIT_IDIV(1U << 2) | MASK_ACCUMULATE_OUTGOING_ARGS(1U << 3) |
563 | | MASK_NO_ALIGN_STRINGOPS(1U << 5) | MASK_AVX256_SPLIT_UNALIGNED_LOAD(1U << 6) |
564 | | MASK_AVX256_SPLIT_UNALIGNED_STORE(1U << 7) | MASK_CLD(1U << 9) |
565 | | MASK_NO_FANCY_MATH_387(1U << 10) | MASK_IEEE_FP(1U << 13) | MASK_INLINE_ALL_STRINGOPS(1U << 14) |
566 | | MASK_INLINE_STRINGOPS_DYNAMICALLY(1U << 15) | MASK_RECIP(1U << 22) | MASK_STACK_PROBE(1U << 26) |
567 | | MASK_STV(1U << 27) | MASK_TLS_DIRECT_SEG_REFS(1U << 28) | MASK_VZEROUPPER(1U << 30) |
568 | | MASK_NO_PUSH_ARGS(1U << 19) | MASK_OMIT_LEAF_FRAME_POINTER(1U << 21)); |
569 | |
570 | |
571 | if (!callee_tree) |
572 | callee_tree = target_option_default_nodeglobal_trees[TI_TARGET_OPTION_DEFAULT]; |
573 | if (!caller_tree) |
574 | caller_tree = target_option_default_nodeglobal_trees[TI_TARGET_OPTION_DEFAULT]; |
575 | if (callee_tree == caller_tree) |
576 | return true; |
577 | |
578 | struct cl_target_option *caller_opts = TREE_TARGET_OPTION (caller_tree)((tree_check ((caller_tree), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 578, __FUNCTION__, (TARGET_OPTION_NODE)))->target_option .opts); |
579 | struct cl_target_option *callee_opts = TREE_TARGET_OPTION (callee_tree)((tree_check ((callee_tree), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 579, __FUNCTION__, (TARGET_OPTION_NODE)))->target_option .opts); |
580 | bool ret = false; |
581 | bool always_inline |
582 | = (DECL_DISREGARD_INLINE_LIMITS (callee)((tree_check ((callee), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 582, __FUNCTION__, (FUNCTION_DECL)))->function_decl.disregard_inline_limits ) |
583 | && lookup_attribute ("always_inline", |
584 | DECL_ATTRIBUTES (callee)((contains_struct_check ((callee), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 584, __FUNCTION__))->decl_common.attributes))); |
585 | |
586 | /* If callee only uses GPRs, ignore MASK_80387. */ |
587 | if (TARGET_GENERAL_REGS_ONLY_P (callee_opts->x_ix86_target_flags)(((callee_opts->x_ix86_target_flags) & (1U << 0) ) != 0)) |
588 | always_inline_safe_mask |= MASK_80387(1U << 1); |
589 | |
590 | cgraph_node *callee_node = cgraph_node::get (callee); |
591 | /* Callee's isa options should be a subset of the caller's, i.e. a SSE4 |
592 | function can inline a SSE2 function but a SSE2 function can't inline |
593 | a SSE4 function. */ |
594 | if (((caller_opts->x_ix86_isa_flags & callee_opts->x_ix86_isa_flags) |
595 | != callee_opts->x_ix86_isa_flags) |
596 | || ((caller_opts->x_ix86_isa_flags2 & callee_opts->x_ix86_isa_flags2) |
597 | != callee_opts->x_ix86_isa_flags2)) |
598 | ret = false; |
599 | |
600 | /* See if we have the same non-isa options. */ |
601 | else if ((!always_inline |
602 | && caller_opts->x_target_flags != callee_opts->x_target_flags) |
603 | || (caller_opts->x_target_flags & ~always_inline_safe_mask) |
604 | != (callee_opts->x_target_flags & ~always_inline_safe_mask)) |
605 | ret = false; |
606 | |
607 | /* See if arch, tune, etc. are the same. */ |
608 | else if (caller_opts->arch != callee_opts->arch) |
609 | ret = false; |
610 | |
611 | else if (!always_inline && caller_opts->tune != callee_opts->tune) |
612 | ret = false; |
613 | |
614 | else if (caller_opts->x_ix86_fpmath != callee_opts->x_ix86_fpmath |
615 | /* If the calle doesn't use FP expressions differences in |
616 | ix86_fpmath can be ignored. We are called from FEs |
617 | for multi-versioning call optimization, so beware of |
618 | ipa_fn_summaries not available. */ |
619 | && (! ipa_fn_summaries |
620 | || ipa_fn_summaries->get (callee_node) == NULL__null |
621 | || ipa_fn_summaries->get (callee_node)->fp_expressions)) |
622 | ret = false; |
623 | |
624 | else if (!always_inline |
625 | && caller_opts->branch_cost != callee_opts->branch_cost) |
626 | ret = false; |
627 | |
628 | else |
629 | ret = true; |
630 | |
631 | return ret; |
632 | } |
633 | |
634 | /* Return true if this goes in large data/bss. */ |
635 | |
636 | static bool
637 | ix86_in_large_data_p (tree exp)
638 | {
/* EXP is a decl (or NULL).  Returns true iff EXP should live in the
   large data/bss sections (.ldata/.lbss), which exist only for the
   x86-64 medium code models.  */
639 | if (ix86_cmodelglobal_options.x_ix86_cmodel != CM_MEDIUM && ix86_cmodelglobal_options.x_ix86_cmodel != CM_MEDIUM_PIC)
640 | return false;
641 | 
642 | if (exp == NULL_TREE(tree) __null)
643 | return false;
644 | 
645 | /* Functions are never large data. */
646 | if (TREE_CODE (exp)((enum tree_code) (exp)->base.code) == FUNCTION_DECL)
647 | return false;
648 | 
649 | /* Automatic variables are never large data. */
650 | if (VAR_P (exp)(((enum tree_code) (exp)->base.code) == VAR_DECL) && !is_global_var (exp))
651 | return false;
652 | 
/* An explicit user section name decides directly: only the two large
   section names count as large data.  */
653 | if (VAR_P (exp)(((enum tree_code) (exp)->base.code) == VAR_DECL) && DECL_SECTION_NAME (exp)decl_section_name (exp))
654 | {
655 | const char *section = DECL_SECTION_NAME (exp)decl_section_name (exp);
656 | if (strcmp (section, ".ldata") == 0
657 | || strcmp (section, ".lbss") == 0)
658 | return true;
659 | return false;
660 | }
661 | else
662 | {
663 | HOST_WIDE_INTlong size = int_size_in_bytes (TREE_TYPE (exp)((contains_struct_check ((exp), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 663, __FUNCTION__))->typed.type));
664 | 
665 | /* If this is an incomplete type with size 0, then we can't put it
666 | in data because it might be too big when completed. Also,
667 | int_size_in_bytes returns -1 if size can vary or is larger than
668 | an integer in which case also it is safer to assume that it goes in
669 | large data. */
670 | if (size <= 0 || size > ix86_section_thresholdglobal_options.x_ix86_section_threshold)
671 | return true;
672 | }
673 | 
674 | return false;
675 | }
676 | |
677 | /* i386-specific section flag to mark large sections. */ |
678 | #define SECTION_LARGESECTION_MACH_DEP SECTION_MACH_DEP |
679 | |
680 | /* Switch to the appropriate section for output of DECL. |
681 | DECL is either a `VAR_DECL' node or a constant of some sort. |
682 | RELOC indicates whether forming the initial value of DECL requires |
683 | link-time relocations. */ |
684 | |
685 | ATTRIBUTE_UNUSED__attribute__ ((__unused__)) static section *
686 | x86_64_elf_select_section (tree decl, int reloc,
687 | unsigned HOST_WIDE_INTlong align)
688 | {
/* For large-model objects, map the decl's section category onto one of
   the .l* section names; otherwise fall back to the generic ELF hook
   at the end.  */
689 | if (ix86_in_large_data_p (decl))
690 | {
691 | const char *sname = NULL__null;
692 | unsigned int flags = SECTION_WRITE | SECTION_LARGESECTION_MACH_DEP; /* default writable; adjusted per category below */
693 | switch (categorize_decl_for_section (decl, reloc))
694 | {
695 | case SECCAT_DATA:
696 | sname = ".ldata";
697 | break;
698 | case SECCAT_DATA_REL:
699 | sname = ".ldata.rel";
700 | break;
701 | case SECCAT_DATA_REL_LOCAL:
702 | sname = ".ldata.rel.local";
703 | break;
704 | case SECCAT_DATA_REL_RO:
705 | sname = ".ldata.rel.ro";
706 | break;
707 | case SECCAT_DATA_REL_RO_LOCAL:
708 | sname = ".ldata.rel.ro.local";
709 | break;
710 | case SECCAT_BSS:
711 | sname = ".lbss";
712 | flags |= SECTION_BSS;
713 | break;
714 | case SECCAT_RODATA:
715 | case SECCAT_RODATA_MERGE_STR:
716 | case SECCAT_RODATA_MERGE_STR_INIT:
717 | case SECCAT_RODATA_MERGE_CONST:
718 | sname = ".lrodata";
719 | flags &= ~SECTION_WRITE; /* read-only data */
720 | break;
721 | case SECCAT_SRODATA:
722 | case SECCAT_SDATA:
723 | case SECCAT_SBSS:
/* Small-data categories never occur on x86-64.  */
724 | gcc_unreachable ()(fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 724, __FUNCTION__));
725 | case SECCAT_TEXT:
726 | case SECCAT_TDATA:
727 | case SECCAT_TBSS:
728 | /* We don't split these for medium model. Place them into
729 | default sections and hope for best. */
730 | break;
731 | }
732 | if (sname)
733 | {
734 | /* We might get called with string constants, but get_named_section
735 | doesn't like them as they are not DECLs. Also, we need to set
736 | flags in that case. */
737 | if (!DECL_P (decl)(tree_code_type_tmpl <0>::tree_code_type[(int) (((enum tree_code ) (decl)->base.code))] == tcc_declaration))
738 | return get_section (sname, flags, NULL__null);
739 | return get_named_section (decl, sname, reloc);
740 | }
741 | }
742 | return default_elf_select_section (decl, reloc, align); /* not large data */
743 | }
744 | |
745 | /* Select a set of attributes for section NAME based on the properties |
746 | of DECL and whether or not RELOC indicates that DECL's initializer |
747 | might contain runtime relocations. */ |
748 | |
749 | static unsigned int ATTRIBUTE_UNUSED__attribute__ ((__unused__))
750 | x86_64_elf_section_type_flags (tree decl, const char *name, int reloc)
751 | {
/* Start from the generic ELF flags, then add the i386-specific
   SECTION_LARGE marker plus RELRO/BSS bits that the generic code
   cannot infer for the .l* section names.  */
752 | unsigned int flags = default_section_type_flags (decl, name, reloc);
753 | 
754 | if (ix86_in_large_data_p (decl))
755 | flags |= SECTION_LARGESECTION_MACH_DEP;
756 | 
/* With no decl to categorize, recognize the relro large-data names
   explicitly.  */
757 | if (decl == NULL_TREE(tree) __null
758 | && (strcmp (name, ".ldata.rel.ro") == 0
759 | || strcmp (name, ".ldata.rel.ro.local") == 0))
760 | flags |= SECTION_RELRO;
761 | 
762 | if (strcmp (name, ".lbss") == 0
763 | || startswith (name, ".lbss.")
764 | || startswith (name, ".gnu.linkonce.lb."))
765 | flags |= SECTION_BSS;
766 | 
767 | return flags;
768 | }
769 | |
770 | /* Build up a unique section name, expressed as a |
771 | STRING_CST node, and assign it to DECL_SECTION_NAME (decl). |
772 | RELOC indicates whether the initial value of EXP requires |
773 | link-time relocations. */ |
774 | |
775 | static void ATTRIBUTE_UNUSED__attribute__ ((__unused__))
776 | x86_64_elf_unique_section (tree decl, int reloc)
777 | {
/* Large-model decls get a unique per-symbol section name built from a
   .l* prefix plus the decl's assembler name; everything else uses the
   generic hook at the end.  */
778 | if (ix86_in_large_data_p (decl))
779 | {
780 | const char *prefix = NULL__null;
781 | /* We only need to use .gnu.linkonce if we don't have COMDAT groups. */
782 | bool one_only = DECL_COMDAT_GROUP (decl)decl_comdat_group (decl) && !HAVE_COMDAT_GROUP1;
783 | 
784 | switch (categorize_decl_for_section (decl, reloc))
785 | {
786 | case SECCAT_DATA:
787 | case SECCAT_DATA_REL:
788 | case SECCAT_DATA_REL_LOCAL:
789 | case SECCAT_DATA_REL_RO:
790 | case SECCAT_DATA_REL_RO_LOCAL:
791 | prefix = one_only ? ".ld" : ".ldata"; /* short prefix for linkonce names */
792 | break;
793 | case SECCAT_BSS:
794 | prefix = one_only ? ".lb" : ".lbss";
795 | break;
796 | case SECCAT_RODATA:
797 | case SECCAT_RODATA_MERGE_STR:
798 | case SECCAT_RODATA_MERGE_STR_INIT:
799 | case SECCAT_RODATA_MERGE_CONST:
800 | prefix = one_only ? ".lr" : ".lrodata";
801 | break;
802 | case SECCAT_SRODATA:
803 | case SECCAT_SDATA:
804 | case SECCAT_SBSS:
/* Small-data categories never occur on x86-64.  */
805 | gcc_unreachable ()(fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 805, __FUNCTION__));
806 | case SECCAT_TEXT:
807 | case SECCAT_TDATA:
808 | case SECCAT_TBSS:
809 | /* We don't split these for medium model. Place them into
810 | default sections and hope for best. */
811 | break;
812 | }
813 | if (prefix)
814 | {
815 | const char *name, *linkonce;
816 | char *string;
817 | 
818 | name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl))((const char *) (tree_check ((decl_assembler_name (decl)), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 818, __FUNCTION__, (IDENTIFIER_NODE)))->identifier.id.str );
819 | name = targetm.strip_name_encoding (name);
820 | 
821 | /* If we're using one_only, then there needs to be a .gnu.linkonce
822 | prefix to the section name. */
823 | linkonce = one_only ? ".gnu.linkonce" : "";
824 | 
/* Section name is "<linkonce><prefix>.<symbol>", built on the stack.  */
825 | string = ACONCAT ((linkonce, prefix, ".", name, NULL))(libiberty_concat_ptr = (char *) __builtin_alloca(concat_length (linkonce, prefix, ".", name, __null) + 1), concat_copy2 (linkonce , prefix, ".", name, __null));
826 | 
827 | set_decl_section_name (decl, string);
828 | return;
829 | }
830 | }
831 | default_unique_section (decl, reloc);
832 | }
833 | |
834 | #ifdef COMMON_ASM_OP"\t.comm\t" |
835 | |
836 | #ifndef LARGECOMM_SECTION_ASM_OP"\t.largecomm\t" |
837 | #define LARGECOMM_SECTION_ASM_OP"\t.largecomm\t" "\t.largecomm\t" |
838 | #endif |
839 | |
840 | /* This says how to output assembler code to declare an |
841 | uninitialized external linkage data object. |
842 | |
843 | For medium model x86-64 we need to use LARGECOMM_SECTION_ASM_OP opcode for |
844 | large objects. */ |
845 | void
846 | x86_elf_aligned_decl_common (FILE *file, tree decl,
847 | const char *name, unsigned HOST_WIDE_INTlong size,
848 | unsigned align)
849 | {
/* Objects above the -mlarge-data-threshold in the medium models are
   announced with .largecomm inside .lbss; all others use plain .comm.  */
850 | if ((ix86_cmodelglobal_options.x_ix86_cmodel == CM_MEDIUM || ix86_cmodelglobal_options.x_ix86_cmodel == CM_MEDIUM_PIC)
851 | && size > (unsigned int)ix86_section_thresholdglobal_options.x_ix86_section_threshold)
852 | {
853 | switch_to_section (get_named_section (decl, ".lbss", 0));
854 | fputs (LARGECOMM_SECTION_ASM_OP"\t.largecomm\t", file);
855 | }
856 | else
857 | fputs (COMMON_ASM_OP"\t.comm\t", file);
858 | assemble_name (file, name);
/* Emit ",<size>,<alignment-in-bytes>" after the symbol name.  */
859 | fprintf (file, "," HOST_WIDE_INT_PRINT_UNSIGNED"%" "l" "u" ",%u\n",
860 | size, align / BITS_PER_UNIT(8));
861 | }
862 | #endif |
863 | |
864 | /* Utility function for targets to use in implementing |
865 | ASM_OUTPUT_ALIGNED_BSS. */ |
866 | |
867 | void
868 | x86_output_aligned_bss (FILE *file, tree decl, const char *name,
869 | unsigned HOST_WIDE_INTlong size, unsigned align)
870 | {
/* Pick .lbss for objects above the threshold in the medium models,
   else the normal .bss, then emit alignment, the object label, and a
   zero-fill directive.  */
871 | if ((ix86_cmodelglobal_options.x_ix86_cmodel == CM_MEDIUM || ix86_cmodelglobal_options.x_ix86_cmodel == CM_MEDIUM_PIC)
872 | && size > (unsigned int)ix86_section_thresholdglobal_options.x_ix86_section_threshold)
873 | switch_to_section (get_named_section (decl, ".lbss", 0));
874 | else
875 | switch_to_section (bss_section);
876 | ASM_OUTPUT_ALIGN (file, floor_log2 (align / BITS_PER_UNIT))if ((floor_log2 (align / (8))) != 0) fprintf ((file), "\t.align %d\n" , 1 << (floor_log2 (align / (8))));
877 | #ifdef ASM_DECLARE_OBJECT_NAME
878 | last_assemble_variable_decl = decl;
879 | ASM_DECLARE_OBJECT_NAME (file, name, decl)do { long size; if (global_options.x_flag_gnu_unique && (decl_comdat_group (decl) != (tree) __null && (((decl )->base.public_flag) || ((contains_struct_check ((decl), ( TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 879, __FUNCTION__))->decl_common.decl_flag_1))) && (!((contains_struct_check ((decl), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 879, __FUNCTION__))->decl_common.artificial_flag) || !(( non_type_check ((decl), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 879, __FUNCTION__))->base.readonly_flag))) do { fputs ("\t.type\t" , file); assemble_name (file, name); fputs (", ", file); fprintf (file, "@%s", "gnu_unique_object"); putc ('\n', file); } while (0); else do { fputs ("\t.type\t", file); assemble_name (file , name); fputs (", ", file); fprintf (file, "@%s", "object"); putc ('\n', file); } while (0); size_directive_output = 0; if (!global_options.x_flag_inhibit_size_directive && (decl ) && ((contains_struct_check ((decl), (TS_DECL_COMMON ), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 879, __FUNCTION__))->decl_common.size)) { size_directive_output = 1; size = tree_to_uhwi (((contains_struct_check ((decl), ( TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 879, __FUNCTION__))->decl_common.size_unit)); do { long size_ = (size); fputs ("\t.size\t", file); assemble_name (file, name ); fprintf (file, ", " "%" "l" "d" "\n", size_); } while (0); } do { assemble_name ((file), (name)); fputs (":\n", (file)) ; } while (0); } while (0);
880 | #else
881 | /* Standard thing is just output label for the object. */
882 | ASM_OUTPUT_LABEL (file, name)do { assemble_name ((file), (name)); fputs (":\n", (file)); } while (0);
883 | #endif /* ASM_DECLARE_OBJECT_NAME */
/* Reserve at least one byte so the label refers to distinct storage.  */
884 | ASM_OUTPUT_SKIP (file, size ? size : 1)fprintf ((file), "%s" "%" "l" "u" "\n", "\t.zero\t", (size ? size : 1));
885 | }
886 | |
887 | /* Decide whether we must probe the stack before any space allocation |
888 | on this target. It's essentially TARGET_STACK_PROBE except when |
889 | -fstack-check causes the stack to be already probed differently. */ |
890 | |
891 | bool
892 | ix86_target_stack_probe (void)
893 | {
894 | /* Do not probe the stack twice if static stack checking is enabled. */
895 | if (flag_stack_checkglobal_options.x_flag_stack_check == STATIC_BUILTIN_STACK_CHECK)
896 | return false;
897 | 
/* Otherwise defer to the -mstack-probe target flag.  */
898 | return TARGET_STACK_PROBE((global_options.x_target_flags & (1U << 26)) != 0);
899 | }
900 | |
901 | /* Decide whether we can make a sibling call to a function. DECL is the |
902 | declaration of the function being targeted by the call and EXP is the |
903 | CALL_EXPR representing the call. */ |
904 | |
905 | static bool
906 | ix86_function_ok_for_sibcall (tree decl, tree exp)
907 | {
/* DECL is the callee's FUNCTION_DECL (NULL for an indirect call); EXP
   is the CALL_EXPR.  Returns true if a sibling (tail) call is safe.  */
908 | tree type, decl_or_type;
909 | rtx a, b;
910 | bool bind_global = decl && !targetm.binds_local_p (decl); /* callee may be interposed / reached via PLT */
911 | 
912 | if (ix86_function_naked (current_function_decl))
913 | return false;
914 | 
915 | /* Sibling call isn't OK if there are no caller-saved registers
916 | since all registers must be preserved before return. */
917 | if (cfun(cfun + 0)->machine->no_caller_saved_registers)
918 | return false;
919 | 
920 | /* If we are generating position-independent code, we cannot sibcall
921 | optimize direct calls to global functions, as the PLT requires
922 | %ebx be live. (Darwin does not have a PLT.) */
923 | if (!TARGET_MACHO0
924 | && !TARGET_64BIT((global_options.x_ix86_isa_flags & (1UL << 1)) != 0 )
925 | && flag_picglobal_options.x_flag_pic
926 | && flag_pltglobal_options.x_flag_plt
927 | && bind_global)
928 | return false;
929 | 
930 | /* If we need to align the outgoing stack, then sibcalling would
931 | unalign the stack, which may break the called function. */
932 | if (ix86_minimum_incoming_stack_boundary (true)
933 | < PREFERRED_STACK_BOUNDARYix86_preferred_stack_boundary)
934 | return false;
935 | 
/* Establish TYPE (function type) and DECL_OR_TYPE (best handle we have
   on the callee) for the ABI queries below.  */
936 | if (decl)
937 | {
938 | decl_or_type = decl;
939 | type = TREE_TYPE (decl)((contains_struct_check ((decl), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 939, __FUNCTION__))->typed.type);
940 | }
941 | else
942 | {
943 | /* We're looking at the CALL_EXPR, we need the type of the function. */
944 | type = CALL_EXPR_FN (exp)(*((const_cast<tree*> (tree_operand_check (((tree_check ((exp), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 944, __FUNCTION__, (CALL_EXPR)))), (1), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 944, __FUNCTION__))))); /* pointer expression */
945 | type = TREE_TYPE (type)((contains_struct_check ((type), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 945, __FUNCTION__))->typed.type); /* pointer type */
946 | type = TREE_TYPE (type)((contains_struct_check ((type), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 946, __FUNCTION__))->typed.type); /* function type */
947 | decl_or_type = type;
948 | }
949 | 
950 | /* If outgoing reg parm stack space changes, we cannot do sibcall. */
951 | if ((OUTGOING_REG_PARM_STACK_SPACE (type)(((global_options.x_ix86_isa_flags & (1UL << 1)) != 0) && ix86_function_type_abi (type) == MS_ABI)
952 | != OUTGOING_REG_PARM_STACK_SPACE (TREE_TYPE (current_function_decl))(((global_options.x_ix86_isa_flags & (1UL << 1)) != 0) && ix86_function_type_abi (((contains_struct_check ((current_function_decl), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 952, __FUNCTION__))->typed.type)) == MS_ABI))
953 | || (REG_PARM_STACK_SPACE (decl_or_type)ix86_reg_parm_stack_space (decl_or_type)
954 | != REG_PARM_STACK_SPACE (current_function_decl)ix86_reg_parm_stack_space (current_function_decl)))
955 | {
956 | maybe_complain_about_tail_call (exp,
957 | "inconsistent size of stack space"
958 | " allocated for arguments which are"
959 | " passed in registers");
960 | return false;
961 | }
962 | 
963 | /* Check that the return value locations are the same. Like
964 | if we are returning floats on the 80387 register stack, we cannot
965 | make a sibcall from a function that doesn't return a float to a
966 | function that does or, conversely, from a function that does return
967 | a float to a function that doesn't; the necessary stack adjustment
968 | would not be executed. This is also the place we notice
969 | differences in the return value ABI. Note that it is ok for one
970 | of the functions to have void return type as long as the return
971 | value of the other is passed in a register. */
972 | a = ix86_function_value (TREE_TYPE (exp)((contains_struct_check ((exp), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 972, __FUNCTION__))->typed.type), decl_or_type, false);
973 | b = ix86_function_value (TREE_TYPE (DECL_RESULT (cfun->decl))((contains_struct_check ((((tree_check (((cfun + 0)->decl) , "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 973, __FUNCTION__, (FUNCTION_DECL)))->decl_non_common.result )), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 973, __FUNCTION__))->typed.type),
974 | cfun(cfun + 0)->decl, false);
975 | if (STACK_REG_P (a)((((enum rtx_code) (a)->code) == REG) && ((unsigned long) (((rhs_regno(a)))) - (unsigned long) (8) <= (unsigned long) (15) - (unsigned long) (8))) || STACK_REG_P (b)((((enum rtx_code) (b)->code) == REG) && ((unsigned long) (((rhs_regno(b)))) - (unsigned long) (8) <= (unsigned long) (15) - (unsigned long) (8))))
976 | {
977 | if (!rtx_equal_p (a, b))
978 | return false;
979 | }
980 | else if (VOID_TYPE_P (TREE_TYPE (DECL_RESULT (cfun->decl)))(((enum tree_code) (((contains_struct_check ((((tree_check (( (cfun + 0)->decl), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 980, __FUNCTION__, (FUNCTION_DECL)))->decl_non_common.result )), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 980, __FUNCTION__))->typed.type))->base.code) == VOID_TYPE ))
981 | ;
982 | else if (!rtx_equal_p (a, b))
983 | return false;
984 | 
985 | if (TARGET_64BIT((global_options.x_ix86_isa_flags & (1UL << 1)) != 0 ))
986 | {
987 | /* The SYSV ABI has more call-clobbered registers;
988 | disallow sibcalls from MS to SYSV. */
989 | if (cfun(cfun + 0)->machine->call_abi == MS_ABI
990 | && ix86_function_type_abi (type) == SYSV_ABI)
991 | return false;
992 | }
993 | else
994 | {
995 | /* If this call is indirect, we'll need to be able to use a
996 | call-clobbered register for the address of the target function.
997 | Make sure that all such registers are not used for passing
998 | parameters. Note that DLLIMPORT functions and call to global
999 | function via GOT slot are indirect. */
1000 | if (!decl
1001 | || (bind_global && flag_picglobal_options.x_flag_pic && !flag_pltglobal_options.x_flag_plt)
1002 | || (TARGET_DLLIMPORT_DECL_ATTRIBUTES0 && DECL_DLLIMPORT_P (decl)((contains_struct_check ((decl), (TS_DECL_WITH_VIS), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 1002, __FUNCTION__))->decl_with_vis.dllimport_flag))
1003 | || flag_force_indirect_callglobal_options.x_flag_force_indirect_call)
1004 | {
1005 | /* Check if regparm >= 3 since arg_reg_available is set to
1006 | false if regparm == 0. If regparm is 1 or 2, there is
1007 | always a call-clobbered register available.
1008 | 
1009 | ??? The symbol indirect call doesn't need a call-clobbered
1010 | register. But we don't know if this is a symbol indirect
1011 | call or not here. */
1012 | if (ix86_function_regparm (type, decl) >= 3
1013 | && !cfun(cfun + 0)->machine->arg_reg_available)
1014 | return false;
1015 | }
1016 | }
1017 | 
1018 | if (decl && ix86_use_pseudo_pic_reg ())
1019 | {
1020 | /* When PIC register is used, it must be restored after ifunc
1021 | function returns. */
1022 | cgraph_node *node = cgraph_node::get (decl);
1023 | if (node && node->ifunc_resolver)
1024 | return false;
1025 | }
1026 | 
1027 | /* Disable sibcall if callee has indirect_return attribute and
1028 | caller doesn't since callee will return to the caller's caller
1029 | via an indirect jump. */
1030 | if (((flag_cf_protectionglobal_options.x_flag_cf_protection & (CF_RETURN | CF_BRANCH))
1031 | == (CF_RETURN | CF_BRANCH))
1032 | && lookup_attribute ("indirect_return", TYPE_ATTRIBUTES (type)((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 1032, __FUNCTION__))->type_common.attributes))
1033 | && !lookup_attribute ("indirect_return",
1034 | TYPE_ATTRIBUTES (TREE_TYPE (cfun->decl))((tree_class_check ((((contains_struct_check (((cfun + 0)-> decl), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 1034, __FUNCTION__))->typed.type)), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 1034, __FUNCTION__))->type_common.attributes)))
1035 | return false;
1036 | 
1037 | /* Otherwise okay. That also includes certain types of indirect calls. */
1038 | return true;
1039 | }
1040 | |
1041 | /* This function determines from TYPE the calling-convention. */ |
1042 | |
1043 | unsigned int
1044 | ix86_get_callcvt (const_tree type)
1045 | {
/* Returns a bitmask of IX86_CALLCVT_* flags describing TYPE's calling
   convention.  64-bit code has a single convention, so it is always
   plain cdecl there.  */
1046 | unsigned int ret = 0;
1047 | bool is_stdarg;
1048 | tree attrs;
1049 | 
1050 | if (TARGET_64BIT((global_options.x_ix86_isa_flags & (1UL << 1)) != 0 ))
1051 | return IX86_CALLCVT_CDECL0x1;
1052 | 
/* Explicit attributes on the function type take precedence.  */
1053 | attrs = TYPE_ATTRIBUTES (type)((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 1053, __FUNCTION__))->type_common.attributes);
1054 | if (attrs != NULL_TREE(tree) __null)
1055 | {
1056 | if (lookup_attribute ("cdecl", attrs))
1057 | ret |= IX86_CALLCVT_CDECL0x1;
1058 | else if (lookup_attribute ("stdcall", attrs))
1059 | ret |= IX86_CALLCVT_STDCALL0x2;
1060 | else if (lookup_attribute ("fastcall", attrs))
1061 | ret |= IX86_CALLCVT_FASTCALL0x4;
1062 | else if (lookup_attribute ("thiscall", attrs))
1063 | ret |= IX86_CALLCVT_THISCALL0x8;
1064 | 
1065 | /* Regparm isn't allowed for thiscall and fastcall. */
1066 | if ((ret & (IX86_CALLCVT_THISCALL0x8 | IX86_CALLCVT_FASTCALL0x4)) == 0)
1067 | {
1068 | if (lookup_attribute ("regparm", attrs))
1069 | ret |= IX86_CALLCVT_REGPARM0x10;
1070 | if (lookup_attribute ("sseregparm", attrs))
1071 | ret |= IX86_CALLCVT_SSEREGPARM0x20;
1072 | }
1073 | 
1074 | if (IX86_BASE_CALLCVT(ret)((ret) & (0x1 | 0x2 | 0x4 | 0x8)) != 0)
1075 | return ret;
1076 | }
1077 | 
/* No explicit base convention: -mrtd makes non-varargs functions
   stdcall by default.  */
1078 | is_stdarg = stdarg_p (type);
1079 | if (TARGET_RTD((global_options.x_target_flags & (1U << 24)) != 0) && !is_stdarg)
1080 | return IX86_CALLCVT_STDCALL0x2 | ret;
1081 | 
1082 | if (ret != 0
1083 | || is_stdarg
1084 | || TREE_CODE (type)((enum tree_code) (type)->base.code) != METHOD_TYPE
1085 | || ix86_function_type_abi (type) != MS_ABI)
1086 | return IX86_CALLCVT_CDECL0x1 | ret;
1087 | 
/* MS-ABI non-varargs methods default to thiscall.  */
1088 | return IX86_CALLCVT_THISCALL0x8;
1089 | }
1090 | |
1091 | /* Return 0 if the attributes for two types are incompatible, 1 if they |
1092 | are compatible, and 2 if they are nearly compatible (which causes a |
1093 | warning to be generated). */ |
1094 | |
1095 | static int
1096 | ix86_comp_type_attributes (const_tree type1, const_tree type2)
1097 | {
/* Two function types are compatible here iff their calling-convention
   bitmasks and regparm counts match; non-function types are always
   compatible.  */
1098 | unsigned int ccvt1, ccvt2;
1099 | 
1100 | if (TREE_CODE (type1)((enum tree_code) (type1)->base.code) != FUNCTION_TYPE
1101 | && TREE_CODE (type1)((enum tree_code) (type1)->base.code) != METHOD_TYPE)
1102 | return 1;
1103 | 
1104 | ccvt1 = ix86_get_callcvt (type1);
1105 | ccvt2 = ix86_get_callcvt (type2);
1106 | if (ccvt1 != ccvt2)
1107 | return 0;
1108 | if (ix86_function_regparm (type1, NULL__null)
1109 | != ix86_function_regparm (type2, NULL__null))
1110 | return 0;
1111 | 
1112 | return 1;
1113 | }
1114 | |
1115 | /* Return the regparm value for a function with the indicated TYPE and DECL. |
1116 | DECL may be NULL when calling function indirectly |
1117 | or considering a libcall. */ |
1118 | |
1119 | static int
1120 | ix86_function_regparm (const_tree type, const_tree decl)
1121 | {
/* Returns how many integer arguments may be passed in registers for a
   call to TYPE/DECL: the ABI maximum on 64-bit, otherwise derived from
   the calling convention, an explicit regparm attribute, or a computed
   value for local optimized functions.  */
1122 | tree attr;
1123 | int regparm;
1124 | unsigned int ccvt;
1125 | 
1126 | if (TARGET_64BIT((global_options.x_ix86_isa_flags & (1UL << 1)) != 0 ))
1127 | return (ix86_function_type_abi (type) == SYSV_ABI
1128 | ? X86_64_REGPARM_MAX6 : X86_64_MS_REGPARM_MAX4);
1129 | ccvt = ix86_get_callcvt (type);
1130 | regparm = ix86_regparmglobal_options.x_ix86_regparm; /* -mregparm=N default */
1131 | 
1132 | if ((ccvt & IX86_CALLCVT_REGPARM0x10) != 0)
1133 | {
/* Explicit regparm(N) attribute wins outright.  */
1134 | attr = lookup_attribute ("regparm", TYPE_ATTRIBUTES (type)((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 1134, __FUNCTION__))->type_common.attributes));
1135 | if (attr)
1136 | {
1137 | regparm = TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE (attr)))((unsigned long) (*tree_int_cst_elt_check ((((tree_check (((( tree_check ((attr), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 1137, __FUNCTION__, (TREE_LIST)))->list.value)), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 1137, __FUNCTION__, (TREE_LIST)))->list.value)), (0), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 1137, __FUNCTION__)));
1138 | return regparm;
1139 | }
1140 | }
1141 | else if ((ccvt & IX86_CALLCVT_FASTCALL0x4) != 0)
1142 | return 2;
1143 | else if ((ccvt & IX86_CALLCVT_THISCALL0x8) != 0)
1144 | return 1;
1145 | 
1146 | /* Use register calling convention for local functions when possible. */
1147 | if (decl
1148 | && TREE_CODE (decl)((enum tree_code) (decl)->base.code) == FUNCTION_DECL)
1149 | {
1150 | cgraph_node *target = cgraph_node::get (decl);
1151 | if (target)
1152 | target = target->function_symbol ();
1153 | 
1154 | /* Caller and callee must agree on the calling convention, so
1155 | checking here just optimize means that with
1156 | __attribute__((optimize (...))) caller could use regparm convention
1157 | and callee not, or vice versa. Instead look at whether the callee
1158 | is optimized or not. */
1159 | if (target && opt_for_fn (target->decl, optimize)(opts_for_fn (target->decl)->x_optimize)
1160 | && !(profile_flagglobal_options.x_profile_flag && !flag_fentryglobal_options.x_flag_fentry))
1161 | {
1162 | if (target->local && target->can_change_signature)
1163 | {
1164 | int local_regparm, globals = 0, regno;
1165 | 
1166 | /* Make sure no regparm register is taken by a
1167 | fixed register variable. */
1168 | for (local_regparm = 0; local_regparm < REGPARM_MAX(((global_options.x_ix86_isa_flags & (1UL << 1)) != 0) ? ((((global_options.x_ix86_isa_flags & (1UL << 1)) != 0) && ix86_cfun_abi () == MS_ABI) ? 4 : 6) : 3 );
1169 | local_regparm++)
1170 | if (fixed_regs(this_target_hard_regs->x_fixed_regs)[local_regparm])
1171 | break;
1172 | 
1173 | /* We don't want to use regparm(3) for nested functions as
1174 | these use a static chain pointer in the third argument. */
1175 | if (local_regparm == 3 && DECL_STATIC_CHAIN (target->decl)((tree_check ((target->decl), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 1175, __FUNCTION__, (FUNCTION_DECL)))->decl_with_vis.regdecl_flag ))
1176 | local_regparm = 2;
1177 | 
1178 | /* Save a register for the split stack. */
1179 | if (flag_split_stackglobal_options.x_flag_split_stack)
1180 | {
1181 | if (local_regparm == 3)
1182 | local_regparm = 2;
1183 | else if (local_regparm == 2
1184 | && DECL_STATIC_CHAIN (target->decl)((tree_check ((target->decl), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 1184, __FUNCTION__, (FUNCTION_DECL)))->decl_with_vis.regdecl_flag ))
1185 | local_regparm = 1;
1186 | }
1187 | 
1188 | /* Each fixed register usage increases register pressure,
1189 | so less registers should be used for argument passing.
1190 | This functionality can be overridden by an explicit
1191 | regparm value. */
1192 | for (regno = AX_REG0; regno <= DI_REG5; regno++)
1193 | if (fixed_regs(this_target_hard_regs->x_fixed_regs)[regno])
1194 | globals++;
1195 | 
1196 | local_regparm
1197 | = globals < local_regparm ? local_regparm - globals : 0;
1198 | 
1199 | if (local_regparm > regparm)
1200 | regparm = local_regparm;
1201 | }
1202 | }
1203 | }
1204 | 
1205 | return regparm;
1206 | }
1207 | |
1208 | /* Return 1 or 2, if we can pass up to SSE_REGPARM_MAX SFmode (1) and |
1209 | DFmode (2) arguments in SSE registers for a function with the |
1210 | indicated TYPE and DECL. DECL may be NULL when calling function |
1211 | indirectly or considering a libcall. Return -1 if any FP parameter |
1212 | should be rejected by error. This is used in situations where we imply SSE
1213 | calling convention but the function is called from another function with
1214 | SSE disabled. Otherwise return 0. */ |
1215 | |
1216 | static int
1217 | ix86_function_sseregparm (const_tree type, const_tree decl, bool warn)
1218 | {
/* 32-bit only (asserted below).  Returns 2/1 when SSE registers should
   carry DFmode/SFmode args, 0 for the plain convention, or -1 to flag
   an error case (SSE convention implied but SSE disabled).  */
1219 | gcc_assert (!TARGET_64BIT)((void)(!(!((global_options.x_ix86_isa_flags & (1UL << 1)) != 0)) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 1219, __FUNCTION__), 0 : 0));
1220 | 
1221 | /* Use SSE registers to pass SFmode and DFmode arguments if requested
1222 | by the sseregparm attribute. */
1223 | if (TARGET_SSEREGPARM((global_options.x_target_flags & (1U << 25)) != 0)
1224 | || (type && lookup_attribute ("sseregparm", TYPE_ATTRIBUTES (type)((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 1224, __FUNCTION__))->type_common.attributes))))
1225 | {
1226 | if (!TARGET_SSE((global_options.x_ix86_isa_flags & (1UL << 50)) != 0))
1227 | {
/* Attribute requested but SSE unavailable: diagnose (once, when WARN)
   and fall back to the normal convention.  */
1228 | if (warn)
1229 | {
1230 | if (decl)
1231 | error ("calling %qD with attribute sseregparm without "
1232 | "SSE/SSE2 enabled", decl);
1233 | else
1234 | error ("calling %qT with attribute sseregparm without "
1235 | "SSE/SSE2 enabled", type);
1236 | }
1237 | return 0;
1238 | }
1239 | 
1240 | return 2;
1241 | }
1242 | 
1243 | if (!decl)
1244 | return 0;
1245 | 
1246 | cgraph_node *target = cgraph_node::get (decl);
1247 | if (target)
1248 | target = target->function_symbol ();
1249 | 
1250 | /* For local functions, pass up to SSE_REGPARM_MAX SFmode
1251 | (and DFmode for SSE2) arguments in SSE registers. */
1252 | if (target
1253 | /* TARGET_SSE_MATH */
1254 | && (target_opts_for_fn (target->decl)->x_ix86_fpmath & FPMATH_SSE)
1255 | && opt_for_fn (target->decl, optimize)(opts_for_fn (target->decl)->x_optimize)
1256 | && !(profile_flagglobal_options.x_profile_flag && !flag_fentryglobal_options.x_flag_fentry))
1257 | {
1258 | if (target->local && target->can_change_signature)
1259 | {
1260 | /* Refuse to produce wrong code when local function with SSE enabled
1261 | is called from SSE disabled function.
1262 | FIXME: We need a way to detect these cases cross-ltrans partition
1263 | and avoid using SSE calling conventions on local functions called
1264 | from function with SSE disabled. For now at least delay the
1265 | warning until we know we are going to produce wrong code.
1266 | See PR66047 */
1267 | if (!TARGET_SSE((global_options.x_ix86_isa_flags & (1UL << 50)) != 0) && warn)
1268 | return -1;
1269 | return TARGET_SSE2_P (target_opts_for_fn (target->decl)(((target_opts_for_fn (target->decl) ->x_ix86_isa_flags ) & (1UL << 51)) != 0)
1270 | ->x_ix86_isa_flags)(((target_opts_for_fn (target->decl) ->x_ix86_isa_flags ) & (1UL << 51)) != 0) ? 2 : 1;
1271 | }
1272 | }
1273 | 
1274 | return 0;
1275 | }
1276 | |
1277 | /* Return true if EAX is live at the start of the function. Used by |
1278 | ix86_expand_prologue to determine if we need special help before |
1279 | calling allocate_stack_worker. */ |
1280 | |
1281 | static bool |
1282 | ix86_eax_live_at_start_p (void) |
1283 | { |
1284 | /* Cheat. Don't bother working forward from ix86_function_regparm |
1285 | to the function type to whether an actual argument is located in |
1286 | eax. Instead just look at cfg info, which is still close enough |
1287 | to correct at this point. This gives false positives for broken |
1288 | functions that might use uninitialized data that happens to be |
1289 | allocated in eax, but who cares? */ |
1290 | return REGNO_REG_SET_P (df_get_live_out (ENTRY_BLOCK_PTR_FOR_FN (cfun)), 0)bitmap_bit_p (df_get_live_out ((((cfun + 0))->cfg->x_entry_block_ptr )), 0); |
1291 | } |
1292 | |
1293 | static bool |
1294 | ix86_keep_aggregate_return_pointer (tree fntype) |
1295 | { |
1296 | tree attr; |
1297 | |
1298 | if (!TARGET_64BIT((global_options.x_ix86_isa_flags & (1UL << 1)) != 0 )) |
1299 | { |
1300 | attr = lookup_attribute ("callee_pop_aggregate_return", |
1301 | TYPE_ATTRIBUTES (fntype)((tree_class_check ((fntype), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 1301, __FUNCTION__))->type_common.attributes)); |
1302 | if (attr) |
1303 | return (TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE (attr)))((unsigned long) (*tree_int_cst_elt_check ((((tree_check (((( tree_check ((attr), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 1303, __FUNCTION__, (TREE_LIST)))->list.value)), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 1303, __FUNCTION__, (TREE_LIST)))->list.value)), (0), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 1303, __FUNCTION__))) == 0); |
1304 | |
1305 | /* For 32-bit MS-ABI the default is to keep aggregate |
1306 | return pointer. */ |
1307 | if (ix86_function_type_abi (fntype) == MS_ABI) |
1308 | return true; |
1309 | } |
1310 | return KEEP_AGGREGATE_RETURN_POINTER0 != 0; |
1311 | } |
1312 | |
1313 | /* Value is the number of bytes of arguments automatically |
1314 | popped when returning from a subroutine call. |
1315 | FUNDECL is the declaration node of the function (as a tree), |
1316 | FUNTYPE is the data type of the function (as a tree), |
1317 | or for a library call it is an identifier node for the subroutine name. |
1318 | SIZE is the number of bytes of arguments passed on the stack. |
1319 | |
1320 | On the 80386, the RTD insn may be used to pop them if the number |
1321 | of args is fixed, but if the number is variable then the caller |
1322 | must pop them all. RTD can't be used for library calls now |
1323 | because the library is compiled with the Unix compiler. |
1324 | Use of RTD is a selectable option, since it is incompatible with |
1325 | standard Unix calling sequences. If the option is not selected, |
1326 | the caller must always pop the args. |
1327 | |
1328 | The attribute stdcall is equivalent to RTD on a per module basis. */ |
1329 | |
1330 | static poly_int64 |
1331 | ix86_return_pops_args (tree fundecl, tree funtype, poly_int64 size) |
1332 | { |
1333 | unsigned int ccvt; |
1334 | |
1335 | /* None of the 64-bit ABIs pop arguments. */ |
1336 | if (TARGET_64BIT((global_options.x_ix86_isa_flags & (1UL << 1)) != 0 )) |
1337 | return 0; |
1338 | |
1339 | ccvt = ix86_get_callcvt (funtype); |
1340 | |
1341 | if ((ccvt & (IX86_CALLCVT_STDCALL0x2 | IX86_CALLCVT_FASTCALL0x4 |
1342 | | IX86_CALLCVT_THISCALL0x8)) != 0 |
1343 | && ! stdarg_p (funtype)) |
1344 | return size; |
1345 | |
1346 | /* Lose any fake structure return argument if it is passed on the stack. */ |
1347 | if (aggregate_value_p (TREE_TYPE (funtype)((contains_struct_check ((funtype), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 1347, __FUNCTION__))->typed.type), fundecl) |
1348 | && !ix86_keep_aggregate_return_pointer (funtype)) |
1349 | { |
1350 | int nregs = ix86_function_regparm (funtype, fundecl); |
1351 | if (nregs == 0) |
1352 | return GET_MODE_SIZE (Pmode)((unsigned short) mode_to_bytes ((global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode ((scalar_int_mode::from_int) E_DImode )) : (scalar_int_mode ((scalar_int_mode::from_int) E_SImode)) )).coeffs[0]); |
1353 | } |
1354 | |
1355 | return 0; |
1356 | } |
1357 | |
1358 | /* Implement the TARGET_LEGITIMATE_COMBINED_INSN hook. */ |
1359 | |
1360 | static bool |
1361 | ix86_legitimate_combined_insn (rtx_insn *insn) |
1362 | { |
1363 | int i; |
1364 | |
1365 | /* Check operand constraints in case hard registers were propagated |
1366 | into insn pattern. This check prevents combine pass from |
1367 | generating insn patterns with invalid hard register operands. |
1368 | These invalid insns can eventually confuse reload to error out |
1369 | with a spill failure. See also PRs 46829 and 46843. */ |
1370 | |
1371 | gcc_assert (INSN_CODE (insn) >= 0)((void)(!((((insn)->u.fld[5]).rt_int) >= 0) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 1371, __FUNCTION__), 0 : 0)); |
1372 | |
1373 | extract_insn (insn); |
1374 | preprocess_constraints (insn); |
1375 | |
1376 | int n_operands = recog_data.n_operands; |
1377 | int n_alternatives = recog_data.n_alternatives; |
1378 | for (i = 0; i < n_operands; i++) |
1379 | { |
1380 | rtx op = recog_data.operand[i]; |
1381 | machine_mode mode = GET_MODE (op)((machine_mode) (op)->mode); |
1382 | const operand_alternative *op_alt; |
1383 | int offset = 0; |
1384 | bool win; |
1385 | int j; |
1386 | |
1387 | /* A unary operator may be accepted by the predicate, but it |
1388 | is irrelevant for matching constraints. */ |
1389 | if (UNARY_P (op)((rtx_class[(int) (((enum rtx_code) (op)->code))]) == RTX_UNARY )) |
1390 | op = XEXP (op, 0)(((op)->u.fld[0]).rt_rtx); |
1391 | |
1392 | if (SUBREG_P (op)(((enum rtx_code) (op)->code) == SUBREG)) |
1393 | { |
1394 | if (REG_P (SUBREG_REG (op))(((enum rtx_code) ((((op)->u.fld[0]).rt_rtx))->code) == REG) |
1395 | && REGNO (SUBREG_REG (op))(rhs_regno((((op)->u.fld[0]).rt_rtx))) < FIRST_PSEUDO_REGISTER76) |
1396 | offset = subreg_regno_offset (REGNO (SUBREG_REG (op))(rhs_regno((((op)->u.fld[0]).rt_rtx))), |
1397 | GET_MODE (SUBREG_REG (op))((machine_mode) ((((op)->u.fld[0]).rt_rtx))->mode), |
1398 | SUBREG_BYTE (op)(((op)->u.fld[1]).rt_subreg), |
1399 | GET_MODE (op)((machine_mode) (op)->mode)); |
1400 | op = SUBREG_REG (op)(((op)->u.fld[0]).rt_rtx); |
1401 | } |
1402 | |
1403 | if (!(REG_P (op)(((enum rtx_code) (op)->code) == REG) && HARD_REGISTER_P (op)((((rhs_regno(op))) < 76)))) |
1404 | continue; |
1405 | |
1406 | op_alt = recog_op_alt; |
1407 | |
1408 | /* Operand has no constraints, anything is OK. */ |
1409 | win = !n_alternatives; |
1410 | |
1411 | alternative_mask preferred = get_preferred_alternatives (insn); |
1412 | for (j = 0; j < n_alternatives; j++, op_alt += n_operands) |
1413 | { |
1414 | if (!TEST_BIT (preferred, j)(((preferred) >> (j)) & 1)) |
1415 | continue; |
1416 | if (op_alt[i].anything_ok |
1417 | || (op_alt[i].matches != -1 |
1418 | && operands_match_p |
1419 | (recog_data.operand[i], |
1420 | recog_data.operand[op_alt[i].matches])) |
1421 | || reg_fits_class_p (op, op_alt[i].cl, offset, mode)) |
1422 | { |
1423 | win = true; |
1424 | break; |
1425 | } |
1426 | } |
1427 | |
1428 | if (!win) |
1429 | return false; |
1430 | } |
1431 | |
1432 | return true; |
1433 | } |
1434 | |
1435 | /* Implement the TARGET_ASAN_SHADOW_OFFSET hook. */ |
1436 | |
1437 | static unsigned HOST_WIDE_INTlong |
1438 | ix86_asan_shadow_offset (void) |
1439 | { |
1440 | return SUBTARGET_SHADOW_OFFSET(((global_options.x_ix86_isa_flags & (1UL << 4)) != 0) ? 0x7fff8000L : 1L << 29); |
1441 | } |
1442 | |
1443 | /* Argument support functions. */ |
1444 | |
1445 | /* Return true when register may be used to pass function parameters. */ |
1446 | bool |
1447 | ix86_function_arg_regno_p (int regno) |
1448 | { |
1449 | int i; |
1450 | enum calling_abi call_abi; |
1451 | const int *parm_regs; |
1452 | |
1453 | if (TARGET_SSE((global_options.x_ix86_isa_flags & (1UL << 50)) != 0) && SSE_REGNO_P (regno)(((unsigned long) ((regno)) - (unsigned long) (20) <= (unsigned long) (27) - (unsigned long) (20)) || ((unsigned long) ((regno )) - (unsigned long) (44) <= (unsigned long) (51) - (unsigned long) (44)) || ((unsigned long) ((regno)) - (unsigned long) ( 52) <= (unsigned long) (67) - (unsigned long) (52))) |
1454 | && regno < FIRST_SSE_REG20 + SSE_REGPARM_MAX(((global_options.x_ix86_isa_flags & (1UL << 1)) != 0) ? ((((global_options.x_ix86_isa_flags & (1UL << 1)) != 0) && ix86_cfun_abi () == MS_ABI) ? 4 : 8) : ( ((global_options.x_ix86_isa_flags & (1UL << 50)) != 0) ? (0 ? 4 : 3) : 0))) |
1455 | return true; |
1456 | |
1457 | if (!TARGET_64BIT((global_options.x_ix86_isa_flags & (1UL << 1)) != 0 )) |
1458 | return (regno < REGPARM_MAX(((global_options.x_ix86_isa_flags & (1UL << 1)) != 0) ? ((((global_options.x_ix86_isa_flags & (1UL << 1)) != 0) && ix86_cfun_abi () == MS_ABI) ? 4 : 6) : 3 ) |
1459 | || (TARGET_MMX((global_options.x_ix86_isa_flags & (1UL << 36)) != 0) && MMX_REGNO_P (regno)((unsigned long) ((regno)) - (unsigned long) (28) <= (unsigned long) (35) - (unsigned long) (28)) |
1460 | && regno < FIRST_MMX_REG28 + MMX_REGPARM_MAX(((global_options.x_ix86_isa_flags & (1UL << 1)) != 0) ? 0 : (((global_options.x_ix86_isa_flags & (1UL << 36)) != 0) ? (0 ? 0 : 3) : 0)))); |
1461 | |
1462 | /* TODO: The function should depend on current function ABI but |
1463 | builtins.cc would need updating then. Therefore we use the |
1464 | default ABI. */ |
1465 | call_abi = ix86_cfun_abi (); |
1466 | |
1467 | /* RAX is used as hidden argument to va_arg functions. */ |
1468 | if (call_abi == SYSV_ABI && regno == AX_REG0) |
1469 | return true; |
1470 | |
1471 | if (call_abi == MS_ABI) |
1472 | parm_regs = x86_64_ms_abi_int_parameter_registers; |
1473 | else |
1474 | parm_regs = x86_64_int_parameter_registers; |
1475 | |
1476 | for (i = 0; i < (call_abi == MS_ABI |
1477 | ? X86_64_MS_REGPARM_MAX4 : X86_64_REGPARM_MAX6); i++) |
1478 | if (regno == parm_regs[i]) |
1479 | return true; |
1480 | return false; |
1481 | } |
1482 | |
1483 | /* Return if we do not know how to pass ARG solely in registers. */ |
1484 | |
1485 | static bool |
1486 | ix86_must_pass_in_stack (const function_arg_info &arg) |
1487 | { |
1488 | if (must_pass_in_stack_var_size_or_pad (arg)) |
1489 | return true; |
1490 | |
1491 | /* For 32-bit, we want TImode aggregates to go on the stack. But watch out! |
1492 | The layout_type routine is crafty and tries to trick us into passing |
1493 | currently unsupported vector types on the stack by using TImode. */ |
1494 | return (!TARGET_64BIT((global_options.x_ix86_isa_flags & (1UL << 1)) != 0 ) && arg.mode == TImode(scalar_int_mode ((scalar_int_mode::from_int) E_TImode)) |
1495 | && arg.type && TREE_CODE (arg.type)((enum tree_code) (arg.type)->base.code) != VECTOR_TYPE); |
1496 | } |
1497 | |
1498 | /* It returns the size, in bytes, of the area reserved for arguments passed |
1499 | in registers for the function represented by fndecl dependent to the used |
1500 | abi format. */ |
1501 | int |
1502 | ix86_reg_parm_stack_space (const_tree fndecl) |
1503 | { |
1504 | enum calling_abi call_abi = SYSV_ABI; |
1505 | if (fndecl != NULL_TREE(tree) __null && TREE_CODE (fndecl)((enum tree_code) (fndecl)->base.code) == FUNCTION_DECL) |
1506 | call_abi = ix86_function_abi (fndecl); |
1507 | else |
1508 | call_abi = ix86_function_type_abi (fndecl); |
1509 | if (TARGET_64BIT((global_options.x_ix86_isa_flags & (1UL << 1)) != 0 ) && call_abi == MS_ABI) |
1510 | return 32; |
1511 | return 0; |
1512 | } |
1513 | |
1514 | /* We add this as a workaround in order to use libc_has_function |
1515 | hook in i386.md. */ |
1516 | bool |
1517 | ix86_libc_has_function (enum function_class fn_class) |
1518 | { |
1519 | return targetm.libc_has_function (fn_class, NULL_TREE(tree) __null); |
1520 | } |
1521 | |
1522 | /* Returns value SYSV_ABI, MS_ABI dependent on fntype, |
1523 | specifying the call abi used. */ |
1524 | enum calling_abi |
1525 | ix86_function_type_abi (const_tree fntype) |
1526 | { |
1527 | enum calling_abi abi = ix86_abiglobal_options.x_ix86_abi; |
1528 | |
1529 | if (fntype == NULL_TREE(tree) __null || TYPE_ATTRIBUTES (fntype)((tree_class_check ((fntype), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 1529, __FUNCTION__))->type_common.attributes) == NULL_TREE(tree) __null) |
1530 | return abi; |
1531 | |
1532 | if (abi == SYSV_ABI |
1533 | && lookup_attribute ("ms_abi", TYPE_ATTRIBUTES (fntype)((tree_class_check ((fntype), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 1533, __FUNCTION__))->type_common.attributes))) |
1534 | { |
1535 | static int warned; |
1536 | if (TARGET_X32((global_options.x_ix86_isa_flags & (1UL << 58)) != 0) && !warned) |
1537 | { |
1538 | error ("X32 does not support %<ms_abi%> attribute"); |
1539 | warned = 1; |
1540 | } |
1541 | |
1542 | abi = MS_ABI; |
1543 | } |
1544 | else if (abi == MS_ABI |
1545 | && lookup_attribute ("sysv_abi", TYPE_ATTRIBUTES (fntype)((tree_class_check ((fntype), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 1545, __FUNCTION__))->type_common.attributes))) |
1546 | abi = SYSV_ABI; |
1547 | |
1548 | return abi; |
1549 | } |
1550 | |
1551 | enum calling_abi |
1552 | ix86_function_abi (const_tree fndecl) |
1553 | { |
1554 | return fndecl ? ix86_function_type_abi (TREE_TYPE (fndecl)((contains_struct_check ((fndecl), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 1554, __FUNCTION__))->typed.type)) : ix86_abiglobal_options.x_ix86_abi; |
1555 | } |
1556 | |
1557 | /* Returns value SYSV_ABI, MS_ABI dependent on cfun, |
1558 | specifying the call abi used. */ |
1559 | enum calling_abi |
1560 | ix86_cfun_abi (void) |
1561 | { |
1562 | return cfun(cfun + 0) ? cfun(cfun + 0)->machine->call_abi : ix86_abiglobal_options.x_ix86_abi; |
1563 | } |
1564 | |
1565 | bool |
1566 | ix86_function_ms_hook_prologue (const_tree fn) |
1567 | { |
1568 | if (fn && lookup_attribute ("ms_hook_prologue", DECL_ATTRIBUTES (fn)((contains_struct_check ((fn), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 1568, __FUNCTION__))->decl_common.attributes))) |
1569 | { |
1570 | if (decl_function_context (fn) != NULL_TREE(tree) __null) |
1571 | error_at (DECL_SOURCE_LOCATION (fn)((contains_struct_check ((fn), (TS_DECL_MINIMAL), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 1571, __FUNCTION__))->decl_minimal.locus), |
1572 | "%<ms_hook_prologue%> attribute is not compatible " |
1573 | "with nested function"); |
1574 | else |
1575 | return true; |
1576 | } |
1577 | return false; |
1578 | } |
1579 | |
1580 | bool |
1581 | ix86_function_naked (const_tree fn) |
1582 | { |
1583 | if (fn && lookup_attribute ("naked", DECL_ATTRIBUTES (fn)((contains_struct_check ((fn), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 1583, __FUNCTION__))->decl_common.attributes))) |
1584 | return true; |
1585 | |
1586 | return false; |
1587 | } |
1588 | |
1589 | /* Write the extra assembler code needed to declare a function properly. */ |
1590 | |
1591 | void |
1592 | ix86_asm_output_function_label (FILE *out_file, const char *fname, |
1593 | tree decl) |
1594 | { |
1595 | bool is_ms_hook = ix86_function_ms_hook_prologue (decl); |
1596 | |
1597 | if (cfun(cfun + 0)) |
1598 | cfun(cfun + 0)->machine->function_label_emitted = true; |
1599 | |
1600 | if (is_ms_hook) |
1601 | { |
1602 | int i, filler_count = (TARGET_64BIT((global_options.x_ix86_isa_flags & (1UL << 1)) != 0 ) ? 32 : 16); |
1603 | unsigned int filler_cc = 0xcccccccc; |
1604 | |
1605 | for (i = 0; i < filler_count; i += 4) |
1606 | fprintf (out_file, ASM_LONG"\t.long\t" " %#x\n", filler_cc); |
1607 | } |
1608 | |
1609 | #ifdef SUBTARGET_ASM_UNWIND_INIT |
1610 | SUBTARGET_ASM_UNWIND_INIT (out_file); |
1611 | #endif |
1612 | |
1613 | ASM_OUTPUT_LABEL (out_file, fname)do { assemble_name ((out_file), (fname)); fputs (":\n", (out_file )); } while (0); |
1614 | |
1615 | /* Output magic byte marker, if hot-patch attribute is set. */ |
1616 | if (is_ms_hook) |
1617 | { |
1618 | if (TARGET_64BIT((global_options.x_ix86_isa_flags & (1UL << 1)) != 0 )) |
1619 | { |
1620 | /* leaq [%rsp + 0], %rsp */ |
1621 | fputs (ASM_BYTE"\t.byte\t" "0x48, 0x8d, 0xa4, 0x24, 0x00, 0x00, 0x00, 0x00\n", |
1622 | out_file); |
1623 | } |
1624 | else |
1625 | { |
1626 | /* movl.s %edi, %edi |
1627 | push %ebp |
1628 | movl.s %esp, %ebp */ |
1629 | fputs (ASM_BYTE"\t.byte\t" "0x8b, 0xff, 0x55, 0x8b, 0xec\n", out_file); |
1630 | } |
1631 | } |
1632 | } |
1633 | |
1634 | /* Implementation of call abi switching target hook. Specific to FNDECL |
1635 | the specific call register sets are set. See also |
1636 | ix86_conditional_register_usage for more details. */ |
1637 | void |
1638 | ix86_call_abi_override (const_tree fndecl) |
1639 | { |
1640 | cfun(cfun + 0)->machine->call_abi = ix86_function_abi (fndecl); |
1641 | } |
1642 | |
1643 | /* Return 1 if pseudo register should be created and used to hold |
1644 | GOT address for PIC code. */ |
1645 | bool |
1646 | ix86_use_pseudo_pic_reg (void) |
1647 | { |
1648 | if ((TARGET_64BIT((global_options.x_ix86_isa_flags & (1UL << 1)) != 0 ) |
1649 | && (ix86_cmodelglobal_options.x_ix86_cmodel == CM_SMALL_PIC |
1650 | || TARGET_PECOFF0)) |
1651 | || !flag_picglobal_options.x_flag_pic) |
1652 | return false; |
1653 | return true; |
1654 | } |
1655 | |
1656 | /* Initialize large model PIC register. */ |
1657 | |
1658 | static void |
1659 | ix86_init_large_pic_reg (unsigned int tmp_regno) |
1660 | { |
1661 | rtx_code_label *label; |
1662 | rtx tmp_reg; |
1663 | |
1664 | gcc_assert (Pmode == DImode)((void)(!((global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode ((scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ( (scalar_int_mode::from_int) E_SImode))) == (scalar_int_mode ( (scalar_int_mode::from_int) E_DImode))) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 1664, __FUNCTION__), 0 : 0)); |
1665 | label = gen_label_rtx (); |
1666 | emit_label (label); |
1667 | LABEL_PRESERVE_P (label)(__extension__ ({ __typeof ((label)) const _rtx = ((label)); if (((enum rtx_code) (_rtx)->code) != CODE_LABEL && ( (enum rtx_code) (_rtx)->code) != NOTE) rtl_check_failed_flag ("LABEL_PRESERVE_P",_rtx, "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 1667, __FUNCTION__); _rtx; })->in_struct) = 1; |
1668 | tmp_reg = gen_rtx_REG (Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode ( (scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode ::from_int) E_SImode))), tmp_regno); |
1669 | gcc_assert (REGNO (pic_offset_table_rtx) != tmp_regno)((void)(!((rhs_regno((this_target_rtl->x_pic_offset_table_rtx ))) != tmp_regno) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 1669, __FUNCTION__), 0 : 0)); |
1670 | emit_insn (gen_set_rip_rex64 (pic_offset_table_rtx(this_target_rtl->x_pic_offset_table_rtx), |
1671 | label)); |
1672 | emit_insn (gen_set_got_offset_rex64 (tmp_reg, label)); |
1673 | emit_insn (gen_add2_insn (pic_offset_table_rtx(this_target_rtl->x_pic_offset_table_rtx), tmp_reg)); |
1674 | const char *name = LABEL_NAME (label)(((label)->u.fld[6]).rt_str); |
1675 | PUT_CODE (label, NOTE)((label)->code = (NOTE)); |
1676 | NOTE_KIND (label)(((label)->u.fld[4]).rt_int) = NOTE_INSN_DELETED_LABEL; |
1677 | NOTE_DELETED_LABEL_NAME (label)(((label)->u.fld[3]).rt_str) = name; |
1678 | } |
1679 | |
1680 | /* Create and initialize PIC register if required. */ |
1681 | static void |
1682 | ix86_init_pic_reg (void) |
1683 | { |
1684 | edge entry_edge; |
1685 | rtx_insn *seq; |
1686 | |
1687 | if (!ix86_use_pseudo_pic_reg ()) |
1688 | return; |
1689 | |
1690 | start_sequence (); |
1691 | |
1692 | if (TARGET_64BIT((global_options.x_ix86_isa_flags & (1UL << 1)) != 0 )) |
1693 | { |
1694 | if (ix86_cmodelglobal_options.x_ix86_cmodel == CM_LARGE_PIC) |
1695 | ix86_init_large_pic_reg (R11_REG39); |
1696 | else |
1697 | emit_insn (gen_set_got_rex64 (pic_offset_table_rtx(this_target_rtl->x_pic_offset_table_rtx))); |
1698 | } |
1699 | else |
1700 | { |
1701 | /* If there is future mcount call in the function it is more profitable |
1702 | to emit SET_GOT into ABI defined REAL_PIC_OFFSET_TABLE_REGNUM. */ |
1703 | rtx reg = crtl(&x_rtl)->profile |
1704 | ? gen_rtx_REG (Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode ( (scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode ::from_int) E_SImode))), REAL_PIC_OFFSET_TABLE_REGNUM(((global_options.x_ix86_isa_flags & (1UL << 1)) != 0) ? 43 : 3)) |
1705 | : pic_offset_table_rtx(this_target_rtl->x_pic_offset_table_rtx); |
1706 | rtx_insn *insn = emit_insn (gen_set_got (reg)); |
1707 | RTX_FRAME_RELATED_P (insn)(__extension__ ({ __typeof ((insn)) const _rtx = ((insn)); if (((enum rtx_code) (_rtx)->code) != DEBUG_INSN && ( (enum rtx_code) (_rtx)->code) != INSN && ((enum rtx_code ) (_rtx)->code) != CALL_INSN && ((enum rtx_code) ( _rtx)->code) != JUMP_INSN && ((enum rtx_code) (_rtx )->code) != BARRIER && ((enum rtx_code) (_rtx)-> code) != SET) rtl_check_failed_flag ("RTX_FRAME_RELATED_P",_rtx , "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 1707, __FUNCTION__); _rtx; })->frame_related) = 1; |
1708 | if (crtl(&x_rtl)->profile) |
1709 | emit_move_insn (pic_offset_table_rtx(this_target_rtl->x_pic_offset_table_rtx), reg); |
1710 | add_reg_note (insn, REG_CFA_FLUSH_QUEUE, NULL_RTX(rtx) 0); |
1711 | } |
1712 | |
1713 | seq = get_insns (); |
1714 | end_sequence (); |
1715 | |
1716 | entry_edge = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)(((cfun + 0))->cfg->x_entry_block_ptr)); |
1717 | insert_insn_on_edge (seq, entry_edge); |
1718 | commit_one_edge_insertion (entry_edge); |
1719 | } |
1720 | |
1721 | /* Initialize a variable CUM of type CUMULATIVE_ARGS |
1722 | for a call to a function whose data type is FNTYPE. |
1723 | For a library call, FNTYPE is 0. */ |
1724 | |
1725 | void |
1726 | init_cumulative_args (CUMULATIVE_ARGS *cum, /* Argument info to initialize */ |
1727 | tree fntype, /* tree ptr for function decl */ |
1728 | rtx libname, /* SYMBOL_REF of library name or 0 */ |
1729 | tree fndecl, |
1730 | int caller) |
1731 | { |
1732 | struct cgraph_node *local_info_node = NULL__null; |
1733 | struct cgraph_node *target = NULL__null; |
1734 | |
1735 | /* Set silent_p to false to raise an error for invalid calls when |
1736 | expanding function body. */ |
1737 | cfun(cfun + 0)->machine->silent_p = false; |
1738 | |
1739 | memset (cum, 0, sizeof (*cum)); |
1740 | |
1741 | if (fndecl) |
1742 | { |
1743 | target = cgraph_node::get (fndecl); |
1744 | if (target) |
1745 | { |
1746 | target = target->function_symbol (); |
1747 | local_info_node = cgraph_node::local_info_node (target->decl); |
1748 | cum->call_abi = ix86_function_abi (target->decl); |
1749 | } |
1750 | else |
1751 | cum->call_abi = ix86_function_abi (fndecl); |
1752 | } |
1753 | else |
1754 | cum->call_abi = ix86_function_type_abi (fntype); |
1755 | |
1756 | cum->caller = caller; |
1757 | |
1758 | /* Set up the number of registers to use for passing arguments. */ |
1759 | cum->nregs = ix86_regparmglobal_options.x_ix86_regparm; |
1760 | if (TARGET_64BIT((global_options.x_ix86_isa_flags & (1UL << 1)) != 0 )) |
1761 | { |
1762 | cum->nregs = (cum->call_abi == SYSV_ABI |
1763 | ? X86_64_REGPARM_MAX6 |
1764 | : X86_64_MS_REGPARM_MAX4); |
1765 | } |
1766 | if (TARGET_SSE((global_options.x_ix86_isa_flags & (1UL << 50)) != 0)) |
1767 | { |
1768 | cum->sse_nregs = SSE_REGPARM_MAX(((global_options.x_ix86_isa_flags & (1UL << 1)) != 0) ? ((((global_options.x_ix86_isa_flags & (1UL << 1)) != 0) && ix86_cfun_abi () == MS_ABI) ? 4 : 8) : ( ((global_options.x_ix86_isa_flags & (1UL << 50)) != 0) ? (0 ? 4 : 3) : 0)); |
1769 | if (TARGET_64BIT((global_options.x_ix86_isa_flags & (1UL << 1)) != 0 )) |
1770 | { |
1771 | cum->sse_nregs = (cum->call_abi == SYSV_ABI |
1772 | ? X86_64_SSE_REGPARM_MAX8 |
1773 | : X86_64_MS_SSE_REGPARM_MAX4); |
1774 | } |
1775 | } |
1776 | if (TARGET_MMX((global_options.x_ix86_isa_flags & (1UL << 36)) != 0)) |
1777 | cum->mmx_nregs = MMX_REGPARM_MAX(((global_options.x_ix86_isa_flags & (1UL << 1)) != 0) ? 0 : (((global_options.x_ix86_isa_flags & (1UL << 36)) != 0) ? (0 ? 0 : 3) : 0)); |
1778 | cum->warn_avx512f = true; |
1779 | cum->warn_avx = true; |
1780 | cum->warn_sse = true; |
1781 | cum->warn_mmx = true; |
1782 | |
1783 | /* Because type might mismatch in between caller and callee, we need to |
1784 | use actual type of function for local calls. |
1785 | FIXME: cgraph_analyze can be told to actually record if function uses |
1786 | va_start so for local functions maybe_vaarg can be made aggressive |
1787 | helping K&R code. |
1788 | FIXME: once typesytem is fixed, we won't need this code anymore. */ |
1789 | if (local_info_node && local_info_node->local |
1790 | && local_info_node->can_change_signature) |
1791 | fntype = TREE_TYPE (target->decl)((contains_struct_check ((target->decl), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 1791, __FUNCTION__))->typed.type); |
1792 | cum->stdarg = stdarg_p (fntype); |
1793 | cum->maybe_vaarg = (fntype |
1794 | ? (!prototype_p (fntype) || stdarg_p (fntype)) |
1795 | : !libname); |
1796 | |
1797 | cum->decl = fndecl; |
1798 | |
1799 | cum->warn_empty = !warn_abiglobal_options.x_warn_abi || cum->stdarg; |
1800 | if (!cum->warn_empty && fntype) |
1801 | { |
1802 | function_args_iterator iter; |
1803 | tree argtype; |
1804 | bool seen_empty_type = false; |
1805 | FOREACH_FUNCTION_ARGS (fntype, argtype, iter)for (function_args_iter_init (&(iter), (fntype)); (argtype = function_args_iter_cond (&(iter))) != (tree) __null; function_args_iter_next (&(iter))) |
1806 | { |
1807 | if (argtype == error_mark_nodeglobal_trees[TI_ERROR_MARK] || VOID_TYPE_P (argtype)(((enum tree_code) (argtype)->base.code) == VOID_TYPE)) |
1808 | break; |
1809 | if (TYPE_EMPTY_P (argtype)((tree_class_check ((argtype), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 1809, __FUNCTION__))->type_common.empty_flag)) |
1810 | seen_empty_type = true; |
1811 | else if (seen_empty_type) |
1812 | { |
1813 | cum->warn_empty = true; |
1814 | break; |
1815 | } |
1816 | } |
1817 | } |
1818 | |
1819 | if (!TARGET_64BIT((global_options.x_ix86_isa_flags & (1UL << 1)) != 0 )) |
1820 | { |
1821 | /* If there are variable arguments, then we won't pass anything |
1822 | in registers in 32-bit mode. */ |
1823 | if (stdarg_p (fntype)) |
1824 | { |
1825 | cum->nregs = 0; |
1826 | /* Since in 32-bit, variable arguments are always passed on |
1827 | stack, there is scratch register available for indirect |
1828 | sibcall. */ |
1829 | cfun(cfun + 0)->machine->arg_reg_available = true; |
1830 | cum->sse_nregs = 0; |
1831 | cum->mmx_nregs = 0; |
1832 | cum->warn_avx512f = false; |
1833 | cum->warn_avx = false; |
1834 | cum->warn_sse = false; |
1835 | cum->warn_mmx = false; |
1836 | return; |
1837 | } |
1838 | |
1839 | /* Use ecx and edx registers if function has fastcall attribute, |
1840 | else look for regparm information. */ |
1841 | if (fntype) |
1842 | { |
1843 | unsigned int ccvt = ix86_get_callcvt (fntype); |
1844 | if ((ccvt & IX86_CALLCVT_THISCALL0x8) != 0) |
1845 | { |
1846 | cum->nregs = 1; |
1847 | cum->fastcall = 1; /* Same first register as in fastcall. */ |
1848 | } |
1849 | else if ((ccvt & IX86_CALLCVT_FASTCALL0x4) != 0) |
1850 | { |
1851 | cum->nregs = 2; |
1852 | cum->fastcall = 1; |
1853 | } |
1854 | else |
1855 | cum->nregs = ix86_function_regparm (fntype, fndecl); |
1856 | } |
1857 | |
1858 | /* Set up the number of SSE registers used for passing SFmode |
1859 | and DFmode arguments. Warn for mismatching ABI. */ |
1860 | cum->float_in_sse = ix86_function_sseregparm (fntype, fndecl, true); |
1861 | } |
1862 | |
1863 | cfun(cfun + 0)->machine->arg_reg_available = (cum->nregs > 0); |
1864 | } |
1865 | |
1866 | /* Return the "natural" mode for TYPE. In most cases, this is just TYPE_MODE. |
1867 | But in the case of vector types, it is some vector mode. |
1868 | |
1869 | When we have only some of our vector isa extensions enabled, then there |
1870 | are some modes for which vector_mode_supported_p is false. For these |
1871 | modes, the generic vector support in gcc will choose some non-vector mode |
1872 | in order to implement the type. By computing the natural mode, we'll |
1873 | select the proper ABI location for the operand and not depend on whatever |
1874 | the middle-end decides to do with these vector types. |
1875 | |
1876 | The midde-end can't deal with the vector types > 16 bytes. In this |
1877 | case, we return the original mode and warn ABI change if CUM isn't |
1878 | NULL. |
1879 | |
1880 | If INT_RETURN is true, warn ABI change if the vector mode isn't |
1881 | available for function return value. */ |
1882 | |
1883 | static machine_mode |
1884 | type_natural_mode (const_tree type, const CUMULATIVE_ARGS *cum, |
1885 | bool in_return) |
1886 | { |
1887 | machine_mode mode = TYPE_MODE (type)((((enum tree_code) ((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 1887, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode (type) : (type)->type_common.mode); |
1888 | |
1889 | if (TREE_CODE (type)((enum tree_code) (type)->base.code) == VECTOR_TYPE && !VECTOR_MODE_P (mode)(((enum mode_class) mode_class[mode]) == MODE_VECTOR_BOOL || ( (enum mode_class) mode_class[mode]) == MODE_VECTOR_INT || ((enum mode_class) mode_class[mode]) == MODE_VECTOR_FLOAT || ((enum mode_class) mode_class[mode]) == MODE_VECTOR_FRACT || ((enum mode_class) mode_class[mode]) == MODE_VECTOR_UFRACT || ((enum mode_class) mode_class[mode]) == MODE_VECTOR_ACCUM || ((enum mode_class) mode_class[mode]) == MODE_VECTOR_UACCUM)) |
1890 | { |
1891 | HOST_WIDE_INTlong size = int_size_in_bytes (type); |
1892 | if ((size == 8 || size == 16 || size == 32 || size == 64) |
1893 | /* ??? Generic code allows us to create width 1 vectors. Ignore. */ |
1894 | && TYPE_VECTOR_SUBPARTS (type) > 1) |
1895 | { |
1896 | machine_mode innermode = TYPE_MODE (TREE_TYPE (type))((((enum tree_code) ((tree_class_check ((((contains_struct_check ((type), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 1896, __FUNCTION__))->typed.type)), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 1896, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode (((contains_struct_check ((type), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 1896, __FUNCTION__))->typed.type)) : (((contains_struct_check ((type), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 1896, __FUNCTION__))->typed.type))->type_common.mode); |
1897 | |
1898 | /* There are no XFmode vector modes ... */ |
1899 | if (innermode == XFmode(scalar_float_mode ((scalar_float_mode::from_int) E_XFmode))) |
1900 | return mode; |
1901 | |
1902 | /* ... and no decimal float vector modes. */ |
1903 | if (DECIMAL_FLOAT_MODE_P (innermode)(((enum mode_class) mode_class[innermode]) == MODE_DECIMAL_FLOAT )) |
1904 | return mode; |
1905 | |
1906 | if (TREE_CODE (TREE_TYPE (type))((enum tree_code) (((contains_struct_check ((type), (TS_TYPED ), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 1906, __FUNCTION__))->typed.type))->base.code) == REAL_TYPE) |
1907 | mode = MIN_MODE_VECTOR_FLOAT; |
1908 | else |
1909 | mode = MIN_MODE_VECTOR_INT; |
1910 | |
1911 | /* Get the mode which has this inner mode and number of units. */ |
1912 | FOR_EACH_MODE_FROM (mode, mode)for ((mode) = (mode); mode_iterator::iterate_p (&(mode)); mode_iterator::get_next (&(mode))) |
1913 | if (GET_MODE_NUNITS (mode)(mode_to_nunits (mode).coeffs[0]) == TYPE_VECTOR_SUBPARTS (type) |
1914 | && GET_MODE_INNER (mode)(mode_to_inner (mode)) == innermode) |
1915 | { |
1916 | if (size == 64 && !TARGET_AVX512F((global_options.x_ix86_isa_flags & (1UL << 15)) != 0) && !TARGET_IAMCU((global_options.x_target_flags & (1U << 12)) != 0)) |
1917 | { |
1918 | static bool warnedavx512f; |
1919 | static bool warnedavx512f_ret; |
1920 | |
1921 | if (cum && cum->warn_avx512f && !warnedavx512f) |
1922 | { |
1923 | if (warning (OPT_Wpsabi, "AVX512F vector argument " |
1924 | "without AVX512F enabled changes the ABI")) |
1925 | warnedavx512f = true; |
1926 | } |
1927 | else if (in_return && !warnedavx512f_ret) |
1928 | { |
1929 | if (warning (OPT_Wpsabi, "AVX512F vector return " |
1930 | "without AVX512F enabled changes the ABI")) |
1931 | warnedavx512f_ret = true; |
1932 | } |
1933 | |
1934 | return TYPE_MODE (type)((((enum tree_code) ((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 1934, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode (type) : (type)->type_common.mode); |
1935 | } |
1936 | else if (size == 32 && !TARGET_AVX((global_options.x_ix86_isa_flags & (1UL << 8)) != 0 ) && !TARGET_IAMCU((global_options.x_target_flags & (1U << 12)) != 0)) |
1937 | { |
1938 | static bool warnedavx; |
1939 | static bool warnedavx_ret; |
1940 | |
1941 | if (cum && cum->warn_avx && !warnedavx) |
1942 | { |
1943 | if (warning (OPT_Wpsabi, "AVX vector argument " |
1944 | "without AVX enabled changes the ABI")) |
1945 | warnedavx = true; |
1946 | } |
1947 | else if (in_return && !warnedavx_ret) |
1948 | { |
1949 | if (warning (OPT_Wpsabi, "AVX vector return " |
1950 | "without AVX enabled changes the ABI")) |
1951 | warnedavx_ret = true; |
1952 | } |
1953 | |
1954 | return TYPE_MODE (type)((((enum tree_code) ((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 1954, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode (type) : (type)->type_common.mode); |
1955 | } |
1956 | else if (((size == 8 && TARGET_64BIT((global_options.x_ix86_isa_flags & (1UL << 1)) != 0 )) || size == 16) |
1957 | && !TARGET_SSE((global_options.x_ix86_isa_flags & (1UL << 50)) != 0) |
1958 | && !TARGET_IAMCU((global_options.x_target_flags & (1U << 12)) != 0)) |
1959 | { |
1960 | static bool warnedsse; |
1961 | static bool warnedsse_ret; |
1962 | |
1963 | if (cum && cum->warn_sse && !warnedsse) |
1964 | { |
1965 | if (warning (OPT_Wpsabi, "SSE vector argument " |
1966 | "without SSE enabled changes the ABI")) |
1967 | warnedsse = true; |
1968 | } |
1969 | else if (!TARGET_64BIT((global_options.x_ix86_isa_flags & (1UL << 1)) != 0 ) && in_return && !warnedsse_ret) |
1970 | { |
1971 | if (warning (OPT_Wpsabi, "SSE vector return " |
1972 | "without SSE enabled changes the ABI")) |
1973 | warnedsse_ret = true; |
1974 | } |
1975 | } |
1976 | else if ((size == 8 && !TARGET_64BIT((global_options.x_ix86_isa_flags & (1UL << 1)) != 0 )) |
1977 | && (!cfun(cfun + 0) |
1978 | || cfun(cfun + 0)->machine->func_type == TYPE_NORMAL) |
1979 | && !TARGET_MMX((global_options.x_ix86_isa_flags & (1UL << 36)) != 0) |
1980 | && !TARGET_IAMCU((global_options.x_target_flags & (1U << 12)) != 0)) |
1981 | { |
1982 | static bool warnedmmx; |
1983 | static bool warnedmmx_ret; |
1984 | |
1985 | if (cum && cum->warn_mmx && !warnedmmx) |
1986 | { |
1987 | if (warning (OPT_Wpsabi, "MMX vector argument " |
1988 | "without MMX enabled changes the ABI")) |
1989 | warnedmmx = true; |
1990 | } |
1991 | else if (in_return && !warnedmmx_ret) |
1992 | { |
1993 | if (warning (OPT_Wpsabi, "MMX vector return " |
1994 | "without MMX enabled changes the ABI")) |
1995 | warnedmmx_ret = true; |
1996 | } |
1997 | } |
1998 | return mode; |
1999 | } |
2000 | |
2001 | gcc_unreachable ()(fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 2001, __FUNCTION__)); |
2002 | } |
2003 | } |
2004 | |
2005 | return mode; |
2006 | } |
2007 | |
2008 | /* We want to pass a value in REGNO whose "natural" mode is MODE. However, |
2009 | this may not agree with the mode that the type system has chosen for the |
2010 | register, which is ORIG_MODE. If ORIG_MODE is not BLKmode, then we can |
2011 | go ahead and use it. Otherwise we have to build a PARALLEL instead. */ |
2012 | |
2013 | static rtx |
2014 | gen_reg_or_parallel (machine_mode mode, machine_mode orig_mode, |
2015 | unsigned int regno) |
2016 | { |
2017 | rtx tmp; |
2018 | |
2019 | if (orig_mode != BLKmode((void) 0, E_BLKmode)) |
2020 | tmp = gen_rtx_REG (orig_mode, regno); |
2021 | else |
2022 | { |
2023 | tmp = gen_rtx_REG (mode, regno); |
2024 | tmp = gen_rtx_EXPR_LIST (VOIDmode((void) 0, E_VOIDmode), tmp, const0_rtx(const_int_rtx[64])); |
2025 | tmp = gen_rtx_PARALLEL (orig_mode, gen_rtvec (1, tmp))gen_rtx_fmt_E_stat ((PARALLEL), ((orig_mode)), ((gen_rtvec (1 , tmp))) ); |
2026 | } |
2027 | |
2028 | return tmp; |
2029 | } |
2030 | |
2031 | /* x86-64 register passing implementation. See x86-64 ABI for details. Goal |
2032 | of this code is to classify each 8bytes of incoming argument by the register |
2033 | class and assign registers accordingly. */ |
2034 | |
2035 | /* Return the union class of CLASS1 and CLASS2. |
2036 | See the x86-64 PS ABI for details. */ |
2037 | |
2038 | static enum x86_64_reg_class |
2039 | merge_classes (enum x86_64_reg_class class1, enum x86_64_reg_class class2) |
2040 | { |
2041 | /* Rule #1: If both classes are equal, this is the resulting class. */ |
2042 | if (class1 == class2) |
2043 | return class1; |
2044 | |
2045 | /* Rule #2: If one of the classes is NO_CLASS, the resulting class is |
2046 | the other class. */ |
2047 | if (class1 == X86_64_NO_CLASS) |
2048 | return class2; |
2049 | if (class2 == X86_64_NO_CLASS) |
2050 | return class1; |
2051 | |
2052 | /* Rule #3: If one of the classes is MEMORY, the result is MEMORY. */ |
2053 | if (class1 == X86_64_MEMORY_CLASS || class2 == X86_64_MEMORY_CLASS) |
2054 | return X86_64_MEMORY_CLASS; |
2055 | |
2056 | /* Rule #4: If one of the classes is INTEGER, the result is INTEGER. */ |
2057 | if ((class1 == X86_64_INTEGERSI_CLASS |
2058 | && (class2 == X86_64_SSESF_CLASS || class2 == X86_64_SSEHF_CLASS)) |
2059 | || (class2 == X86_64_INTEGERSI_CLASS |
2060 | && (class1 == X86_64_SSESF_CLASS || class1 == X86_64_SSEHF_CLASS))) |
2061 | return X86_64_INTEGERSI_CLASS; |
2062 | if (class1 == X86_64_INTEGER_CLASS || class1 == X86_64_INTEGERSI_CLASS |
2063 | || class2 == X86_64_INTEGER_CLASS || class2 == X86_64_INTEGERSI_CLASS) |
2064 | return X86_64_INTEGER_CLASS; |
2065 | |
2066 | /* Rule #5: If one of the classes is X87, X87UP, or COMPLEX_X87 class, |
2067 | MEMORY is used. */ |
2068 | if (class1 == X86_64_X87_CLASS |
2069 | || class1 == X86_64_X87UP_CLASS |
2070 | || class1 == X86_64_COMPLEX_X87_CLASS |
2071 | || class2 == X86_64_X87_CLASS |
2072 | || class2 == X86_64_X87UP_CLASS |
2073 | || class2 == X86_64_COMPLEX_X87_CLASS) |
2074 | return X86_64_MEMORY_CLASS; |
2075 | |
2076 | /* Rule #6: Otherwise class SSE is used. */ |
2077 | return X86_64_SSE_CLASS; |
2078 | } |
2079 | |
2080 | /* Classify the argument of type TYPE and mode MODE. |
2081 | CLASSES will be filled by the register class used to pass each word |
2082 | of the operand. The number of words is returned. In case the parameter |
2083 | should be passed in memory, 0 is returned. As a special case for zero |
2084 | sized containers, classes[0] will be NO_CLASS and 1 is returned. |
2085 | |
2086 | BIT_OFFSET is used internally for handling records and specifies offset |
2087 | of the offset in bits modulo 512 to avoid overflow cases. |
2088 | |
2089 | See the x86-64 PS ABI for details. |
2090 | */ |
2091 | |
2092 | static int |
2093 | classify_argument (machine_mode mode, const_tree type, |
2094 | enum x86_64_reg_class classes[MAX_CLASSES8], int bit_offset, |
2095 | int &zero_width_bitfields) |
2096 | { |
2097 | HOST_WIDE_INTlong bytes |
2098 | = mode == BLKmode((void) 0, E_BLKmode) ? int_size_in_bytes (type) : (int) GET_MODE_SIZE (mode)((unsigned short) mode_to_bytes (mode).coeffs[0]); |
2099 | int words = CEIL (bytes + (bit_offset % 64) / 8, UNITS_PER_WORD)(((bytes + (bit_offset % 64) / 8) + ((((global_options.x_ix86_isa_flags & (1UL << 1)) != 0) ? 8 : 4)) - 1) / ((((global_options .x_ix86_isa_flags & (1UL << 1)) != 0) ? 8 : 4))); |
2100 | |
2101 | /* Variable sized entities are always passed/returned in memory. */ |
2102 | if (bytes < 0) |
2103 | return 0; |
2104 | |
2105 | if (mode != VOIDmode((void) 0, E_VOIDmode)) |
2106 | { |
2107 | /* The value of "named" doesn't matter. */ |
2108 | function_arg_info arg (const_cast<tree> (type), mode, /*named=*/true); |
2109 | if (targetm.calls.must_pass_in_stack (arg)) |
2110 | return 0; |
2111 | } |
2112 | |
2113 | if (type && AGGREGATE_TYPE_P (type)(((enum tree_code) (type)->base.code) == ARRAY_TYPE || ((( enum tree_code) (type)->base.code) == RECORD_TYPE || ((enum tree_code) (type)->base.code) == UNION_TYPE || ((enum tree_code ) (type)->base.code) == QUAL_UNION_TYPE))) |
2114 | { |
2115 | int i; |
2116 | tree field; |
2117 | enum x86_64_reg_class subclasses[MAX_CLASSES8]; |
2118 | |
2119 | /* On x86-64 we pass structures larger than 64 bytes on the stack. */ |
2120 | if (bytes > 64) |
2121 | return 0; |
2122 | |
2123 | for (i = 0; i < words; i++) |
2124 | classes[i] = X86_64_NO_CLASS; |
2125 | |
2126 | /* Zero sized arrays or structures are NO_CLASS. We return 0 to |
2127 | signalize memory class, so handle it as special case. */ |
2128 | if (!words) |
2129 | { |
2130 | classes[0] = X86_64_NO_CLASS; |
2131 | return 1; |
2132 | } |
2133 | |
2134 | /* Classify each field of record and merge classes. */ |
2135 | switch (TREE_CODE (type)((enum tree_code) (type)->base.code)) |
2136 | { |
2137 | case RECORD_TYPE: |
2138 | /* And now merge the fields of structure. */ |
2139 | for (field = TYPE_FIELDS (type)((tree_check3 ((type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 2139, __FUNCTION__, (RECORD_TYPE), (UNION_TYPE), (QUAL_UNION_TYPE )))->type_non_common.values); field; field = DECL_CHAIN (field)(((contains_struct_check (((contains_struct_check ((field), ( TS_DECL_MINIMAL), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 2139, __FUNCTION__))), (TS_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 2139, __FUNCTION__))->common.chain))) |
2140 | { |
2141 | if (TREE_CODE (field)((enum tree_code) (field)->base.code) == FIELD_DECL) |
2142 | { |
2143 | int num; |
2144 | |
2145 | if (TREE_TYPE (field)((contains_struct_check ((field), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 2145, __FUNCTION__))->typed.type) == error_mark_nodeglobal_trees[TI_ERROR_MARK]) |
2146 | continue; |
2147 | |
2148 | /* Bitfields are always classified as integer. Handle them |
2149 | early, since later code would consider them to be |
2150 | misaligned integers. */ |
2151 | if (DECL_BIT_FIELD (field)((tree_check ((field), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 2151, __FUNCTION__, (FIELD_DECL)))->decl_common.decl_flag_1 )) |
2152 | { |
2153 | if (integer_zerop (DECL_SIZE (field)((contains_struct_check ((field), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 2153, __FUNCTION__))->decl_common.size))) |
2154 | { |
2155 | if (DECL_FIELD_CXX_ZERO_WIDTH_BIT_FIELD (field)(((tree_check ((field), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 2155, __FUNCTION__, (FIELD_DECL)))->decl_common.decl_flag_1 ) && (field)->decl_common.decl_flag_0)) |
2156 | continue; |
2157 | if (zero_width_bitfields != 2) |
2158 | { |
2159 | zero_width_bitfields = 1; |
2160 | continue; |
2161 | } |
2162 | } |
2163 | for (i = (int_bit_position (field) |
2164 | + (bit_offset % 64)) / 8 / 8; |
2165 | i < ((int_bit_position (field) + (bit_offset % 64)) |
2166 | + tree_to_shwi (DECL_SIZE (field)((contains_struct_check ((field), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 2166, __FUNCTION__))->decl_common.size)) |
2167 | + 63) / 8 / 8; i++) |
2168 | classes[i] |
2169 | = merge_classes (X86_64_INTEGER_CLASS, classes[i]); |
2170 | } |
2171 | else |
2172 | { |
2173 | int pos; |
2174 | |
2175 | type = TREE_TYPE (field)((contains_struct_check ((field), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 2175, __FUNCTION__))->typed.type); |
2176 | |
2177 | /* Flexible array member is ignored. */ |
2178 | if (TYPE_MODE (type)((((enum tree_code) ((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 2178, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode (type) : (type)->type_common.mode) == BLKmode((void) 0, E_BLKmode) |
2179 | && TREE_CODE (type)((enum tree_code) (type)->base.code) == ARRAY_TYPE |
2180 | && TYPE_SIZE (type)((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 2180, __FUNCTION__))->type_common.size) == NULL_TREE(tree) __null |
2181 | && TYPE_DOMAIN (type)((tree_check ((type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 2181, __FUNCTION__, (ARRAY_TYPE)))->type_non_common.values ) != NULL_TREE(tree) __null |
2182 | && (TYPE_MAX_VALUE (TYPE_DOMAIN (type))((tree_check5 ((((tree_check ((type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 2182, __FUNCTION__, (ARRAY_TYPE)))->type_non_common.values )), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 2182, __FUNCTION__, (INTEGER_TYPE), (ENUMERAL_TYPE), (BOOLEAN_TYPE ), (REAL_TYPE), (FIXED_POINT_TYPE)))->type_non_common.maxval ) |
2183 | == NULL_TREE(tree) __null)) |
2184 | { |
2185 | static bool warned; |
2186 | |
2187 | if (!warned && warn_psabiglobal_options.x_warn_psabi) |
2188 | { |
2189 | warned = true; |
2190 | inform (input_location, |
2191 | "the ABI of passing struct with" |
2192 | " a flexible array member has" |
2193 | " changed in GCC 4.4"); |
2194 | } |
2195 | continue; |
2196 | } |
2197 | num = classify_argument (TYPE_MODE (type)((((enum tree_code) ((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 2197, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode (type) : (type)->type_common.mode), type, |
2198 | subclasses, |
2199 | (int_bit_position (field) |
2200 | + bit_offset) % 512, |
2201 | zero_width_bitfields); |
2202 | if (!num) |
2203 | return 0; |
2204 | pos = (int_bit_position (field) |
2205 | + (bit_offset % 64)) / 8 / 8; |
2206 | for (i = 0; i < num && (i + pos) < words; i++) |
2207 | classes[i + pos] |
2208 | = merge_classes (subclasses[i], classes[i + pos]); |
2209 | } |
2210 | } |
2211 | } |
2212 | break; |
2213 | |
2214 | case ARRAY_TYPE: |
2215 | /* Arrays are handled as small records. */ |
2216 | { |
2217 | int num; |
2218 | num = classify_argument (TYPE_MODE (TREE_TYPE (type))((((enum tree_code) ((tree_class_check ((((contains_struct_check ((type), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 2218, __FUNCTION__))->typed.type)), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 2218, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode (((contains_struct_check ((type), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 2218, __FUNCTION__))->typed.type)) : (((contains_struct_check ((type), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 2218, __FUNCTION__))->typed.type))->type_common.mode), |
2219 | TREE_TYPE (type)((contains_struct_check ((type), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 2219, __FUNCTION__))->typed.type), subclasses, bit_offset, |
2220 | zero_width_bitfields); |
2221 | if (!num) |
2222 | return 0; |
2223 | |
2224 | /* The partial classes are now full classes. */ |
2225 | if (subclasses[0] == X86_64_SSESF_CLASS && bytes != 4) |
2226 | subclasses[0] = X86_64_SSE_CLASS; |
2227 | if (subclasses[0] == X86_64_SSEHF_CLASS && bytes != 2) |
2228 | subclasses[0] = X86_64_SSE_CLASS; |
2229 | if (subclasses[0] == X86_64_INTEGERSI_CLASS |
2230 | && !((bit_offset % 64) == 0 && bytes == 4)) |
2231 | subclasses[0] = X86_64_INTEGER_CLASS; |
2232 | |
2233 | for (i = 0; i < words; i++) |
2234 | classes[i] = subclasses[i % num]; |
2235 | |
2236 | break; |
2237 | } |
2238 | case UNION_TYPE: |
2239 | case QUAL_UNION_TYPE: |
2240 | /* Unions are similar to RECORD_TYPE but offset is always 0. |
2241 | */ |
2242 | for (field = TYPE_FIELDS (type)((tree_check3 ((type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 2242, __FUNCTION__, (RECORD_TYPE), (UNION_TYPE), (QUAL_UNION_TYPE )))->type_non_common.values); field; field = DECL_CHAIN (field)(((contains_struct_check (((contains_struct_check ((field), ( TS_DECL_MINIMAL), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 2242, __FUNCTION__))), (TS_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 2242, __FUNCTION__))->common.chain))) |
2243 | { |
2244 | if (TREE_CODE (field)((enum tree_code) (field)->base.code) == FIELD_DECL) |
2245 | { |
2246 | int num; |
2247 | |
2248 | if (TREE_TYPE (field)((contains_struct_check ((field), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 2248, __FUNCTION__))->typed.type) == error_mark_nodeglobal_trees[TI_ERROR_MARK]) |
2249 | continue; |
2250 | |
2251 | num = classify_argument (TYPE_MODE (TREE_TYPE (field))((((enum tree_code) ((tree_class_check ((((contains_struct_check ((field), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 2251, __FUNCTION__))->typed.type)), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 2251, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode (((contains_struct_check ((field), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 2251, __FUNCTION__))->typed.type)) : (((contains_struct_check ((field), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 2251, __FUNCTION__))->typed.type))->type_common.mode), |
2252 | TREE_TYPE (field)((contains_struct_check ((field), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 2252, __FUNCTION__))->typed.type), subclasses, |
2253 | bit_offset, zero_width_bitfields); |
2254 | if (!num) |
2255 | return 0; |
2256 | for (i = 0; i < num && i < words; i++) |
2257 | classes[i] = merge_classes (subclasses[i], classes[i]); |
2258 | } |
2259 | } |
2260 | break; |
2261 | |
2262 | default: |
2263 | gcc_unreachable ()(fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 2263, __FUNCTION__)); |
2264 | } |
2265 | |
2266 | if (words > 2) |
2267 | { |
2268 | /* When size > 16 bytes, if the first one isn't |
2269 | X86_64_SSE_CLASS or any other ones aren't |
2270 | X86_64_SSEUP_CLASS, everything should be passed in |
2271 | memory. */ |
2272 | if (classes[0] != X86_64_SSE_CLASS) |
2273 | return 0; |
2274 | |
2275 | for (i = 1; i < words; i++) |
2276 | if (classes[i] != X86_64_SSEUP_CLASS) |
2277 | return 0; |
2278 | } |
2279 | |
2280 | /* Final merger cleanup. */ |
2281 | for (i = 0; i < words; i++) |
2282 | { |
2283 | /* If one class is MEMORY, everything should be passed in |
2284 | memory. */ |
2285 | if (classes[i] == X86_64_MEMORY_CLASS) |
2286 | return 0; |
2287 | |
2288 | /* The X86_64_SSEUP_CLASS should be always preceded by |
2289 | X86_64_SSE_CLASS or X86_64_SSEUP_CLASS. */ |
2290 | if (classes[i] == X86_64_SSEUP_CLASS |
2291 | && classes[i - 1] != X86_64_SSE_CLASS |
2292 | && classes[i - 1] != X86_64_SSEUP_CLASS) |
2293 | { |
2294 | /* The first one should never be X86_64_SSEUP_CLASS. */ |
2295 | gcc_assert (i != 0)((void)(!(i != 0) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 2295, __FUNCTION__), 0 : 0)); |
2296 | classes[i] = X86_64_SSE_CLASS; |
2297 | } |
2298 | |
2299 | /* If X86_64_X87UP_CLASS isn't preceded by X86_64_X87_CLASS, |
2300 | everything should be passed in memory. */ |
2301 | if (classes[i] == X86_64_X87UP_CLASS |
2302 | && (classes[i - 1] != X86_64_X87_CLASS)) |
2303 | { |
2304 | static bool warned; |
2305 | |
2306 | /* The first one should never be X86_64_X87UP_CLASS. */ |
2307 | gcc_assert (i != 0)((void)(!(i != 0) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 2307, __FUNCTION__), 0 : 0)); |
2308 | if (!warned && warn_psabiglobal_options.x_warn_psabi) |
2309 | { |
2310 | warned = true; |
2311 | inform (input_location, |
2312 | "the ABI of passing union with %<long double%>" |
2313 | " has changed in GCC 4.4"); |
2314 | } |
2315 | return 0; |
2316 | } |
2317 | } |
2318 | return words; |
2319 | } |
2320 | |
2321 | /* Compute alignment needed. We align all types to natural boundaries with |
2322 | exception of XFmode that is aligned to 64bits. */ |
2323 | if (mode != VOIDmode((void) 0, E_VOIDmode) && mode != BLKmode((void) 0, E_BLKmode)) |
2324 | { |
2325 | int mode_alignment = GET_MODE_BITSIZE (mode)((unsigned short) mode_to_bits (mode).coeffs[0]); |
2326 | |
2327 | if (mode == XFmode(scalar_float_mode ((scalar_float_mode::from_int) E_XFmode))) |
2328 | mode_alignment = 128; |
2329 | else if (mode == XCmode(complex_mode ((complex_mode::from_int) E_XCmode))) |
2330 | mode_alignment = 256; |
2331 | if (COMPLEX_MODE_P (mode)(((enum mode_class) mode_class[mode]) == MODE_COMPLEX_INT || ( (enum mode_class) mode_class[mode]) == MODE_COMPLEX_FLOAT)) |
2332 | mode_alignment /= 2; |
2333 | /* Misaligned fields are always returned in memory. */ |
2334 | if (bit_offset % mode_alignment) |
2335 | return 0; |
2336 | } |
2337 | |
2338 | /* for V1xx modes, just use the base mode */ |
2339 | if (VECTOR_MODE_P (mode)(((enum mode_class) mode_class[mode]) == MODE_VECTOR_BOOL || ( (enum mode_class) mode_class[mode]) == MODE_VECTOR_INT || ((enum mode_class) mode_class[mode]) == MODE_VECTOR_FLOAT || ((enum mode_class) mode_class[mode]) == MODE_VECTOR_FRACT || ((enum mode_class) mode_class[mode]) == MODE_VECTOR_UFRACT || ((enum mode_class) mode_class[mode]) == MODE_VECTOR_ACCUM || ((enum mode_class) mode_class[mode]) == MODE_VECTOR_UACCUM) && mode != V1DImode((void) 0, E_V1DImode) && mode != V1TImode((void) 0, E_V1TImode) |
2340 | && GET_MODE_UNIT_SIZE (mode)mode_to_unit_size (mode) == bytes) |
2341 | mode = GET_MODE_INNER (mode)(mode_to_inner (mode)); |
2342 | |
2343 | /* Classification of atomic types. */ |
2344 | switch (mode) |
2345 | { |
2346 | case E_SDmode: |
2347 | case E_DDmode: |
2348 | classes[0] = X86_64_SSE_CLASS; |
2349 | return 1; |
2350 | case E_TDmode: |
2351 | classes[0] = X86_64_SSE_CLASS; |
2352 | classes[1] = X86_64_SSEUP_CLASS; |
2353 | return 2; |
2354 | case E_DImode: |
2355 | case E_SImode: |
2356 | case E_HImode: |
2357 | case E_QImode: |
2358 | case E_CSImode: |
2359 | case E_CHImode: |
2360 | case E_CQImode: |
2361 | { |
2362 | int size = bit_offset + (int) GET_MODE_BITSIZE (mode)((unsigned short) mode_to_bits (mode).coeffs[0]); |
2363 | |
2364 | /* Analyze last 128 bits only. */ |
2365 | size = (size - 1) & 0x7f; |
2366 | |
2367 | if (size < 32) |
2368 | { |
2369 | classes[0] = X86_64_INTEGERSI_CLASS; |
2370 | return 1; |
2371 | } |
2372 | else if (size < 64) |
2373 | { |
2374 | classes[0] = X86_64_INTEGER_CLASS; |
2375 | return 1; |
2376 | } |
2377 | else if (size < 64+32) |
2378 | { |
2379 | classes[0] = X86_64_INTEGER_CLASS; |
2380 | classes[1] = X86_64_INTEGERSI_CLASS; |
2381 | return 2; |
2382 | } |
2383 | else if (size < 64+64) |
2384 | { |
2385 | classes[0] = classes[1] = X86_64_INTEGER_CLASS; |
2386 | return 2; |
2387 | } |
2388 | else |
2389 | gcc_unreachable ()(fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 2389, __FUNCTION__)); |
2390 | } |
2391 | case E_CDImode: |
2392 | case E_TImode: |
2393 | classes[0] = classes[1] = X86_64_INTEGER_CLASS; |
2394 | return 2; |
2395 | case E_COImode: |
2396 | case E_OImode: |
2397 | /* OImode shouldn't be used directly. */ |
2398 | gcc_unreachable ()(fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 2398, __FUNCTION__)); |
2399 | case E_CTImode: |
2400 | return 0; |
2401 | case E_HFmode: |
2402 | case E_BFmode: |
2403 | if (!(bit_offset % 64)) |
2404 | classes[0] = X86_64_SSEHF_CLASS; |
2405 | else |
2406 | classes[0] = X86_64_SSE_CLASS; |
2407 | return 1; |
2408 | case E_SFmode: |
2409 | if (!(bit_offset % 64)) |
2410 | classes[0] = X86_64_SSESF_CLASS; |
2411 | else |
2412 | classes[0] = X86_64_SSE_CLASS; |
2413 | return 1; |
2414 | case E_DFmode: |
2415 | classes[0] = X86_64_SSEDF_CLASS; |
2416 | return 1; |
2417 | case E_XFmode: |
2418 | classes[0] = X86_64_X87_CLASS; |
2419 | classes[1] = X86_64_X87UP_CLASS; |
2420 | return 2; |
2421 | case E_TFmode: |
2422 | classes[0] = X86_64_SSE_CLASS; |
2423 | classes[1] = X86_64_SSEUP_CLASS; |
2424 | return 2; |
2425 | case E_HCmode: |
2426 | case E_BCmode: |
2427 | classes[0] = X86_64_SSE_CLASS; |
2428 | if (!(bit_offset % 64)) |
2429 | return 1; |
2430 | else |
2431 | { |
2432 | classes[1] = X86_64_SSEHF_CLASS; |
2433 | return 2; |
2434 | } |
2435 | case E_SCmode: |
2436 | classes[0] = X86_64_SSE_CLASS; |
2437 | if (!(bit_offset % 64)) |
2438 | return 1; |
2439 | else |
2440 | { |
2441 | static bool warned; |
2442 | |
2443 | if (!warned && warn_psabiglobal_options.x_warn_psabi) |
2444 | { |
2445 | warned = true; |
2446 | inform (input_location, |
2447 | "the ABI of passing structure with %<complex float%>" |
2448 | " member has changed in GCC 4.4"); |
2449 | } |
2450 | classes[1] = X86_64_SSESF_CLASS; |
2451 | return 2; |
2452 | } |
2453 | case E_DCmode: |
2454 | classes[0] = X86_64_SSEDF_CLASS; |
2455 | classes[1] = X86_64_SSEDF_CLASS; |
2456 | return 2; |
2457 | case E_XCmode: |
2458 | classes[0] = X86_64_COMPLEX_X87_CLASS; |
2459 | return 1; |
2460 | case E_TCmode: |
2461 | /* This modes is larger than 16 bytes. */ |
2462 | return 0; |
2463 | case E_V8SFmode: |
2464 | case E_V8SImode: |
2465 | case E_V32QImode: |
2466 | case E_V16HFmode: |
2467 | case E_V16BFmode: |
2468 | case E_V16HImode: |
2469 | case E_V4DFmode: |
2470 | case E_V4DImode: |
2471 | classes[0] = X86_64_SSE_CLASS; |
2472 | classes[1] = X86_64_SSEUP_CLASS; |
2473 | classes[2] = X86_64_SSEUP_CLASS; |
2474 | classes[3] = X86_64_SSEUP_CLASS; |
2475 | return 4; |
2476 | case E_V8DFmode: |
2477 | case E_V16SFmode: |
2478 | case E_V32HFmode: |
2479 | case E_V32BFmode: |
2480 | case E_V8DImode: |
2481 | case E_V16SImode: |
2482 | case E_V32HImode: |
2483 | case E_V64QImode: |
2484 | classes[0] = X86_64_SSE_CLASS; |
2485 | classes[1] = X86_64_SSEUP_CLASS; |
2486 | classes[2] = X86_64_SSEUP_CLASS; |
2487 | classes[3] = X86_64_SSEUP_CLASS; |
2488 | classes[4] = X86_64_SSEUP_CLASS; |
2489 | classes[5] = X86_64_SSEUP_CLASS; |
2490 | classes[6] = X86_64_SSEUP_CLASS; |
2491 | classes[7] = X86_64_SSEUP_CLASS; |
2492 | return 8; |
2493 | case E_V4SFmode: |
2494 | case E_V4SImode: |
2495 | case E_V16QImode: |
2496 | case E_V8HImode: |
2497 | case E_V8HFmode: |
2498 | case E_V8BFmode: |
2499 | case E_V2DFmode: |
2500 | case E_V2DImode: |
2501 | classes[0] = X86_64_SSE_CLASS; |
2502 | classes[1] = X86_64_SSEUP_CLASS; |
2503 | return 2; |
2504 | case E_V1TImode: |
2505 | case E_V1DImode: |
2506 | case E_V2SFmode: |
2507 | case E_V2SImode: |
2508 | case E_V4HImode: |
2509 | case E_V4HFmode: |
2510 | case E_V4BFmode: |
2511 | case E_V2HFmode: |
2512 | case E_V2BFmode: |
2513 | case E_V8QImode: |
2514 | classes[0] = X86_64_SSE_CLASS; |
2515 | return 1; |
2516 | case E_BLKmode: |
2517 | case E_VOIDmode: |
2518 | return 0; |
2519 | default: |
2520 | gcc_assert (VECTOR_MODE_P (mode))((void)(!((((enum mode_class) mode_class[mode]) == MODE_VECTOR_BOOL || ((enum mode_class) mode_class[mode]) == MODE_VECTOR_INT || ((enum mode_class) mode_class[mode]) == MODE_VECTOR_FLOAT || ((enum mode_class) mode_class[mode]) == MODE_VECTOR_FRACT || ((enum mode_class) mode_class[mode]) == MODE_VECTOR_UFRACT || ((enum mode_class) mode_class[mode]) == MODE_VECTOR_ACCUM || ((enum mode_class) mode_class[mode]) == MODE_VECTOR_UACCUM)) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 2520, __FUNCTION__), 0 : 0)); |
2521 | |
2522 | if (bytes > 16) |
2523 | return 0; |
2524 | |
2525 | gcc_assert (GET_MODE_CLASS (GET_MODE_INNER (mode)) == MODE_INT)((void)(!(((enum mode_class) mode_class[(mode_to_inner (mode) )]) == MODE_INT) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 2525, __FUNCTION__), 0 : 0)); |
2526 | |
2527 | if (bit_offset + GET_MODE_BITSIZE (mode)((unsigned short) mode_to_bits (mode).coeffs[0]) <= 32) |
2528 | classes[0] = X86_64_INTEGERSI_CLASS; |
2529 | else |
2530 | classes[0] = X86_64_INTEGER_CLASS; |
2531 | classes[1] = X86_64_INTEGER_CLASS; |
2532 | return 1 + (bytes > 8); |
2533 | } |
2534 | } |
2535 | |
2536 | /* Wrapper around classify_argument with the extra zero_width_bitfields |
2537 | argument, to diagnose GCC 12.1 ABI differences for C. */ |
2538 | |
2539 | static int |
2540 | classify_argument (machine_mode mode, const_tree type, |
2541 | enum x86_64_reg_class classes[MAX_CLASSES8], int bit_offset) |
2542 | { |
2543 | int zero_width_bitfields = 0; |
2544 | static bool warned = false; |
2545 | int n = classify_argument (mode, type, classes, bit_offset, |
2546 | zero_width_bitfields); |
2547 | if (!zero_width_bitfields || warned || !warn_psabiglobal_options.x_warn_psabi) |
2548 | return n; |
2549 | enum x86_64_reg_class alt_classes[MAX_CLASSES8]; |
2550 | zero_width_bitfields = 2; |
2551 | if (classify_argument (mode, type, alt_classes, bit_offset, |
2552 | zero_width_bitfields) != n) |
2553 | zero_width_bitfields = 3; |
2554 | else |
2555 | for (int i = 0; i < n; i++) |
2556 | if (classes[i] != alt_classes[i]) |
2557 | { |
2558 | zero_width_bitfields = 3; |
2559 | break; |
2560 | } |
2561 | if (zero_width_bitfields == 3) |
2562 | { |
2563 | warned = true; |
2564 | const char *url |
2565 | = CHANGES_ROOT_URL"https://gcc.gnu.org/" "gcc-12/changes.html#zero_width_bitfields"; |
2566 | |
2567 | inform (input_location, |
2568 | "the ABI of passing C structures with zero-width bit-fields" |
2569 | " has changed in GCC %{12.1%}", url); |
2570 | } |
2571 | return n; |
2572 | } |
2573 | |
2574 | /* Examine the argument and return set number of register required in each |
2575 | class. Return true iff parameter should be passed in memory. */ |
2576 | |
/* Count how many integer (*INT_NREGS) and SSE (*SSE_NREGS) registers the
   argument of (MODE, TYPE) needs under the x86-64 ABI classification.
   IN_RETURN is non-zero when classifying a return value, in which case x87
   classes are acceptable.  Returns true iff the value must be passed in
   memory (no classification, or x87 classes in a non-return position).  */
2577 | static bool
2578 | examine_argument (machine_mode mode, const_tree type, int in_return,
2579 | int *int_nregs, int *sse_nregs)
2580 | {
2581 | enum x86_64_reg_class regclass[MAX_CLASSES8];
2582 | int n = classify_argument (mode, type, regclass, 0);
2583 |
2584 | *int_nregs = 0;
2585 | *sse_nregs = 0;
2586 |
/* n == 0: classification failed, pass in memory.  */
2587 | if (!n)
2588 | return true;
/* Tally each eightbyte's class; SSEUP/NO_CLASS consume no extra register.  */
2589 | for (n--; n >= 0; n--)
2590 | switch (regclass[n])
2591 | {
2592 | case X86_64_INTEGER_CLASS:
2593 | case X86_64_INTEGERSI_CLASS:
2594 | (*int_nregs)++;
2595 | break;
2596 | case X86_64_SSE_CLASS:
2597 | case X86_64_SSEHF_CLASS:
2598 | case X86_64_SSESF_CLASS:
2599 | case X86_64_SSEDF_CLASS:
2600 | (*sse_nregs)++;
2601 | break;
2602 | case X86_64_NO_CLASS:
2603 | case X86_64_SSEUP_CLASS:
2604 | break;
2605 | case X86_64_X87_CLASS:
2606 | case X86_64_X87UP_CLASS:
2607 | case X86_64_COMPLEX_X87_CLASS:
/* x87 registers are only used for return values.  */
2608 | if (!in_return)
2609 | return true;
2610 | break;
/* MEMORY_CLASS is folded into n == 0 by classify_argument, so it cannot
   appear here.  */
2611 | case X86_64_MEMORY_CLASS:
2612 | gcc_unreachable ()(fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 2612, __FUNCTION__));
2613 | }
2614 |
2615 | return false;
2616 | }
2617 | |
2618 | /* Construct container for the argument used by GCC interface. See |
2619 | FUNCTION_ARG for the detailed description. */ |
2620 | |
/* Build the RTX describing where an argument (or return value, when
   IN_RETURN) of (MODE, ORIG_MODE, TYPE) lives: a single REG, or a PARALLEL
   of (REG, offset) pairs spread over the integer registers INTREG[] and SSE
   registers starting at SSE_REGNO.  Returns NULL when the value goes in
   memory, doesn't fit in the NINTREGS/NSSEREGS still available, or an
   SSE/x87 register is required but disabled (diagnosed once via the static
   issued_*_error flags below).  */
2621 | static rtx
2622 | construct_container (machine_mode mode, machine_mode orig_mode,
2623 | const_tree type, int in_return, int nintregs, int nsseregs,
2624 | const int *intreg, int sse_regno)
2625 | {
2626 | /* The following variables hold the static issued_error state. */
2627 | static bool issued_sse_arg_error;
2628 | static bool issued_sse_ret_error;
2629 | static bool issued_x87_ret_error;
2630 |
2631 | machine_mode tmpmode;
2632 | int bytes
2633 | = mode == BLKmode((void) 0, E_BLKmode) ? int_size_in_bytes (type) : (int) GET_MODE_SIZE (mode)((unsigned short) mode_to_bytes (mode).coeffs[0]);
2634 | enum x86_64_reg_class regclass[MAX_CLASSES8];
2635 | int n;
2636 | int i;
2637 | int nexps = 0;
2638 | int needed_sseregs, needed_intregs;
2639 | rtx exp[MAX_CLASSES8];
2640 | rtx ret;
2641 |
2642 | n = classify_argument (mode, type, regclass, 0);
2643 | if (!n)
2644 | return NULL__null;
2645 | if (examine_argument (mode, type, in_return, &needed_intregs,
2646 | &needed_sseregs))
2647 | return NULL__null;
2648 | if (needed_intregs > nintregs || needed_sseregs > nsseregs)
2649 | return NULL__null;
2650 |
2651 | /* We allowed the user to turn off SSE for kernel mode. Don't crash if
2652 | some less clueful developer tries to use floating-point anyway. */
2653 | if (needed_sseregs && !TARGET_SSE((global_options.x_ix86_isa_flags & (1UL << 50)) != 0))
2654 | {
2655 | /* Return early if we shouldn't raise an error for invalid
2656 | calls. */
2657 | if (cfun(cfun + 0) != NULL__null && cfun(cfun + 0)->machine->silent_p)
2658 | return NULL__null;
2659 | if (in_return)
2660 | {
2661 | if (!issued_sse_ret_error)
2662 | {
2663 | error ("SSE register return with SSE disabled");
2664 | issued_sse_ret_error = true;
2665 | }
2666 | }
2667 | else if (!issued_sse_arg_error)
2668 | {
2669 | error ("SSE register argument with SSE disabled");
2670 | issued_sse_arg_error = true;
2671 | }
2672 | return NULL__null;
2673 | }
2674 |
2675 | /* Likewise, error if the ABI requires us to return values in the
2676 | x87 registers and the user specified -mno-80387. */
2677 | if (!TARGET_FLOAT_RETURNS_IN_80387(((global_options.x_target_flags & (1U << 11)) != 0 ) && ((global_options.x_target_flags & (1U << 1)) != 0) && !((global_options.x_target_flags & ( 1U << 12)) != 0)) && in_return)
2678 | for (i = 0; i < n; i++)
2679 | if (regclass[i] == X86_64_X87_CLASS
2680 | || regclass[i] == X86_64_X87UP_CLASS
2681 | || regclass[i] == X86_64_COMPLEX_X87_CLASS)
2682 | {
2683 | /* Return early if we shouldn't raise an error for invalid
2684 | calls. */
2685 | if (cfun(cfun + 0) != NULL__null && cfun(cfun + 0)->machine->silent_p)
2686 | return NULL__null;
2687 | if (!issued_x87_ret_error)
2688 | {
2689 | error ("x87 register return with x87 disabled");
2690 | issued_x87_ret_error = true;
2691 | }
2692 | return NULL__null;
2693 | }
2694 |
2695 | /* First construct simple cases. Avoid SCmode, since we want to use
2696 | single register to pass this type. */
2697 | if (n == 1 && mode != SCmode(complex_mode ((complex_mode::from_int) E_SCmode)) && mode != HCmode(complex_mode ((complex_mode::from_int) E_HCmode)))
2698 | switch (regclass[0])
2699 | {
2700 | case X86_64_INTEGER_CLASS:
2701 | case X86_64_INTEGERSI_CLASS:
2702 | return gen_rtx_REG (mode, intreg[0]);
2703 | case X86_64_SSE_CLASS:
2704 | case X86_64_SSEHF_CLASS:
2705 | case X86_64_SSESF_CLASS:
2706 | case X86_64_SSEDF_CLASS:
2707 | if (mode != BLKmode((void) 0, E_BLKmode))
2708 | return gen_reg_or_parallel (mode, orig_mode,
2709 | GET_SSE_REGNO (sse_regno)((sse_regno) < 8 ? 20 + (sse_regno) : (sse_regno) < 16 ? 44 + (sse_regno) - 8 : 52 + (sse_regno) - 16));
2710 | break;
2711 | case X86_64_X87_CLASS:
2712 | case X86_64_COMPLEX_X87_CLASS:
2713 | return gen_rtx_REG (mode, FIRST_STACK_REG8);
2714 | case X86_64_NO_CLASS:
2715 | /* Zero sized array, struct or class. */
2716 | return NULL__null;
2717 | default:
2718 | gcc_unreachable ()(fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 2718, __FUNCTION__));
2719 | }
/* SSE followed by 1/3/7 SSEUP eightbytes: a whole 128/256/512-bit vector
   in one SSE register.  */
2720 | if (n == 2
2721 | && regclass[0] == X86_64_SSE_CLASS
2722 | && regclass[1] == X86_64_SSEUP_CLASS
2723 | && mode != BLKmode((void) 0, E_BLKmode))
2724 | return gen_reg_or_parallel (mode, orig_mode,
2725 | GET_SSE_REGNO (sse_regno)((sse_regno) < 8 ? 20 + (sse_regno) : (sse_regno) < 16 ? 44 + (sse_regno) - 8 : 52 + (sse_regno) - 16));
2726 | if (n == 4
2727 | && regclass[0] == X86_64_SSE_CLASS
2728 | && regclass[1] == X86_64_SSEUP_CLASS
2729 | && regclass[2] == X86_64_SSEUP_CLASS
2730 | && regclass[3] == X86_64_SSEUP_CLASS
2731 | && mode != BLKmode((void) 0, E_BLKmode))
2732 | return gen_reg_or_parallel (mode, orig_mode,
2733 | GET_SSE_REGNO (sse_regno)((sse_regno) < 8 ? 20 + (sse_regno) : (sse_regno) < 16 ? 44 + (sse_regno) - 8 : 52 + (sse_regno) - 16));
2734 | if (n == 8
2735 | && regclass[0] == X86_64_SSE_CLASS
2736 | && regclass[1] == X86_64_SSEUP_CLASS
2737 | && regclass[2] == X86_64_SSEUP_CLASS
2738 | && regclass[3] == X86_64_SSEUP_CLASS
2739 | && regclass[4] == X86_64_SSEUP_CLASS
2740 | && regclass[5] == X86_64_SSEUP_CLASS
2741 | && regclass[6] == X86_64_SSEUP_CLASS
2742 | && regclass[7] == X86_64_SSEUP_CLASS
2743 | && mode != BLKmode((void) 0, E_BLKmode))
2744 | return gen_reg_or_parallel (mode, orig_mode,
2745 | GET_SSE_REGNO (sse_regno)((sse_regno) < 8 ? 20 + (sse_regno) : (sse_regno) < 16 ? 44 + (sse_regno) - 8 : 52 + (sse_regno) - 16));
/* long double: X87 + X87UP pair occupies one x87 stack register.  */
2746 | if (n == 2
2747 | && regclass[0] == X86_64_X87_CLASS
2748 | && regclass[1] == X86_64_X87UP_CLASS)
2749 | return gen_rtx_REG (XFmode(scalar_float_mode ((scalar_float_mode::from_int) E_XFmode)), FIRST_STACK_REG8);
2750 |
/* Two consecutive integer registers: representable as a single TImode (or
   native-mode) register reference.  */
2751 | if (n == 2
2752 | && regclass[0] == X86_64_INTEGER_CLASS
2753 | && regclass[1] == X86_64_INTEGER_CLASS
2754 | && (mode == CDImode(complex_mode ((complex_mode::from_int) E_CDImode)) || mode == TImode(scalar_int_mode ((scalar_int_mode::from_int) E_TImode)) || mode == BLKmode((void) 0, E_BLKmode))
2755 | && intreg[0] + 1 == intreg[1])
2756 | {
2757 | if (mode == BLKmode((void) 0, E_BLKmode))
2758 | {
2759 | /* Use TImode for BLKmode values in 2 integer registers. */
2760 | exp[0] = gen_rtx_EXPR_LIST (VOIDmode((void) 0, E_VOIDmode),
2761 | gen_rtx_REG (TImode(scalar_int_mode ((scalar_int_mode::from_int) E_TImode)), intreg[0]),
2762 | GEN_INT (0)gen_rtx_CONST_INT (((void) 0, E_VOIDmode), (0)));
2763 | ret = gen_rtx_PARALLEL (mode, rtvec_alloc (1))gen_rtx_fmt_E_stat ((PARALLEL), ((mode)), ((rtvec_alloc (1))) );
2764 | XVECEXP (ret, 0, 0)(((((ret)->u.fld[0]).rt_rtvec))->elem[0]) = exp[0];
2765 | return ret;
2766 | }
2767 | else
2768 | return gen_rtx_REG (mode, intreg[0]);
2769 | }
2770 |
2771 | /* Otherwise figure out the entries of the PARALLEL. */
2772 | for (i = 0; i < n; i++)
2773 | {
2774 | int pos;
2775 |
2776 | switch (regclass[i])
2777 | {
2778 | case X86_64_NO_CLASS:
2779 | break;
2780 | case X86_64_INTEGER_CLASS:
2781 | case X86_64_INTEGERSI_CLASS:
2782 | /* Merge TImodes on aligned occasions here too. */
2783 | if (i * 8 + 8 > bytes)
2784 | {
/* Trailing partial eightbyte: pick the smallest integer mode that
   covers the remaining bits, falling back to DImode.  */
2785 | unsigned int tmpbits = (bytes - i * 8) * BITS_PER_UNIT(8);
2786 | if (!int_mode_for_size (tmpbits, 0).exists (&tmpmode))
2787 | /* We've requested 24 bytes we
2788 | don't have mode for. Use DImode. */
2789 | tmpmode = DImode(scalar_int_mode ((scalar_int_mode::from_int) E_DImode));
2790 | }
2791 | else if (regclass[i] == X86_64_INTEGERSI_CLASS)
2792 | tmpmode = SImode(scalar_int_mode ((scalar_int_mode::from_int) E_SImode));
2793 | else
2794 | tmpmode = DImode(scalar_int_mode ((scalar_int_mode::from_int) E_DImode));
2795 | exp [nexps++]
2796 | = gen_rtx_EXPR_LIST (VOIDmode((void) 0, E_VOIDmode),
2797 | gen_rtx_REG (tmpmode, *intreg),
2798 | GEN_INT (i*8)gen_rtx_CONST_INT (((void) 0, E_VOIDmode), (i*8)));
2799 | intreg++;
2800 | break;
2801 | case X86_64_SSEHF_CLASS:
2802 | tmpmode = (mode == BFmode(scalar_float_mode ((scalar_float_mode::from_int) E_BFmode)) ? BFmode(scalar_float_mode ((scalar_float_mode::from_int) E_BFmode)) : HFmode(scalar_float_mode ((scalar_float_mode::from_int) E_HFmode)));
2803 | exp [nexps++]
2804 | = gen_rtx_EXPR_LIST (VOIDmode((void) 0, E_VOIDmode),
2805 | gen_rtx_REG (tmpmode,
2806 | GET_SSE_REGNO (sse_regno)((sse_regno) < 8 ? 20 + (sse_regno) : (sse_regno) < 16 ? 44 + (sse_regno) - 8 : 52 + (sse_regno) - 16)),
2807 | GEN_INT (i*8)gen_rtx_CONST_INT (((void) 0, E_VOIDmode), (i*8)));
2808 | sse_regno++;
2809 | break;
2810 | case X86_64_SSESF_CLASS:
2811 | exp [nexps++]
2812 | = gen_rtx_EXPR_LIST (VOIDmode((void) 0, E_VOIDmode),
2813 | gen_rtx_REG (SFmode(scalar_float_mode ((scalar_float_mode::from_int) E_SFmode)),
2814 | GET_SSE_REGNO (sse_regno)((sse_regno) < 8 ? 20 + (sse_regno) : (sse_regno) < 16 ? 44 + (sse_regno) - 8 : 52 + (sse_regno) - 16)),
2815 | GEN_INT (i*8)gen_rtx_CONST_INT (((void) 0, E_VOIDmode), (i*8)));
2816 | sse_regno++;
2817 | break;
2818 | case X86_64_SSEDF_CLASS:
2819 | exp [nexps++]
2820 | = gen_rtx_EXPR_LIST (VOIDmode((void) 0, E_VOIDmode),
2821 | gen_rtx_REG (DFmode(scalar_float_mode ((scalar_float_mode::from_int) E_DFmode)),
2822 | GET_SSE_REGNO (sse_regno)((sse_regno) < 8 ? 20 + (sse_regno) : (sse_regno) < 16 ? 44 + (sse_regno) - 8 : 52 + (sse_regno) - 16)),
2823 | GEN_INT (i*8)gen_rtx_CONST_INT (((void) 0, E_VOIDmode), (i*8)));
2824 | sse_regno++;
2825 | break;
2826 | case X86_64_SSE_CLASS:
/* SSE eightbyte possibly followed by SSEUP eightbytes: coalesce them
   into one wide register (TI/OI/XI mode), advancing I past the
   consumed SSEUP entries.  POS keeps the original byte offset.  */
2827 | pos = i;
2828 | switch (n)
2829 | {
2830 | case 1:
2831 | tmpmode = DImode(scalar_int_mode ((scalar_int_mode::from_int) E_DImode));
2832 | break;
2833 | case 2:
2834 | if (i == 0 && regclass[1] == X86_64_SSEUP_CLASS)
2835 | {
2836 | tmpmode = TImode(scalar_int_mode ((scalar_int_mode::from_int) E_TImode));
2837 | i++;
2838 | }
2839 | else
2840 | tmpmode = DImode(scalar_int_mode ((scalar_int_mode::from_int) E_DImode));
2841 | break;
2842 | case 4:
2843 | gcc_assert (i == 0((void)(!(i == 0 && regclass[1] == X86_64_SSEUP_CLASS && regclass[2] == X86_64_SSEUP_CLASS && regclass [3] == X86_64_SSEUP_CLASS) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 2846, __FUNCTION__), 0 : 0))
2844 | && regclass[1] == X86_64_SSEUP_CLASS((void)(!(i == 0 && regclass[1] == X86_64_SSEUP_CLASS && regclass[2] == X86_64_SSEUP_CLASS && regclass [3] == X86_64_SSEUP_CLASS) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 2846, __FUNCTION__), 0 : 0))
2845 | && regclass[2] == X86_64_SSEUP_CLASS((void)(!(i == 0 && regclass[1] == X86_64_SSEUP_CLASS && regclass[2] == X86_64_SSEUP_CLASS && regclass [3] == X86_64_SSEUP_CLASS) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 2846, __FUNCTION__), 0 : 0))
2846 | && regclass[3] == X86_64_SSEUP_CLASS)((void)(!(i == 0 && regclass[1] == X86_64_SSEUP_CLASS && regclass[2] == X86_64_SSEUP_CLASS && regclass [3] == X86_64_SSEUP_CLASS) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 2846, __FUNCTION__), 0 : 0));
2847 | tmpmode = OImode(scalar_int_mode ((scalar_int_mode::from_int) E_OImode));
2848 | i += 3;
2849 | break;
2850 | case 8:
2851 | gcc_assert (i == 0((void)(!(i == 0 && regclass[1] == X86_64_SSEUP_CLASS && regclass[2] == X86_64_SSEUP_CLASS && regclass [3] == X86_64_SSEUP_CLASS && regclass[4] == X86_64_SSEUP_CLASS && regclass[5] == X86_64_SSEUP_CLASS && regclass [6] == X86_64_SSEUP_CLASS && regclass[7] == X86_64_SSEUP_CLASS ) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 2858, __FUNCTION__), 0 : 0))
2852 | && regclass[1] == X86_64_SSEUP_CLASS((void)(!(i == 0 && regclass[1] == X86_64_SSEUP_CLASS && regclass[2] == X86_64_SSEUP_CLASS && regclass [3] == X86_64_SSEUP_CLASS && regclass[4] == X86_64_SSEUP_CLASS && regclass[5] == X86_64_SSEUP_CLASS && regclass [6] == X86_64_SSEUP_CLASS && regclass[7] == X86_64_SSEUP_CLASS ) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 2858, __FUNCTION__), 0 : 0))
2853 | && regclass[2] == X86_64_SSEUP_CLASS((void)(!(i == 0 && regclass[1] == X86_64_SSEUP_CLASS && regclass[2] == X86_64_SSEUP_CLASS && regclass [3] == X86_64_SSEUP_CLASS && regclass[4] == X86_64_SSEUP_CLASS && regclass[5] == X86_64_SSEUP_CLASS && regclass [6] == X86_64_SSEUP_CLASS && regclass[7] == X86_64_SSEUP_CLASS ) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 2858, __FUNCTION__), 0 : 0))
2854 | && regclass[3] == X86_64_SSEUP_CLASS((void)(!(i == 0 && regclass[1] == X86_64_SSEUP_CLASS && regclass[2] == X86_64_SSEUP_CLASS && regclass [3] == X86_64_SSEUP_CLASS && regclass[4] == X86_64_SSEUP_CLASS && regclass[5] == X86_64_SSEUP_CLASS && regclass [6] == X86_64_SSEUP_CLASS && regclass[7] == X86_64_SSEUP_CLASS ) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 2858, __FUNCTION__), 0 : 0))
2855 | && regclass[4] == X86_64_SSEUP_CLASS((void)(!(i == 0 && regclass[1] == X86_64_SSEUP_CLASS && regclass[2] == X86_64_SSEUP_CLASS && regclass [3] == X86_64_SSEUP_CLASS && regclass[4] == X86_64_SSEUP_CLASS && regclass[5] == X86_64_SSEUP_CLASS && regclass [6] == X86_64_SSEUP_CLASS && regclass[7] == X86_64_SSEUP_CLASS ) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 2858, __FUNCTION__), 0 : 0))
2856 | && regclass[5] == X86_64_SSEUP_CLASS((void)(!(i == 0 && regclass[1] == X86_64_SSEUP_CLASS && regclass[2] == X86_64_SSEUP_CLASS && regclass [3] == X86_64_SSEUP_CLASS && regclass[4] == X86_64_SSEUP_CLASS && regclass[5] == X86_64_SSEUP_CLASS && regclass [6] == X86_64_SSEUP_CLASS && regclass[7] == X86_64_SSEUP_CLASS ) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 2858, __FUNCTION__), 0 : 0))
2857 | && regclass[6] == X86_64_SSEUP_CLASS((void)(!(i == 0 && regclass[1] == X86_64_SSEUP_CLASS && regclass[2] == X86_64_SSEUP_CLASS && regclass [3] == X86_64_SSEUP_CLASS && regclass[4] == X86_64_SSEUP_CLASS && regclass[5] == X86_64_SSEUP_CLASS && regclass [6] == X86_64_SSEUP_CLASS && regclass[7] == X86_64_SSEUP_CLASS ) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 2858, __FUNCTION__), 0 : 0))
2858 | && regclass[7] == X86_64_SSEUP_CLASS)((void)(!(i == 0 && regclass[1] == X86_64_SSEUP_CLASS && regclass[2] == X86_64_SSEUP_CLASS && regclass [3] == X86_64_SSEUP_CLASS && regclass[4] == X86_64_SSEUP_CLASS && regclass[5] == X86_64_SSEUP_CLASS && regclass [6] == X86_64_SSEUP_CLASS && regclass[7] == X86_64_SSEUP_CLASS ) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 2858, __FUNCTION__), 0 : 0));
2859 | tmpmode = XImode(scalar_int_mode ((scalar_int_mode::from_int) E_XImode));
2860 | i += 7;
2861 | break;
2862 | default:
2863 | gcc_unreachable ()(fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 2863, __FUNCTION__));
2864 | }
2865 | exp [nexps++]
2866 | = gen_rtx_EXPR_LIST (VOIDmode((void) 0, E_VOIDmode),
2867 | gen_rtx_REG (tmpmode,
2868 | GET_SSE_REGNO (sse_regno)((sse_regno) < 8 ? 20 + (sse_regno) : (sse_regno) < 16 ? 44 + (sse_regno) - 8 : 52 + (sse_regno) - 16)),
2869 | GEN_INT (pos*8)gen_rtx_CONST_INT (((void) 0, E_VOIDmode), (pos*8)));
2870 | sse_regno++;
2871 | break;
2872 | default:
2873 | gcc_unreachable ()(fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 2873, __FUNCTION__));
2874 | }
2875 | }
2876 |
2877 | /* Empty aligned struct, union or class. */
2878 | if (nexps == 0)
2879 | return NULL__null;
2880 |
2881 | ret = gen_rtx_PARALLEL (mode, rtvec_alloc (nexps))gen_rtx_fmt_E_stat ((PARALLEL), ((mode)), ((rtvec_alloc (nexps ))) );
2882 | for (i = 0; i < nexps; i++)
2883 | XVECEXP (ret, 0, i)(((((ret)->u.fld[0]).rt_rtvec))->elem[i]) = exp [i];
2884 | return ret;
2885 | }
2886 | |
2887 | /* Update the data in CUM to advance over an argument of mode MODE |
2888 | and data type TYPE. (TYPE is null for libcalls where that information |
2889 | may not be available.) |
2890 | |
2891 | Return a number of integer regsiters advanced over. */ |
2892 | |
/* 32-bit (ia32) variant of argument advance: update CUM past an argument of
   MODE/TYPE occupying BYTES bytes / WORDS words.  Returns the number of
   integer registers consumed (0 when the argument went on the stack or in
   SSE/MMX registers).  error_p flags the "SSE regparm without SSE enabled"
   condition diagnosed at the bottom.  */
2893 | static int
2894 | function_arg_advance_32 (CUMULATIVE_ARGS *cum, machine_mode mode,
2895 | const_tree type, HOST_WIDE_INTlong bytes,
2896 | HOST_WIDE_INTlong words)
2897 | {
2898 | int res = 0;
2899 | bool error_p = false;
2900 |
2901 | if (TARGET_IAMCU((global_options.x_target_flags & (1U << 12)) != 0))
2902 | {
2903 | /* Intel MCU psABI passes scalars and aggregates no larger than 8
2904 | bytes in registers. */
2905 | if (!VECTOR_MODE_P (mode)(((enum mode_class) mode_class[mode]) == MODE_VECTOR_BOOL || ( (enum mode_class) mode_class[mode]) == MODE_VECTOR_INT || ((enum mode_class) mode_class[mode]) == MODE_VECTOR_FLOAT || ((enum mode_class) mode_class[mode]) == MODE_VECTOR_FRACT || ((enum mode_class) mode_class[mode]) == MODE_VECTOR_UFRACT || ((enum mode_class) mode_class[mode]) == MODE_VECTOR_ACCUM || ((enum mode_class) mode_class[mode]) == MODE_VECTOR_UACCUM) && bytes <= 8)
2906 | goto pass_in_reg;
2907 | return res;
2908 | }
2909 |
2910 | switch (mode)
2911 | {
2912 | default:
2913 | break;
2914 |
2915 | case E_BLKmode:
/* bytes < 0 means variable-size; leave for the stack.  */
2916 | if (bytes < 0)
2917 | break;
2918 | /* FALLTHRU */
2919 |
2920 | case E_DImode:
2921 | case E_SImode:
2922 | case E_HImode:
2923 | case E_QImode:
2924 | pass_in_reg:
/* Integer-register case: nregs may go negative here; res is only
   counted when the argument fully fit (nregs >= 0 after the update).  */
2925 | cum->words += words;
2926 | cum->nregs -= words;
2927 | cum->regno += words;
2928 | if (cum->nregs >= 0)
2929 | res = words;
2930 | if (cum->nregs <= 0)
2931 | {
2932 | cum->nregs = 0;
2933 | cfun(cfun + 0)->machine->arg_reg_available = false;
2934 | cum->regno = 0;
2935 | }
2936 | break;
2937 |
2938 | case E_OImode:
2939 | /* OImode shouldn't be used directly. */
2940 | gcc_unreachable ()(fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 2940, __FUNCTION__));
2941 |
2942 | case E_DFmode:
2943 | if (cum->float_in_sse == -1)
2944 | error_p = true;
2945 | if (cum->float_in_sse < 2)
2946 | break;
2947 | /* FALLTHRU */
2948 | case E_SFmode:
2949 | if (cum->float_in_sse == -1)
2950 | error_p = true;
2951 | if (cum->float_in_sse < 1)
2952 | break;
2953 | /* FALLTHRU */
2954 |
/* SSE-register modes: only non-aggregates consume an SSE register.  */
2955 | case E_V16HFmode:
2956 | case E_V16BFmode:
2957 | case E_V8SFmode:
2958 | case E_V8SImode:
2959 | case E_V64QImode:
2960 | case E_V32HImode:
2961 | case E_V16SImode:
2962 | case E_V8DImode:
2963 | case E_V32HFmode:
2964 | case E_V32BFmode:
2965 | case E_V16SFmode:
2966 | case E_V8DFmode:
2967 | case E_V32QImode:
2968 | case E_V16HImode:
2969 | case E_V4DFmode:
2970 | case E_V4DImode:
2971 | case E_TImode:
2972 | case E_V16QImode:
2973 | case E_V8HImode:
2974 | case E_V4SImode:
2975 | case E_V2DImode:
2976 | case E_V8HFmode:
2977 | case E_V8BFmode:
2978 | case E_V4SFmode:
2979 | case E_V2DFmode:
2980 | if (!type || !AGGREGATE_TYPE_P (type)(((enum tree_code) (type)->base.code) == ARRAY_TYPE || ((( enum tree_code) (type)->base.code) == RECORD_TYPE || ((enum tree_code) (type)->base.code) == UNION_TYPE || ((enum tree_code ) (type)->base.code) == QUAL_UNION_TYPE)))
2981 | {
2982 | cum->sse_words += words;
2983 | cum->sse_nregs -= 1;
2984 | cum->sse_regno += 1;
2985 | if (cum->sse_nregs <= 0)
2986 | {
2987 | cum->sse_nregs = 0;
2988 | cum->sse_regno = 0;
2989 | }
2990 | }
2991 | break;
2992 |
/* MMX-register modes: same non-aggregate restriction as SSE above.  */
2993 | case E_V8QImode:
2994 | case E_V4HImode:
2995 | case E_V4HFmode:
2996 | case E_V4BFmode:
2997 | case E_V2SImode:
2998 | case E_V2SFmode:
2999 | case E_V1TImode:
3000 | case E_V1DImode:
3001 | if (!type || !AGGREGATE_TYPE_P (type)(((enum tree_code) (type)->base.code) == ARRAY_TYPE || ((( enum tree_code) (type)->base.code) == RECORD_TYPE || ((enum tree_code) (type)->base.code) == UNION_TYPE || ((enum tree_code ) (type)->base.code) == QUAL_UNION_TYPE)))
3002 | {
3003 | cum->mmx_words += words;
3004 | cum->mmx_nregs -= 1;
3005 | cum->mmx_regno += 1;
3006 | if (cum->mmx_nregs <= 0)
3007 | {
3008 | cum->mmx_nregs = 0;
3009 | cum->mmx_regno = 0;
3010 | }
3011 | }
3012 | break;
3013 | }
3014 | if (error_p)
3015 | {
3016 | cum->float_in_sse = 0;
3017 | error ("calling %qD with SSE calling convention without "
3018 | "SSE/SSE2 enabled", cum->decl);
3019 | sorry ("this is a GCC bug that can be worked around by adding "
3020 | "attribute used to function called");
3021 | }
3022 |
3023 | return res;
3024 | }
3025 | |
/* x86-64 SysV variant of argument advance: consume the integer/SSE
   registers the argument of MODE/TYPE needs, or round CUM->words up to the
   argument's alignment and advance past it on the stack.  Returns the
   number of integer registers consumed (0 when passed on the stack).  */
3026 | static int
3027 | function_arg_advance_64 (CUMULATIVE_ARGS *cum, machine_mode mode,
3028 | const_tree type, HOST_WIDE_INTlong words, bool named)
3029 | {
3030 | int int_nregs, sse_nregs;
3031 |
3032 | /* Unnamed 512 and 256bit vector mode parameters are passed on stack. */
3033 | if (!named && (VALID_AVX512F_REG_MODE (mode)((mode) == ((void) 0, E_V8DImode) || (mode) == ((void) 0, E_V8DFmode ) || (mode) == ((void) 0, E_V64QImode) || (mode) == ((void) 0 , E_V16SImode) || (mode) == ((void) 0, E_V16SFmode) || (mode) == ((void) 0, E_V32HImode) || (mode) == ((void) 0, E_V4TImode ) || (mode) == ((void) 0, E_V32HFmode) || (mode) == ((void) 0 , E_V32BFmode))
3034 | || VALID_AVX256_REG_MODE (mode)((mode) == ((void) 0, E_V32QImode) || (mode) == ((void) 0, E_V16HImode ) || (mode) == ((void) 0, E_V8SImode) || (mode) == ((void) 0, E_V4DImode) || (mode) == ((void) 0, E_V2TImode) || (mode) == ((void) 0, E_V8SFmode) || (mode) == ((void) 0, E_V4DFmode) || (mode) == ((void) 0, E_V16HFmode) || (mode) == ((void) 0, E_V16BFmode ))))
3035 | return 0;
3036 |
/* Register case: examine_argument returning false means "fits in
   registers"; also require enough of each kind left in CUM.  */
3037 | if (!examine_argument (mode, type, 0, &int_nregs, &sse_nregs)
3038 | && sse_nregs <= cum->sse_nregs && int_nregs <= cum->nregs)
3039 | {
3040 | cum->nregs -= int_nregs;
3041 | cum->sse_nregs -= sse_nregs;
3042 | cum->regno += int_nregs;
3043 | cum->sse_regno += sse_nregs;
3044 | return int_nregs;
3045 | }
3046 | else
3047 | {
/* Stack case: align the running word count, then advance it.  */
3048 | int align = ix86_function_arg_boundary (mode, type) / BITS_PER_WORD((8) * (((global_options.x_ix86_isa_flags & (1UL << 1)) != 0) ? 8 : 4));
3049 | cum->words = ROUND_UP (cum->words, align)(((cum->words) + (align) - 1) & ~((align) - 1));
3050 | cum->words += words;
3051 | return 0;
3052 | }
3053 | }
3054 | |
/* Windows (MS) x86-64 variant of argument advance.  MS ABI passes each
   argument in exactly one slot (1/2/4/8 bytes by value, larger indirectly),
   so one register (or one stack slot) is consumed per argument.  Returns 1
   when a register was used, 0 otherwise.  */
3055 | static int
3056 | function_arg_advance_ms_64 (CUMULATIVE_ARGS *cum, HOST_WIDE_INTlong bytes,
3057 | HOST_WIDE_INTlong words)
3058 | {
3059 | /* Otherwise, this should be passed indirect. */
3060 | gcc_assert (bytes == 1 || bytes == 2 || bytes == 4 || bytes == 8)((void)(!(bytes == 1 || bytes == 2 || bytes == 4 || bytes == 8 ) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 3060, __FUNCTION__), 0 : 0));
3061 |
3062 | cum->words += words;
3063 | if (cum->nregs > 0)
3064 | {
3065 | cum->nregs -= 1;
3066 | cum->regno += 1;
3067 | return 1;
3068 | }
3069 | return 0;
3070 | }
3071 | |
3072 | /* Update the data in CUM to advance over argument ARG. */ |
3073 | |
/* TARGET_FUNCTION_ARG_ADVANCE hook: dispatch to the 32-bit, SysV 64-bit or
   MS 64-bit advance helper for ARG, and record when an outgoing argument
   ends up on the stack (nregs == 0 from the helper).  */
3074 | static void
3075 | ix86_function_arg_advance (cumulative_args_t cum_v,
3076 | const function_arg_info &arg)
3077 | {
3078 | CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
3079 | machine_mode mode = arg.mode;
3080 | HOST_WIDE_INTlong bytes, words;
3081 | int nregs;
3082 |
3083 | /* The argument of interrupt handler is a special case and is
3084 | handled in ix86_function_arg. */
3085 | if (!cum->caller && cfun(cfun + 0)->machine->func_type != TYPE_NORMAL)
3086 | return;
3087 |
3088 | bytes = arg.promoted_size_in_bytes ();
3089 | words = CEIL (bytes, UNITS_PER_WORD)(((bytes) + ((((global_options.x_ix86_isa_flags & (1UL << 1)) != 0) ? 8 : 4)) - 1) / ((((global_options.x_ix86_isa_flags & (1UL << 1)) != 0) ? 8 : 4)));
3090 |
/* For typed arguments reclassify to the natural mode (e.g. vectors).  */
3091 | if (arg.type)
3092 | mode = type_natural_mode (arg.type, NULL__null, false);
3093 |
3094 | if (TARGET_64BIT((global_options.x_ix86_isa_flags & (1UL << 1)) != 0 ))
3095 | {
3096 | enum calling_abi call_abi = cum ? cum->call_abi : ix86_abiglobal_options.x_ix86_abi;
3097 |
3098 | if (call_abi == MS_ABI)
3099 | nregs = function_arg_advance_ms_64 (cum, bytes, words);
3100 | else
3101 | nregs = function_arg_advance_64 (cum, mode, arg.type, words,
3102 | arg.named);
3103 | }
3104 | else
3105 | nregs = function_arg_advance_32 (cum, mode, arg.type, bytes, words);
3106 |
3107 | if (!nregs)
3108 | {
3109 | /* Track if there are outgoing arguments on stack. */
3110 | if (cum->caller)
3111 | cfun(cfun + 0)->machine->outgoing_args_on_stack = true;
3112 | }
3113 | }
3114 | |
3115 | /* Define where to put the arguments to a function. |
3116 | Value is zero to push the argument on the stack, |
3117 | or a hard register in which to store the argument. |
3118 | |
3119 | MODE is the argument's machine mode. |
3120 | TYPE is the data type of the argument (as a tree). |
3121 | This is null for libcalls where that information may |
3122 | not be available. |
3123 | CUM is a variable of type CUMULATIVE_ARGS which gives info about |
3124 | the preceding args and about the function being called. |
3125 | NAMED is nonzero if this argument is a named parameter |
3126 | (otherwise it is an extra parameter matching an ellipsis). */ |
3127 | |
/* 32-bit (ia32) variant of function_arg: return the REG (or parallel) the
   argument of MODE/TYPE lives in, or NULL_RTX to pass it on the stack.
   VOIDmode marks the end-of-arguments sentinel; constm1_rtx avoids the
   Unix64 hidden-AL convention.  */
3128 | static rtx
3129 | function_arg_32 (CUMULATIVE_ARGS *cum, machine_mode mode,
3130 | machine_mode orig_mode, const_tree type,
3131 | HOST_WIDE_INTlong bytes, HOST_WIDE_INTlong words)
3132 | {
3133 | bool error_p = false;
3134 |
3135 | /* Avoid the AL settings for the Unix64 ABI. */
3136 | if (mode == VOIDmode((void) 0, E_VOIDmode))
3137 | return constm1_rtx(const_int_rtx[64 -1]);
3138 |
3139 | if (TARGET_IAMCU((global_options.x_target_flags & (1U << 12)) != 0))
3140 | {
3141 | /* Intel MCU psABI passes scalars and aggregates no larger than 8
3142 | bytes in registers. */
3143 | if (!VECTOR_MODE_P (mode)(((enum mode_class) mode_class[mode]) == MODE_VECTOR_BOOL || ( (enum mode_class) mode_class[mode]) == MODE_VECTOR_INT || ((enum mode_class) mode_class[mode]) == MODE_VECTOR_FLOAT || ((enum mode_class) mode_class[mode]) == MODE_VECTOR_FRACT || ((enum mode_class) mode_class[mode]) == MODE_VECTOR_UFRACT || ((enum mode_class) mode_class[mode]) == MODE_VECTOR_ACCUM || ((enum mode_class) mode_class[mode]) == MODE_VECTOR_UACCUM) && bytes <= 8)
3144 | goto pass_in_reg;
3145 | return NULL_RTX(rtx) 0;
3146 | }
3147 |
3148 | switch (mode)
3149 | {
3150 | default:
3151 | break;
3152 |
3153 | case E_BLKmode:
3154 | if (bytes < 0)
3155 | break;
3156 | /* FALLTHRU */
3157 | case E_DImode:
3158 | case E_SImode:
3159 | case E_HImode:
3160 | case E_QImode:
3161 | pass_in_reg:
3162 | if (words <= cum->nregs)
3163 | {
3164 | int regno = cum->regno;
3165 |
3166 | /* Fastcall allocates the first two DWORD (SImode) or
3167 | smaller arguments to ECX and EDX if it isn't an
3168 | aggregate type . */
3169 | if (cum->fastcall)
3170 | {
3171 | if (mode == BLKmode((void) 0, E_BLKmode)
3172 | || mode == DImode(scalar_int_mode ((scalar_int_mode::from_int) E_DImode))
3173 | || (type && AGGREGATE_TYPE_P (type)(((enum tree_code) (type)->base.code) == ARRAY_TYPE || ((( enum tree_code) (type)->base.code) == RECORD_TYPE || ((enum tree_code) (type)->base.code) == UNION_TYPE || ((enum tree_code ) (type)->base.code) == QUAL_UNION_TYPE))))
3174 | break;
3175 |
3176 | /* ECX not EAX is the first allocated register. */
3177 | if (regno == AX_REG0)
3178 | regno = CX_REG2;
3179 | }
3180 | return gen_rtx_REG (mode, regno);
3181 | }
3182 | break;
3183 |
/* DFmode/SFmode fall through to the SSE cases only when the sseregparm
   level (cum->float_in_sse) allows it; -1 marks the inconsistent state
   diagnosed at the bottom via error_p.  */
3184 | case E_DFmode:
3185 | if (cum->float_in_sse == -1)
3186 | error_p = true;
3187 | if (cum->float_in_sse < 2)
3188 | break;
3189 | /* FALLTHRU */
3190 | case E_SFmode:
3191 | if (cum->float_in_sse == -1)
3192 | error_p = true;
3193 | if (cum->float_in_sse < 1)
3194 | break;
3195 | /* FALLTHRU */
3196 | case E_TImode:
3197 | /* In 32bit, we pass TImode in xmm registers. */
3198 | case E_V16QImode:
3199 | case E_V8HImode:
3200 | case E_V4SImode:
3201 | case E_V2DImode:
3202 | case E_V8HFmode:
3203 | case E_V8BFmode:
3204 | case E_V4SFmode:
3205 | case E_V2DFmode:
3206 | if (!type || !AGGREGATE_TYPE_P (type)(((enum tree_code) (type)->base.code) == ARRAY_TYPE || ((( enum tree_code) (type)->base.code) == RECORD_TYPE || ((enum tree_code) (type)->base.code) == UNION_TYPE || ((enum tree_code ) (type)->base.code) == QUAL_UNION_TYPE)))
3207 | {
3208 | if (cum->sse_nregs)
3209 | return gen_reg_or_parallel (mode, orig_mode,
3210 | cum->sse_regno + FIRST_SSE_REG20);
3211 | }
3212 | break;
3213 |
3214 | case E_OImode:
3215 | case E_XImode:
3216 | /* OImode and XImode shouldn't be used directly. */
3217 | gcc_unreachable ()(fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 3217, __FUNCTION__));
3218 |
/* Wide (256/512-bit) vector modes: SSE registers, non-aggregates only.  */
3219 | case E_V64QImode:
3220 | case E_V32HImode:
3221 | case E_V16SImode:
3222 | case E_V8DImode:
3223 | case E_V32HFmode:
3224 | case E_V32BFmode:
3225 | case E_V16SFmode:
3226 | case E_V8DFmode:
3227 | case E_V16HFmode:
3228 | case E_V16BFmode:
3229 | case E_V8SFmode:
3230 | case E_V8SImode:
3231 | case E_V32QImode:
3232 | case E_V16HImode:
3233 | case E_V4DFmode:
3234 | case E_V4DImode:
3235 | if (!type || !AGGREGATE_TYPE_P (type)(((enum tree_code) (type)->base.code) == ARRAY_TYPE || ((( enum tree_code) (type)->base.code) == RECORD_TYPE || ((enum tree_code) (type)->base.code) == UNION_TYPE || ((enum tree_code ) (type)->base.code) == QUAL_UNION_TYPE)))
3236 | {
3237 | if (cum->sse_nregs)
3238 | return gen_reg_or_parallel (mode, orig_mode,
3239 | cum->sse_regno + FIRST_SSE_REG20);
3240 | }
3241 | break;
3242 |
/* 64-bit vector modes go in MMX registers, non-aggregates only.  */
3243 | case E_V8QImode:
3244 | case E_V4HImode:
3245 | case E_V4HFmode:
3246 | case E_V4BFmode:
3247 | case E_V2SImode:
3248 | case E_V2SFmode:
3249 | case E_V1TImode:
3250 | case E_V1DImode:
3251 | if (!type || !AGGREGATE_TYPE_P (type)(((enum tree_code) (type)->base.code) == ARRAY_TYPE || ((( enum tree_code) (type)->base.code) == RECORD_TYPE || ((enum tree_code) (type)->base.code) == UNION_TYPE || ((enum tree_code ) (type)->base.code) == QUAL_UNION_TYPE)))
3252 | {
3253 | if (cum->mmx_nregs)
3254 | return gen_reg_or_parallel (mode, orig_mode,
3255 | cum->mmx_regno + FIRST_MMX_REG28);
3256 | }
3257 | break;
3258 | }
3259 | if (error_p)
3260 | {
3261 | cum->float_in_sse = 0;
3262 | error ("calling %qD with SSE calling convention without "
3263 | "SSE/SSE2 enabled", cum->decl);
3264 | sorry ("this is a GCC bug that can be worked around by adding "
3265 | "attribute used to function called");
3266 | }
3267 |
3268 | return NULL_RTX(rtx) 0;
3269 | }
3270 | |
3271 | static rtx |
3272 | function_arg_64 (const CUMULATIVE_ARGS *cum, machine_mode mode, |
3273 | machine_mode orig_mode, const_tree type, bool named) |
3274 | { |
3275 | /* Handle a hidden AL argument containing number of registers |
3276 | for varargs x86-64 functions. */ |
3277 | if (mode == VOIDmode((void) 0, E_VOIDmode)) |
3278 | return GEN_INT (cum->maybe_vaarggen_rtx_CONST_INT (((void) 0, E_VOIDmode), (cum->maybe_vaarg ? (cum->sse_nregs < 0 ? 8 : cum->sse_regno) : -1)) |
3279 | ? (cum->sse_nregs < 0gen_rtx_CONST_INT (((void) 0, E_VOIDmode), (cum->maybe_vaarg ? (cum->sse_nregs < 0 ? 8 : cum->sse_regno) : -1)) |
3280 | ? X86_64_SSE_REGPARM_MAXgen_rtx_CONST_INT (((void) 0, E_VOIDmode), (cum->maybe_vaarg ? (cum->sse_nregs < 0 ? 8 : cum->sse_regno) : -1)) |
3281 | : cum->sse_regno)gen_rtx_CONST_INT (((void) 0, E_VOIDmode), (cum->maybe_vaarg ? (cum->sse_nregs < 0 ? 8 : cum->sse_regno) : -1)) |
3282 | : -1)gen_rtx_CONST_INT (((void) 0, E_VOIDmode), (cum->maybe_vaarg ? (cum->sse_nregs < 0 ? 8 : cum->sse_regno) : -1)); |
3283 | |
3284 | switch (mode) |
3285 | { |
3286 | default: |
3287 | break; |
3288 | |
3289 | case E_V16HFmode: |
3290 | case E_V16BFmode: |
3291 | case E_V8SFmode: |
3292 | case E_V8SImode: |
3293 | case E_V32QImode: |
3294 | case E_V16HImode: |
3295 | case E_V4DFmode: |
3296 | case E_V4DImode: |
3297 | case E_V32HFmode: |
3298 | case E_V32BFmode: |
3299 | case E_V16SFmode: |
3300 | case E_V16SImode: |
3301 | case E_V64QImode: |
3302 | case E_V32HImode: |
3303 | case E_V8DFmode: |
3304 | case E_V8DImode: |
3305 | /* Unnamed 256 and 512bit vector mode parameters are passed on stack. */ |
3306 | if (!named) |
3307 | return NULL__null; |
3308 | break; |
3309 | } |
3310 | |
3311 | return construct_container (mode, orig_mode, type, 0, cum->nregs, |
3312 | cum->sse_nregs, |
3313 | &x86_64_int_parameter_registers [cum->regno], |
3314 | cum->sse_regno); |
3315 | } |
3316 | |
3317 | static rtx |
3318 | function_arg_ms_64 (const CUMULATIVE_ARGS *cum, machine_mode mode, |
3319 | machine_mode orig_mode, bool named, const_tree type, |
3320 | HOST_WIDE_INTlong bytes) |
3321 | { |
3322 | unsigned int regno; |
3323 | |
3324 | /* We need to add clobber for MS_ABI->SYSV ABI calls in expand_call. |
3325 | We use value of -2 to specify that current function call is MSABI. */ |
3326 | if (mode == VOIDmode((void) 0, E_VOIDmode)) |
3327 | return GEN_INT (-2)gen_rtx_CONST_INT (((void) 0, E_VOIDmode), (-2)); |
3328 | |
3329 | /* If we've run out of registers, it goes on the stack. */ |
3330 | if (cum->nregs == 0) |
3331 | return NULL_RTX(rtx) 0; |
3332 | |
3333 | regno = x86_64_ms_abi_int_parameter_registers[cum->regno]; |
3334 | |
3335 | /* Only floating point modes are passed in anything but integer regs. */ |
3336 | if (TARGET_SSE((global_options.x_ix86_isa_flags & (1UL << 50)) != 0) && (mode == SFmode(scalar_float_mode ((scalar_float_mode::from_int) E_SFmode)) || mode == DFmode(scalar_float_mode ((scalar_float_mode::from_int) E_DFmode)))) |
3337 | { |
3338 | if (named) |
3339 | { |
3340 | if (type == NULL_TREE(tree) __null || !AGGREGATE_TYPE_P (type)(((enum tree_code) (type)->base.code) == ARRAY_TYPE || ((( enum tree_code) (type)->base.code) == RECORD_TYPE || ((enum tree_code) (type)->base.code) == UNION_TYPE || ((enum tree_code ) (type)->base.code) == QUAL_UNION_TYPE))) |
3341 | regno = cum->regno + FIRST_SSE_REG20; |
3342 | } |
3343 | else |
3344 | { |
3345 | rtx t1, t2; |
3346 | |
3347 | /* Unnamed floating parameters are passed in both the |
3348 | SSE and integer registers. */ |
3349 | t1 = gen_rtx_REG (mode, cum->regno + FIRST_SSE_REG20); |
3350 | t2 = gen_rtx_REG (mode, regno); |
3351 | t1 = gen_rtx_EXPR_LIST (VOIDmode((void) 0, E_VOIDmode), t1, const0_rtx(const_int_rtx[64])); |
3352 | t2 = gen_rtx_EXPR_LIST (VOIDmode((void) 0, E_VOIDmode), t2, const0_rtx(const_int_rtx[64])); |
3353 | return gen_rtx_PARALLEL (mode, gen_rtvec (2, t1, t2))gen_rtx_fmt_E_stat ((PARALLEL), ((mode)), ((gen_rtvec (2, t1, t2))) ); |
3354 | } |
3355 | } |
3356 | /* Handle aggregated types passed in register. */ |
3357 | if (orig_mode == BLKmode((void) 0, E_BLKmode)) |
3358 | { |
3359 | if (bytes > 0 && bytes <= 8) |
3360 | mode = (bytes > 4 ? DImode(scalar_int_mode ((scalar_int_mode::from_int) E_DImode)) : SImode(scalar_int_mode ((scalar_int_mode::from_int) E_SImode))); |
3361 | if (mode == BLKmode((void) 0, E_BLKmode)) |
3362 | mode = DImode(scalar_int_mode ((scalar_int_mode::from_int) E_DImode)); |
3363 | } |
3364 | |
3365 | return gen_reg_or_parallel (mode, orig_mode, regno); |
3366 | } |
3367 | |
3368 | /* Return where to put the arguments to a function. |
3369 | Return zero to push the argument on the stack, or a hard register in which to store the argument. |
3370 | |
3371 | ARG describes the argument while CUM gives information about the |
3372 | preceding args and about the function being called. */ |
3373 | |
3374 | static rtx |
3375 | ix86_function_arg (cumulative_args_t cum_v, const function_arg_info &arg) |
3376 | { |
3377 | CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v); |
3378 | machine_mode mode = arg.mode; |
3379 | HOST_WIDE_INTlong bytes, words; |
3380 | rtx reg; |
3381 | |
3382 | if (!cum->caller && cfun(cfun + 0)->machine->func_type != TYPE_NORMAL) |
3383 | { |
3384 | gcc_assert (arg.type != NULL_TREE)((void)(!(arg.type != (tree) __null) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 3384, __FUNCTION__), 0 : 0)); |
3385 | if (POINTER_TYPE_P (arg.type)(((enum tree_code) (arg.type)->base.code) == POINTER_TYPE || ((enum tree_code) (arg.type)->base.code) == REFERENCE_TYPE )) |
3386 | { |
3387 | /* This is the pointer argument. */ |
3388 | gcc_assert (TYPE_MODE (arg.type) == ptr_mode)((void)(!(((((enum tree_code) ((tree_class_check ((arg.type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 3388, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode (arg.type) : (arg.type)->type_common.mode) == ptr_mode) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 3388, __FUNCTION__), 0 : 0)); |
3389 | /* It is at -WORD(AP) in the current frame in interrupt and |
3390 | exception handlers. */ |
3391 | reg = plus_constant (Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode ( (scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode ::from_int) E_SImode))), arg_pointer_rtx((this_target_rtl->x_global_rtl)[GR_ARG_POINTER]), -UNITS_PER_WORD(((global_options.x_ix86_isa_flags & (1UL << 1)) != 0) ? 8 : 4)); |
3392 | } |
3393 | else |
3394 | { |
3395 | gcc_assert (cfun->machine->func_type == TYPE_EXCEPTION((void)(!((cfun + 0)->machine->func_type == TYPE_EXCEPTION && ((enum tree_code) (arg.type)->base.code) == INTEGER_TYPE && ((((enum tree_code) ((tree_class_check ((arg.type ), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 3397, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode (arg.type) : (arg.type)->type_common.mode) == word_mode) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 3397, __FUNCTION__), 0 : 0)) |
3396 | && TREE_CODE (arg.type) == INTEGER_TYPE((void)(!((cfun + 0)->machine->func_type == TYPE_EXCEPTION && ((enum tree_code) (arg.type)->base.code) == INTEGER_TYPE && ((((enum tree_code) ((tree_class_check ((arg.type ), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 3397, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode (arg.type) : (arg.type)->type_common.mode) == word_mode) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 3397, __FUNCTION__), 0 : 0)) |
3397 | && TYPE_MODE (arg.type) == word_mode)((void)(!((cfun + 0)->machine->func_type == TYPE_EXCEPTION && ((enum tree_code) (arg.type)->base.code) == INTEGER_TYPE && ((((enum tree_code) ((tree_class_check ((arg.type ), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 3397, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode (arg.type) : (arg.type)->type_common.mode) == word_mode) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 3397, __FUNCTION__), 0 : 0)); |
3398 | /* The error code is the word-mode integer argument at |
3399 | -2 * WORD(AP) in the current frame of the exception |
3400 | handler. */ |
3401 | reg = gen_rtx_MEM (word_mode, |
3402 | plus_constant (Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode ( (scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode ::from_int) E_SImode))), |
3403 | arg_pointer_rtx((this_target_rtl->x_global_rtl)[GR_ARG_POINTER]), |
3404 | -2 * UNITS_PER_WORD(((global_options.x_ix86_isa_flags & (1UL << 1)) != 0) ? 8 : 4))); |
3405 | } |
3406 | return reg; |
3407 | } |
3408 | |
3409 | bytes = arg.promoted_size_in_bytes (); |
3410 | words = CEIL (bytes, UNITS_PER_WORD)(((bytes) + ((((global_options.x_ix86_isa_flags & (1UL << 1)) != 0) ? 8 : 4)) - 1) / ((((global_options.x_ix86_isa_flags & (1UL << 1)) != 0) ? 8 : 4))); |
3411 | |
3412 | /* To simplify the code below, represent vector types with a vector mode |
3413 | even if MMX/SSE are not active. */ |
3414 | if (arg.type && TREE_CODE (arg.type)((enum tree_code) (arg.type)->base.code) == VECTOR_TYPE) |
3415 | mode = type_natural_mode (arg.type, cum, false); |
3416 | |
3417 | if (TARGET_64BIT((global_options.x_ix86_isa_flags & (1UL << 1)) != 0 )) |
3418 | { |
3419 | enum calling_abi call_abi = cum ? cum->call_abi : ix86_abiglobal_options.x_ix86_abi; |
3420 | |
3421 | if (call_abi == MS_ABI) |
3422 | reg = function_arg_ms_64 (cum, mode, arg.mode, arg.named, |
3423 | arg.type, bytes); |
3424 | else |
3425 | reg = function_arg_64 (cum, mode, arg.mode, arg.type, arg.named); |
3426 | } |
3427 | else |
3428 | reg = function_arg_32 (cum, mode, arg.mode, arg.type, bytes, words); |
3429 | |
3430 | /* Track if there are outgoing arguments on stack. */ |
3431 | if (reg == NULL_RTX(rtx) 0 && cum->caller) |
3432 | cfun(cfun + 0)->machine->outgoing_args_on_stack = true; |
3433 | |
3434 | return reg; |
3435 | } |
3436 | |
3437 | /* A C expression that indicates when an argument must be passed by |
3438 | reference. If nonzero for an argument, a copy of that argument is |
3439 | made in memory and a pointer to the argument is passed instead of |
3440 | the argument itself. The pointer is passed in whatever way is |
3441 | appropriate for passing a pointer to that type. */ |
3442 | |
3443 | static bool |
3444 | ix86_pass_by_reference (cumulative_args_t cum_v, const function_arg_info &arg) |
3445 | { |
3446 | CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v); |
3447 | |
3448 | if (TARGET_64BIT((global_options.x_ix86_isa_flags & (1UL << 1)) != 0 )) |
3449 | { |
3450 | enum calling_abi call_abi = cum ? cum->call_abi : ix86_abiglobal_options.x_ix86_abi; |
3451 | |
3452 | /* See Windows x64 Software Convention. */ |
3453 | if (call_abi == MS_ABI) |
3454 | { |
3455 | HOST_WIDE_INTlong msize = GET_MODE_SIZE (arg.mode)((unsigned short) mode_to_bytes (arg.mode).coeffs[0]); |
3456 | |
3457 | if (tree type = arg.type) |
3458 | { |
3459 | /* Arrays are passed by reference. */ |
3460 | if (TREE_CODE (type)((enum tree_code) (type)->base.code) == ARRAY_TYPE) |
3461 | return true; |
3462 | |
3463 | if (RECORD_OR_UNION_TYPE_P (type)(((enum tree_code) (type)->base.code) == RECORD_TYPE || (( enum tree_code) (type)->base.code) == UNION_TYPE || ((enum tree_code) (type)->base.code) == QUAL_UNION_TYPE)) |
3464 | { |
3465 | /* Structs/unions of sizes other than 8, 16, 32, or 64 bits |
3466 | are passed by reference. */ |
3467 | msize = int_size_in_bytes (type); |
3468 | } |
3469 | } |
3470 | |
3471 | /* __m128 is passed by reference. */ |
3472 | return msize != 1 && msize != 2 && msize != 4 && msize != 8; |
3473 | } |
3474 | else if (arg.type && int_size_in_bytes (arg.type) == -1) |
3475 | return true; |
3476 | } |
3477 | |
3478 | return false; |
3479 | } |
3480 | |
3481 | /* Return true when TYPE should be 128bit aligned for 32bit argument |
3482 | passing ABI. XXX: This function is obsolete and is only used for |
3483 | checking psABI compatibility with previous versions of GCC. */ |
3484 | |
3485 | static bool |
3486 | ix86_compat_aligned_value_p (const_tree type) |
3487 | { |
3488 | machine_mode mode = TYPE_MODE (type)((((enum tree_code) ((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 3488, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode (type) : (type)->type_common.mode); |
3489 | if (((TARGET_SSE((global_options.x_ix86_isa_flags & (1UL << 50)) != 0) && SSE_REG_MODE_P (mode)((mode) == ((void) 0, E_V1TImode) || (mode) == (scalar_int_mode ((scalar_int_mode::from_int) E_TImode)) || (mode) == ((void) 0, E_V16QImode) || (mode) == (scalar_float_mode ((scalar_float_mode ::from_int) E_TFmode)) || (mode) == ((void) 0, E_V8HImode) || (mode) == ((void) 0, E_V2DFmode) || (mode) == ((void) 0, E_V2DImode ) || (mode) == ((void) 0, E_V4SFmode) || (mode) == ((void) 0, E_V4SImode) || (mode) == ((void) 0, E_V32QImode) || (mode) == ((void) 0, E_V16HImode) || (mode) == ((void) 0, E_V8SImode) || (mode) == ((void) 0, E_V4DImode) || (mode) == ((void) 0, E_V8SFmode ) || (mode) == ((void) 0, E_V4DFmode) || (mode) == ((void) 0, E_V2TImode) || (mode) == ((void) 0, E_V8DImode) || (mode) == ((void) 0, E_V64QImode) || (mode) == ((void) 0, E_V16SImode) || (mode) == ((void) 0, E_V32HImode) || (mode) == ((void) 0, E_V8DFmode) || (mode) == ((void) 0, E_V16SFmode) || (mode) == ((void) 0, E_V32HFmode) || (mode) == ((void) 0, E_V16HFmode) || (mode) == ((void) 0, E_V8HFmode) || (mode) == ((void) 0, E_V32BFmode ) || (mode) == ((void) 0, E_V16BFmode) || (mode) == ((void) 0 , E_V8BFmode))) |
3490 | || mode == TDmode(scalar_float_mode ((scalar_float_mode::from_int) E_TDmode)) |
3491 | || mode == TFmode(scalar_float_mode ((scalar_float_mode::from_int) E_TFmode)) |
3492 | || mode == TCmode(complex_mode ((complex_mode::from_int) E_TCmode))) |
3493 | && (!TYPE_USER_ALIGN (type)((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 3493, __FUNCTION__))->base.u.bits.user_align) || TYPE_ALIGN (type)(((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 3493, __FUNCTION__))->type_common.align) ? ((unsigned)1) << (((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 3493, __FUNCTION__))->type_common.align) - 1) : 0) > 128)) |
3494 | return true; |
3495 | if (TYPE_ALIGN (type)(((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 3495, __FUNCTION__))->type_common.align) ? ((unsigned)1) << (((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 3495, __FUNCTION__))->type_common.align) - 1) : 0) < 128) |
3496 | return false; |
3497 | |
3498 | if (AGGREGATE_TYPE_P (type)(((enum tree_code) (type)->base.code) == ARRAY_TYPE || ((( enum tree_code) (type)->base.code) == RECORD_TYPE || ((enum tree_code) (type)->base.code) == UNION_TYPE || ((enum tree_code ) (type)->base.code) == QUAL_UNION_TYPE))) |
3499 | { |
3500 | /* Walk the aggregates recursively. */ |
3501 | switch (TREE_CODE (type)((enum tree_code) (type)->base.code)) |
3502 | { |
3503 | case RECORD_TYPE: |
3504 | case UNION_TYPE: |
3505 | case QUAL_UNION_TYPE: |
3506 | { |
3507 | tree field; |
3508 | |
3509 | /* Walk all the structure fields. */ |
3510 | for (field = TYPE_FIELDS (type)((tree_check3 ((type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 3510, __FUNCTION__, (RECORD_TYPE), (UNION_TYPE), (QUAL_UNION_TYPE )))->type_non_common.values); field; field = DECL_CHAIN (field)(((contains_struct_check (((contains_struct_check ((field), ( TS_DECL_MINIMAL), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 3510, __FUNCTION__))), (TS_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 3510, __FUNCTION__))->common.chain))) |
3511 | { |
3512 | if (TREE_CODE (field)((enum tree_code) (field)->base.code) == FIELD_DECL |
3513 | && ix86_compat_aligned_value_p (TREE_TYPE (field)((contains_struct_check ((field), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 3513, __FUNCTION__))->typed.type))) |
3514 | return true; |
3515 | } |
3516 | break; |
3517 | } |
3518 | |
3519 | case ARRAY_TYPE: |
3520 | /* Just for use if some languages passes arrays by value. */ |
3521 | if (ix86_compat_aligned_value_p (TREE_TYPE (type)((contains_struct_check ((type), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 3521, __FUNCTION__))->typed.type))) |
3522 | return true; |
3523 | break; |
3524 | |
3525 | default: |
3526 | gcc_unreachable ()(fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 3526, __FUNCTION__)); |
3527 | } |
3528 | } |
3529 | return false; |
3530 | } |
3531 | |
3532 | /* Return the alignment boundary for MODE and TYPE with alignment ALIGN. |
3533 | XXX: This function is obsolete and is only used for checking psABI |
3534 | compatibility with previous versions of GCC. */ |
3535 | |
3536 | static unsigned int |
3537 | ix86_compat_function_arg_boundary (machine_mode mode, |
3538 | const_tree type, unsigned int align) |
3539 | { |
3540 | /* In 32bit, only _Decimal128 and __float128 are aligned to their |
3541 | natural boundaries. */ |
3542 | if (!TARGET_64BIT((global_options.x_ix86_isa_flags & (1UL << 1)) != 0 ) && mode != TDmode(scalar_float_mode ((scalar_float_mode::from_int) E_TDmode)) && mode != TFmode(scalar_float_mode ((scalar_float_mode::from_int) E_TFmode))) |
3543 | { |
3544 | /* i386 ABI defines all arguments to be 4 byte aligned. We have to |
3545 | make an exception for SSE modes since these require 128bit |
3546 | alignment. |
3547 | |
3548 | The handling here differs from field_alignment. ICC aligns MMX |
3549 | arguments to 4 byte boundaries, while structure fields are aligned |
3550 | to 8 byte boundaries. */ |
3551 | if (!type) |
3552 | { |
3553 | if (!(TARGET_SSE((global_options.x_ix86_isa_flags & (1UL << 50)) != 0) && SSE_REG_MODE_P (mode)((mode) == ((void) 0, E_V1TImode) || (mode) == (scalar_int_mode ((scalar_int_mode::from_int) E_TImode)) || (mode) == ((void) 0, E_V16QImode) || (mode) == (scalar_float_mode ((scalar_float_mode ::from_int) E_TFmode)) || (mode) == ((void) 0, E_V8HImode) || (mode) == ((void) 0, E_V2DFmode) || (mode) == ((void) 0, E_V2DImode ) || (mode) == ((void) 0, E_V4SFmode) || (mode) == ((void) 0, E_V4SImode) || (mode) == ((void) 0, E_V32QImode) || (mode) == ((void) 0, E_V16HImode) || (mode) == ((void) 0, E_V8SImode) || (mode) == ((void) 0, E_V4DImode) || (mode) == ((void) 0, E_V8SFmode ) || (mode) == ((void) 0, E_V4DFmode) || (mode) == ((void) 0, E_V2TImode) || (mode) == ((void) 0, E_V8DImode) || (mode) == ((void) 0, E_V64QImode) || (mode) == ((void) 0, E_V16SImode) || (mode) == ((void) 0, E_V32HImode) || (mode) == ((void) 0, E_V8DFmode) || (mode) == ((void) 0, E_V16SFmode) || (mode) == ((void) 0, E_V32HFmode) || (mode) == ((void) 0, E_V16HFmode) || (mode) == ((void) 0, E_V8HFmode) || (mode) == ((void) 0, E_V32BFmode ) || (mode) == ((void) 0, E_V16BFmode) || (mode) == ((void) 0 , E_V8BFmode)))) |
3554 | align = PARM_BOUNDARY((8) * (((global_options.x_ix86_isa_flags & (1UL << 1)) != 0) ? 8 : 4)); |
3555 | } |
3556 | else |
3557 | { |
3558 | if (!ix86_compat_aligned_value_p (type)) |
3559 | align = PARM_BOUNDARY((8) * (((global_options.x_ix86_isa_flags & (1UL << 1)) != 0) ? 8 : 4)); |
3560 | } |
3561 | } |
3562 | if (align > BIGGEST_ALIGNMENT(((global_options.x_target_flags & (1U << 12)) != 0 ) ? 32 : (((global_options.x_ix86_isa_flags & (1UL << 15)) != 0) ? 512 : (((global_options.x_ix86_isa_flags & ( 1UL << 8)) != 0) ? 256 : 128)))) |
3563 | align = BIGGEST_ALIGNMENT(((global_options.x_target_flags & (1U << 12)) != 0 ) ? 32 : (((global_options.x_ix86_isa_flags & (1UL << 15)) != 0) ? 512 : (((global_options.x_ix86_isa_flags & ( 1UL << 8)) != 0) ? 256 : 128))); |
3564 | return align; |
3565 | } |
3566 | |
3567 | /* Return true when TYPE should be 128bit aligned for 32bit argument |
3568 | passing ABI. */ |
3569 | |
3570 | static bool |
3571 | ix86_contains_aligned_value_p (const_tree type) |
3572 | { |
3573 | machine_mode mode = TYPE_MODE (type)((((enum tree_code) ((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 3573, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode (type) : (type)->type_common.mode); |
3574 | |
3575 | if (mode == XFmode(scalar_float_mode ((scalar_float_mode::from_int) E_XFmode)) || mode == XCmode(complex_mode ((complex_mode::from_int) E_XCmode))) |
3576 | return false; |
3577 | |
3578 | if (TYPE_ALIGN (type)(((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 3578, __FUNCTION__))->type_common.align) ? ((unsigned)1) << (((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 3578, __FUNCTION__))->type_common.align) - 1) : 0) < 128) |
3579 | return false; |
3580 | |
3581 | if (AGGREGATE_TYPE_P (type)(((enum tree_code) (type)->base.code) == ARRAY_TYPE || ((( enum tree_code) (type)->base.code) == RECORD_TYPE || ((enum tree_code) (type)->base.code) == UNION_TYPE || ((enum tree_code ) (type)->base.code) == QUAL_UNION_TYPE))) |
3582 | { |
3583 | /* Walk the aggregates recursively. */ |
3584 | switch (TREE_CODE (type)((enum tree_code) (type)->base.code)) |
3585 | { |
3586 | case RECORD_TYPE: |
3587 | case UNION_TYPE: |
3588 | case QUAL_UNION_TYPE: |
3589 | { |
3590 | tree field; |
3591 | |
3592 | /* Walk all the structure fields. */ |
3593 | for (field = TYPE_FIELDS (type)((tree_check3 ((type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 3593, __FUNCTION__, (RECORD_TYPE), (UNION_TYPE), (QUAL_UNION_TYPE )))->type_non_common.values); |
3594 | field; |
3595 | field = DECL_CHAIN (field)(((contains_struct_check (((contains_struct_check ((field), ( TS_DECL_MINIMAL), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 3595, __FUNCTION__))), (TS_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 3595, __FUNCTION__))->common.chain))) |
3596 | { |
3597 | if (TREE_CODE (field)((enum tree_code) (field)->base.code) == FIELD_DECL |
3598 | && ix86_contains_aligned_value_p (TREE_TYPE (field)((contains_struct_check ((field), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 3598, __FUNCTION__))->typed.type))) |
3599 | return true; |
3600 | } |
3601 | break; |
3602 | } |
3603 | |
3604 | case ARRAY_TYPE: |
3605 | /* Just for use if some languages passes arrays by value. */ |
3606 | if (ix86_contains_aligned_value_p (TREE_TYPE (type)((contains_struct_check ((type), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 3606, __FUNCTION__))->typed.type))) |
3607 | return true; |
3608 | break; |
3609 | |
3610 | default: |
3611 | gcc_unreachable ()(fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 3611, __FUNCTION__)); |
3612 | } |
3613 | } |
3614 | else |
3615 | return TYPE_ALIGN (type)(((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 3615, __FUNCTION__))->type_common.align) ? ((unsigned)1) << (((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 3615, __FUNCTION__))->type_common.align) - 1) : 0) >= 128; |
3616 | |
3617 | return false; |
3618 | } |
3619 | |
3620 | /* Gives the alignment boundary, in bits, of an argument with the |
3621 | specified mode and type. */ |
3622 | |
3623 | static unsigned int |
3624 | ix86_function_arg_boundary (machine_mode mode, const_tree type) |
3625 | { |
3626 | unsigned int align; |
3627 | if (type) |
3628 | { |
3629 | /* Since the main variant type is used for call, we convert it to |
3630 | the main variant type. */ |
3631 | type = TYPE_MAIN_VARIANT (type)((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 3631, __FUNCTION__))->type_common.main_variant); |
3632 | align = TYPE_ALIGN (type)(((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 3632, __FUNCTION__))->type_common.align) ? ((unsigned)1) << (((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 3632, __FUNCTION__))->type_common.align) - 1) : 0); |
3633 | if (TYPE_EMPTY_P (type)((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 3633, __FUNCTION__))->type_common.empty_flag)) |
3634 | return PARM_BOUNDARY((8) * (((global_options.x_ix86_isa_flags & (1UL << 1)) != 0) ? 8 : 4)); |
3635 | } |
3636 | else |
3637 | align = GET_MODE_ALIGNMENT (mode)get_mode_alignment (mode); |
3638 | if (align < PARM_BOUNDARY((8) * (((global_options.x_ix86_isa_flags & (1UL << 1)) != 0) ? 8 : 4))) |
3639 | align = PARM_BOUNDARY((8) * (((global_options.x_ix86_isa_flags & (1UL << 1)) != 0) ? 8 : 4)); |
3640 | else |
3641 | { |
3642 | static bool warned; |
3643 | unsigned int saved_align = align; |
3644 | |
3645 | if (!TARGET_64BIT((global_options.x_ix86_isa_flags & (1UL << 1)) != 0 )) |
3646 | { |
3647 | /* i386 ABI defines XFmode arguments to be 4 byte aligned. */ |
3648 | if (!type) |
3649 | { |
3650 | if (mode == XFmode(scalar_float_mode ((scalar_float_mode::from_int) E_XFmode)) || mode == XCmode(complex_mode ((complex_mode::from_int) E_XCmode))) |
3651 | align = PARM_BOUNDARY((8) * (((global_options.x_ix86_isa_flags & (1UL << 1)) != 0) ? 8 : 4)); |
3652 | } |
3653 | else if (!ix86_contains_aligned_value_p (type)) |
3654 | align = PARM_BOUNDARY((8) * (((global_options.x_ix86_isa_flags & (1UL << 1)) != 0) ? 8 : 4)); |
3655 | |
3656 | if (align < 128) |
3657 | align = PARM_BOUNDARY((8) * (((global_options.x_ix86_isa_flags & (1UL << 1)) != 0) ? 8 : 4)); |
3658 | } |
3659 | |
3660 | if (warn_psabiglobal_options.x_warn_psabi |
3661 | && !warned |
3662 | && align != ix86_compat_function_arg_boundary (mode, type, |
3663 | saved_align)) |
3664 | { |
3665 | warned = true; |
3666 | inform (input_location, |
3667 | "the ABI for passing parameters with %d-byte" |
3668 | " alignment has changed in GCC 4.6", |
3669 | align / BITS_PER_UNIT(8)); |
3670 | } |
3671 | } |
3672 | |
3673 | return align; |
3674 | } |
3675 | |
3676 | /* Return true if N is a possible register number of function value. */ |
3677 | |
3678 | static bool |
3679 | ix86_function_value_regno_p (const unsigned int regno) |
3680 | { |
3681 | switch (regno) |
3682 | { |
3683 | case AX_REG0: |
3684 | return true; |
3685 | case DX_REG1: |
3686 | return (!TARGET_64BIT((global_options.x_ix86_isa_flags & (1UL << 1)) != 0 ) || ix86_cfun_abi () != MS_ABI); |
3687 | case DI_REG5: |
3688 | case SI_REG4: |
3689 | return TARGET_64BIT((global_options.x_ix86_isa_flags & (1UL << 1)) != 0 ) && ix86_cfun_abi () != MS_ABI; |
3690 | |
3691 | /* Complex values are returned in %st(0)/%st(1) pair. */ |
3692 | case ST0_REG8: |
3693 | case ST1_REG9: |
3694 | /* TODO: The function should depend on current function ABI but |
3695 | builtins.cc would need updating then. Therefore we use the |
3696 | default ABI. */ |
3697 | if (TARGET_64BIT((global_options.x_ix86_isa_flags & (1UL << 1)) != 0 ) && ix86_cfun_abi () == MS_ABI) |
3698 | return false; |
3699 | return TARGET_FLOAT_RETURNS_IN_80387(((global_options.x_target_flags & (1U << 11)) != 0 ) && ((global_options.x_target_flags & (1U << 1)) != 0) && !((global_options.x_target_flags & ( 1U << 12)) != 0)); |
3700 | |
3701 | /* Complex values are returned in %xmm0/%xmm1 pair. */ |
3702 | case XMM0_REG20: |
3703 | case XMM1_REG21: |
3704 | return TARGET_SSE((global_options.x_ix86_isa_flags & (1UL << 50)) != 0); |
3705 | |
3706 | case MM0_REG28: |
3707 | if (TARGET_MACHO0 || TARGET_64BIT((global_options.x_ix86_isa_flags & (1UL << 1)) != 0 )) |
3708 | return false; |
3709 | return TARGET_MMX((global_options.x_ix86_isa_flags & (1UL << 36)) != 0); |
3710 | } |
3711 | |
3712 | return false; |
3713 | } |
3714 | |
3715 | /* Check whether the register REGNO should be zeroed on X86. |
3716 | When ALL_SSE_ZEROED is true, all SSE registers have been zeroed |
3717 | together, no need to zero it again. |
3718 | When NEED_ZERO_MMX is true, MMX registers should be cleared. */ |
3719 | |
3720 | static bool |
3721 | zero_call_used_regno_p (const unsigned int regno, |
3722 | bool all_sse_zeroed, |
3723 | bool need_zero_mmx) |
3724 | { |
3725 | return GENERAL_REGNO_P (regno)((((unsigned long) ((regno)) - (unsigned long) (0) <= (unsigned long) (7) - (unsigned long) (0))) || ((unsigned long) ((regno )) - (unsigned long) (36) <= (unsigned long) (43) - (unsigned long) (36))) |
3726 | || (!all_sse_zeroed && SSE_REGNO_P (regno)(((unsigned long) ((regno)) - (unsigned long) (20) <= (unsigned long) (27) - (unsigned long) (20)) || ((unsigned long) ((regno )) - (unsigned long) (44) <= (unsigned long) (51) - (unsigned long) (44)) || ((unsigned long) ((regno)) - (unsigned long) ( 52) <= (unsigned long) (67) - (unsigned long) (52)))) |
3727 | || MASK_REGNO_P (regno)((unsigned long) ((regno)) - (unsigned long) (68) <= (unsigned long) (75) - (unsigned long) (68)) |
3728 | || (need_zero_mmx && MMX_REGNO_P (regno)((unsigned long) ((regno)) - (unsigned long) (28) <= (unsigned long) (35) - (unsigned long) (28))); |
3729 | } |
3730 | |
3731 | /* Return the machine_mode that is used to zero register REGNO. */
3732 | 
3733 | static machine_mode
3734 | zero_call_used_regno_mode (const unsigned int regno)
3735 | {
3736 | /* NB: We only need to zero the lower 32 bits for integer registers
3737 | and the lower 128 bits for vector registers since destinations are
3738 | zero-extended to the full register width. */
3739 | if (GENERAL_REGNO_P (regno)((((unsigned long) ((regno)) - (unsigned long) (0) <= (unsigned long) (7) - (unsigned long) (0))) || ((unsigned long) ((regno )) - (unsigned long) (36) <= (unsigned long) (43) - (unsigned long) (36))))
3740 | return SImode(scalar_int_mode ((scalar_int_mode::from_int) E_SImode));
3741 | else if (SSE_REGNO_P (regno)(((unsigned long) ((regno)) - (unsigned long) (20) <= (unsigned long) (27) - (unsigned long) (20)) || ((unsigned long) ((regno )) - (unsigned long) (44) <= (unsigned long) (51) - (unsigned long) (44)) || ((unsigned long) ((regno)) - (unsigned long) ( 52) <= (unsigned long) (67) - (unsigned long) (52))))
3742 | return V4SFmode((void) 0, E_V4SFmode);
3743 | else if (MASK_REGNO_P (regno)((unsigned long) ((regno)) - (unsigned long) (68) <= (unsigned long) (75) - (unsigned long) (68)))
3744 | return HImode(scalar_int_mode ((scalar_int_mode::from_int) E_HImode));
3745 | else if (MMX_REGNO_P (regno)((unsigned long) ((regno)) - (unsigned long) (28) <= (unsigned long) (35) - (unsigned long) (28)))
3746 | return V2SImode((void) 0, E_V2SImode);
/* Any other register class reaching here is a caller bug; callers must
   filter with zero_call_used_regno_p first.  */
3747 | else
3748 | gcc_unreachable ()(fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 3748, __FUNCTION__));
3749 | }
3750 | |
3751 | /* Generate a rtx to zero all vector registers together if possible,
3752 | otherwise, return NULL. */
3753 | 
3754 | static rtx
3755 | zero_all_vector_registers (HARD_REG_SET need_zeroed_hardregs)
3756 | {
3757 | if (!TARGET_AVX((global_options.x_ix86_isa_flags & (1UL << 8)) != 0 ))
3758 | return NULL__null;
3759 | 
/* vzeroall clears every vector register at once, so it is only usable
   when ALL SSE registers available on this target were requested;
   otherwise fall back (return NULL) to per-register zeroing.  */
3760 | for (unsigned int regno = 0; regno < FIRST_PSEUDO_REGISTER76; regno++)
3761 | if ((LEGACY_SSE_REGNO_P (regno)((unsigned long) ((regno)) - (unsigned long) (20) <= (unsigned long) (27) - (unsigned long) (20))
3762 | || (TARGET_64BIT((global_options.x_ix86_isa_flags & (1UL << 1)) != 0 )
3763 | && (REX_SSE_REGNO_P (regno)((unsigned long) ((regno)) - (unsigned long) (44) <= (unsigned long) (51) - (unsigned long) (44))
3764 | || (TARGET_AVX512F((global_options.x_ix86_isa_flags & (1UL << 15)) != 0) && EXT_REX_SSE_REGNO_P (regno)((unsigned long) ((regno)) - (unsigned long) (52) <= (unsigned long) (67) - (unsigned long) (52))))))
3765 | && !TEST_HARD_REG_BIT (need_zeroed_hardregs, regno))
3766 | return NULL__null;
3767 | 
3768 | return gen_avx_vzeroall ();
3769 | }
3770 | |
3771 | /* Generate insns to zero all st registers together.
3772 | Return true when zeroing instructions are generated.
3773 | Assume the number of st registers that are zeroed is num_of_st,
3774 | we will emit the following sequence to zero them together:
3775 | fldz; \
3776 | fldz; \
3777 | ...
3778 | fldz; \
3779 | fstp %%st(0); \
3780 | fstp %%st(0); \
3781 | ...
3782 | fstp %%st(0);
3783 | i.e., num_of_st fldz followed by num_of_st fstp to clear the stack
3784 | mark stack slots empty.
3785 | 
3786 | How to compute the num_of_st:
3787 | There is no direct mapping from stack registers to hard register
3788 | numbers. If one stack register needs to be cleared, we don't know
3789 | where in the stack the value remains. So, if any stack register
3790 | needs to be cleared, the whole stack should be cleared. However,
3791 | x87 stack registers that hold the return value should be excluded.
3792 | x87 returns in the top (two for complex values) register, so
3793 | num_of_st should be 7/6 when x87 returns, otherwise it will be 8.
3794 | return the value of num_of_st. */
3795 | 
3796 | 
3797 | static int
3798 | zero_all_st_registers (HARD_REG_SET need_zeroed_hardregs)
3799 | {
3800 | 
3801 | /* If the FPU is disabled, no need to zero all st registers. */
3802 | if (! (TARGET_80387((global_options.x_target_flags & (1U << 1)) != 0) || TARGET_FLOAT_RETURNS_IN_80387(((global_options.x_target_flags & (1U << 11)) != 0 ) && ((global_options.x_target_flags & (1U << 1)) != 0) && !((global_options.x_target_flags & ( 1U << 12)) != 0))))
3803 | return 0;
3804 | 
/* num_of_st is used as a found-any flag here (note the break); the real
   count is recomputed below once we know the return convention.  */
3805 | unsigned int num_of_st = 0;
3806 | for (unsigned int regno = 0; regno < FIRST_PSEUDO_REGISTER76; regno++)
3807 | if ((STACK_REGNO_P (regno)((unsigned long) ((regno)) - (unsigned long) (8) <= (unsigned long) (15) - (unsigned long) (8)) || MMX_REGNO_P (regno)((unsigned long) ((regno)) - (unsigned long) (28) <= (unsigned long) (35) - (unsigned long) (28)))
3808 | && TEST_HARD_REG_BIT (need_zeroed_hardregs, regno))
3809 | {
3810 | num_of_st++;
3811 | break;
3812 | }
3813 | 
3814 | if (num_of_st == 0)
3815 | return 0;
3816 | 
3817 | bool return_with_x87 = false;
3818 | return_with_x87 = (crtl(&x_rtl)->return_rtx
3819 | && (STACK_REG_P (crtl->return_rtx)((((enum rtx_code) ((&x_rtl)->return_rtx)->code) == REG) && ((unsigned long) (((rhs_regno((&x_rtl)-> return_rtx)))) - (unsigned long) (8) <= (unsigned long) (15 ) - (unsigned long) (8)))));
3820 | 
3821 | bool complex_return = false;
3822 | complex_return = (crtl(&x_rtl)->return_rtx
3823 | && COMPLEX_MODE_P (GET_MODE (crtl->return_rtx))(((enum mode_class) mode_class[((machine_mode) ((&x_rtl)-> return_rtx)->mode)]) == MODE_COMPLEX_INT || ((enum mode_class ) mode_class[((machine_mode) ((&x_rtl)->return_rtx)-> mode)]) == MODE_COMPLEX_FLOAT));
3824 | 
3825 | if (return_with_x87)
3826 | if (complex_return)
3827 | num_of_st = 6;
3828 | else
3829 | num_of_st = 7;
3830 | else
3831 | num_of_st = 8;
3832 | 
/* First push num_of_st zeros (fldz), then emit num_of_st self-sets of
   %st(0) carrying REG_DEAD notes, which the backend outputs as the
   "fstp %st(0)" pops described in the header comment above.  */
3833 | rtx st_reg = gen_rtx_REG (XFmode(scalar_float_mode ((scalar_float_mode::from_int) E_XFmode)), FIRST_STACK_REG8);
3834 | for (unsigned int i = 0; i < num_of_st; i++)
3835 | emit_insn (gen_rtx_SET (st_reg, CONST0_RTX (XFmode))gen_rtx_fmt_ee_stat ((SET), (((void) 0, E_VOIDmode)), ((st_reg )), (((const_tiny_rtx[0][(int) ((scalar_float_mode ((scalar_float_mode ::from_int) E_XFmode)))]))) ));
3836 | 
3837 | for (unsigned int i = 0; i < num_of_st; i++)
3838 | {
3839 | rtx insn;
3840 | insn = emit_insn (gen_rtx_SET (st_reg, st_reg)gen_rtx_fmt_ee_stat ((SET), (((void) 0, E_VOIDmode)), ((st_reg )), ((st_reg)) ));
3841 | add_reg_note (insn, REG_DEAD, st_reg);
3842 | }
3843 | return num_of_st;
3844 | }
3845 | |
3846 | |
3847 | /* When the routine exit in MMX mode, if any ST register needs
3848 | to be zeroed, we should clear all MMX registers except the
3849 | RET_MMX_REGNO that holds the return value. */
3850 | static bool
3851 | zero_all_mm_registers (HARD_REG_SET need_zeroed_hardregs,
3852 | unsigned int ret_mmx_regno)
3853 | {
3854 | bool need_zero_all_mm = false;
3855 | for (unsigned int regno = 0; regno < FIRST_PSEUDO_REGISTER76; regno++)
3856 | if (STACK_REGNO_P (regno)((unsigned long) ((regno)) - (unsigned long) (8) <= (unsigned long) (15) - (unsigned long) (8))
3857 | && TEST_HARD_REG_BIT (need_zeroed_hardregs, regno))
3858 | {
3859 | need_zero_all_mm = true;
3860 | break;
3861 | }
3862 | 
3863 | if (!need_zero_all_mm)
3864 | return false;
3865 | 
/* Zero every MMX register in V2SImode, skipping only the one that
   carries the function's return value.  */
3866 | machine_mode mode = V2SImode((void) 0, E_V2SImode);
3867 | for (unsigned int regno = FIRST_MMX_REG28; regno <= LAST_MMX_REG35; regno++)
3868 | if (regno != ret_mmx_regno)
3869 | {
3870 | rtx reg = gen_rtx_REG (mode, regno);
3871 | emit_insn (gen_rtx_SET (reg, CONST0_RTX (mode))gen_rtx_fmt_ee_stat ((SET), (((void) 0, E_VOIDmode)), ((reg)) , (((const_tiny_rtx[0][(int) (mode)]))) ));
3872 | }
3873 | return true;
3874 | }
3875 | |
3876 | /* TARGET_ZERO_CALL_USED_REGS. */
3877 | /* Generate a sequence of instructions that zero registers specified by
3878 | NEED_ZEROED_HARDREGS. Return the ZEROED_HARDREGS that are actually
3879 | zeroed. */
3880 | static HARD_REG_SET
3881 | ix86_zero_call_used_regs (HARD_REG_SET need_zeroed_hardregs)
3882 | {
3883 | HARD_REG_SET zeroed_hardregs;
3884 | bool all_sse_zeroed = false;
3885 | int all_st_zeroed_num = 0;
3886 | bool all_mm_zeroed = false;
3887 | 
3888 | CLEAR_HARD_REG_SET (zeroed_hardregs);
3889 | 
3890 | /* first, let's see whether we can zero all vector registers together. */
3891 | rtx zero_all_vec_insn = zero_all_vector_registers (need_zeroed_hardregs);
3892 | if (zero_all_vec_insn)
3893 | {
3894 | emit_insn (zero_all_vec_insn);
3895 | all_sse_zeroed = true;
3896 | }
3897 | 
3898 | /* mm/st registers are shared registers set, we should follow the following
3899 | rules to clear them:
3900 | MMX exit mode x87 exit mode
3901 | -------------|----------------------|---------------
3902 | uses x87 reg | clear all MMX | clear all x87
3903 | uses MMX reg | clear individual MMX | clear all x87
3904 | x87 + MMX | clear all MMX | clear all x87
3905 | 
3906 | first, we should decide which mode (MMX mode or x87 mode) the function
3907 | exit with. */
3908 | 
3909 | bool exit_with_mmx_mode = (crtl(&x_rtl)->return_rtx
3910 | && (MMX_REG_P (crtl->return_rtx)((((enum rtx_code) ((&x_rtl)->return_rtx)->code) == REG) && ((unsigned long) (((rhs_regno((&x_rtl)-> return_rtx)))) - (unsigned long) (28) <= (unsigned long) ( 35) - (unsigned long) (28)))));
3911 | 
3912 | if (!exit_with_mmx_mode)
3913 | /* x87 exit mode, we should zero all st registers together. */
3914 | {
3915 | all_st_zeroed_num = zero_all_st_registers (need_zeroed_hardregs);
3916 | 
3917 | if (all_st_zeroed_num > 0)
3918 | for (unsigned int regno = FIRST_STACK_REG8; regno <= LAST_STACK_REG15; regno++)
3919 | /* x87 stack registers that hold the return value should be excluded.
3920 | x87 returns in the top (two for complex values) register. */
3921 | if (all_st_zeroed_num == 8
3922 | || !((all_st_zeroed_num >= 6 && regno == REGNO (crtl->return_rtx)(rhs_regno((&x_rtl)->return_rtx)))
3923 | || (all_st_zeroed_num == 6
3924 | && (regno == (REGNO (crtl->return_rtx)(rhs_regno((&x_rtl)->return_rtx)) + 1)))))
3925 | SET_HARD_REG_BIT (zeroed_hardregs, regno);
3926 | }
3927 | else
3928 | /* MMX exit mode, check whether we can zero all mm registers. */
3929 | {
3930 | unsigned int exit_mmx_regno = REGNO (crtl->return_rtx)(rhs_regno((&x_rtl)->return_rtx));
3931 | all_mm_zeroed = zero_all_mm_registers (need_zeroed_hardregs,
3932 | exit_mmx_regno);
3933 | if (all_mm_zeroed)
3934 | for (unsigned int regno = FIRST_MMX_REG28; regno <= LAST_MMX_REG35; regno++)
3935 | if (regno != exit_mmx_regno)
3936 | SET_HARD_REG_BIT (zeroed_hardregs, regno);
3937 | }
3938 | 
3939 | /* Now, generate instructions to zero all the other registers. */
3940 | 
3941 | for (unsigned int regno = 0; regno < FIRST_PSEUDO_REGISTER76; regno++)
3942 | {
3943 | if (!TEST_HARD_REG_BIT (need_zeroed_hardregs, regno))
3944 | continue;
3945 | if (!zero_call_used_regno_p (regno, all_sse_zeroed,
3946 | exit_with_mmx_mode && !all_mm_zeroed))
3947 | continue;
3948 | 
3949 | SET_HARD_REG_BIT (zeroed_hardregs, regno);
3950 | 
3951 | machine_mode mode = zero_call_used_regno_mode (regno);
3952 | 
3953 | rtx reg = gen_rtx_REG (mode, regno);
3954 | rtx tmp = gen_rtx_SET (reg, CONST0_RTX (mode))gen_rtx_fmt_ee_stat ((SET), (((void) 0, E_VOIDmode)), ((reg)) , (((const_tiny_rtx[0][(int) (mode)]))) );
3955 | 
3956 | switch (mode)
3957 | {
3958 | case E_SImode:
3959 | if (!TARGET_USE_MOV0ix86_tune_features[X86_TUNE_USE_MOV0] || optimize_insn_for_size_p ())
3960 | {
/* Zeroing a GPR without mov0 uses xor, which clobbers the flags
   register, so the SET is wrapped in a PARALLEL with that clobber.  */
3961 | rtx clob = gen_rtx_CLOBBER (VOIDmode,gen_rtx_fmt_e_stat ((CLOBBER), ((((void) 0, E_VOIDmode))), (( gen_rtx_REG (((void) 0, E_CCmode), 17))) )
3962 | gen_rtx_REG (CCmode,gen_rtx_fmt_e_stat ((CLOBBER), ((((void) 0, E_VOIDmode))), (( gen_rtx_REG (((void) 0, E_CCmode), 17))) )
3963 | FLAGS_REG))gen_rtx_fmt_e_stat ((CLOBBER), ((((void) 0, E_VOIDmode))), (( gen_rtx_REG (((void) 0, E_CCmode), 17))) );
3964 | tmp = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2,gen_rtx_fmt_E_stat ((PARALLEL), ((((void) 0, E_VOIDmode))), ( (gen_rtvec (2, tmp, clob))) )
3965 | tmp,gen_rtx_fmt_E_stat ((PARALLEL), ((((void) 0, E_VOIDmode))), ( (gen_rtvec (2, tmp, clob))) )
3966 | clob))gen_rtx_fmt_E_stat ((PARALLEL), ((((void) 0, E_VOIDmode))), ( (gen_rtvec (2, tmp, clob))) );
3967 | }
3968 | /* FALLTHRU. */
3969 | 
3970 | case E_V4SFmode:
3971 | case E_HImode:
3972 | case E_V2SImode:
3973 | emit_insn (tmp);
3974 | break;
3975 | 
3976 | default:
3977 | gcc_unreachable ()(fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 3977, __FUNCTION__));
3978 | }
3979 | }
3980 | return zeroed_hardregs;
3981 | }
3982 | |
3983 | /* Define how to find the value returned by a function.
3984 | VALTYPE is the data type of the value (as a tree).
3985 | If the precise function being called is known, FUNC is its FUNCTION_DECL;
3986 | otherwise, FUNC is 0. */
3987 | 
3988 | static rtx
3989 | function_value_32 (machine_mode orig_mode, machine_mode mode,
3990 | const_tree fntype, const_tree fn)
3991 | {
3992 | unsigned int regno;
3993 | 
3994 | /* 8-byte vector modes in %mm0. See ix86_return_in_memory for where
3995 | we normally prevent this case when mmx is not available. However
3996 | some ABIs may require the result to be returned like DImode. */
3997 | if (VECTOR_MODE_P (mode)(((enum mode_class) mode_class[mode]) == MODE_VECTOR_BOOL || ( (enum mode_class) mode_class[mode]) == MODE_VECTOR_INT || ((enum mode_class) mode_class[mode]) == MODE_VECTOR_FLOAT || ((enum mode_class) mode_class[mode]) == MODE_VECTOR_FRACT || ((enum mode_class) mode_class[mode]) == MODE_VECTOR_UFRACT || ((enum mode_class) mode_class[mode]) == MODE_VECTOR_ACCUM || ((enum mode_class) mode_class[mode]) == MODE_VECTOR_UACCUM) && GET_MODE_SIZE (mode)((unsigned short) mode_to_bytes (mode).coeffs[0]) == 8)
3998 | regno = FIRST_MMX_REG28;
3999 | 
4000 | /* 16-byte vector modes in %xmm0. See ix86_return_in_memory for where
4001 | we prevent this case when sse is not available. However some ABIs
4002 | may require the result to be returned like integer TImode. */
4003 | else if (mode == TImode(scalar_int_mode ((scalar_int_mode::from_int) E_TImode))
4004 | || (VECTOR_MODE_P (mode)(((enum mode_class) mode_class[mode]) == MODE_VECTOR_BOOL || ( (enum mode_class) mode_class[mode]) == MODE_VECTOR_INT || ((enum mode_class) mode_class[mode]) == MODE_VECTOR_FLOAT || ((enum mode_class) mode_class[mode]) == MODE_VECTOR_FRACT || ((enum mode_class) mode_class[mode]) == MODE_VECTOR_UFRACT || ((enum mode_class) mode_class[mode]) == MODE_VECTOR_ACCUM || ((enum mode_class) mode_class[mode]) == MODE_VECTOR_UACCUM) && GET_MODE_SIZE (mode)((unsigned short) mode_to_bytes (mode).coeffs[0]) == 16))
4005 | regno = FIRST_SSE_REG20;
4006 | 
4007 | /* 32-byte vector modes in %ymm0. */
4008 | else if (VECTOR_MODE_P (mode)(((enum mode_class) mode_class[mode]) == MODE_VECTOR_BOOL || ( (enum mode_class) mode_class[mode]) == MODE_VECTOR_INT || ((enum mode_class) mode_class[mode]) == MODE_VECTOR_FLOAT || ((enum mode_class) mode_class[mode]) == MODE_VECTOR_FRACT || ((enum mode_class) mode_class[mode]) == MODE_VECTOR_UFRACT || ((enum mode_class) mode_class[mode]) == MODE_VECTOR_ACCUM || ((enum mode_class) mode_class[mode]) == MODE_VECTOR_UACCUM) && GET_MODE_SIZE (mode)((unsigned short) mode_to_bytes (mode).coeffs[0]) == 32)
4009 | regno = FIRST_SSE_REG20;
4010 | 
4011 | /* 64-byte vector modes in %zmm0. */
4012 | else if (VECTOR_MODE_P (mode)(((enum mode_class) mode_class[mode]) == MODE_VECTOR_BOOL || ( (enum mode_class) mode_class[mode]) == MODE_VECTOR_INT || ((enum mode_class) mode_class[mode]) == MODE_VECTOR_FLOAT || ((enum mode_class) mode_class[mode]) == MODE_VECTOR_FRACT || ((enum mode_class) mode_class[mode]) == MODE_VECTOR_UFRACT || ((enum mode_class) mode_class[mode]) == MODE_VECTOR_ACCUM || ((enum mode_class) mode_class[mode]) == MODE_VECTOR_UACCUM) && GET_MODE_SIZE (mode)((unsigned short) mode_to_bytes (mode).coeffs[0]) == 64)
4013 | regno = FIRST_SSE_REG20;
4014 | 
4015 | /* Floating point return values in %st(0) (unless -mno-fp-ret-in-387). */
4016 | else if (X87_FLOAT_MODE_P (mode)(((global_options.x_target_flags & (1U << 1)) != 0) && ((mode) == (scalar_float_mode ((scalar_float_mode ::from_int) E_SFmode)) || (mode) == (scalar_float_mode ((scalar_float_mode ::from_int) E_DFmode)) || (mode) == (scalar_float_mode ((scalar_float_mode ::from_int) E_XFmode)))) && TARGET_FLOAT_RETURNS_IN_80387(((global_options.x_target_flags & (1U << 11)) != 0 ) && ((global_options.x_target_flags & (1U << 1)) != 0) && !((global_options.x_target_flags & ( 1U << 12)) != 0)))
4017 | regno = FIRST_FLOAT_REG8;
4018 | else
4019 | /* Most things go in %eax. */
4020 | regno = AX_REG0;
4021 | 
4022 | /* Return __bf16/ _Float16/_Complex _Float16 by sse register. */
4023 | if (mode == HFmode(scalar_float_mode ((scalar_float_mode::from_int) E_HFmode)) || mode == BFmode(scalar_float_mode ((scalar_float_mode::from_int) E_BFmode)))
4024 | regno = FIRST_SSE_REG20;
4025 | if (mode == HCmode(complex_mode ((complex_mode::from_int) E_HCmode)))
4026 | {
4027 | rtx ret = gen_rtx_PARALLEL (mode, rtvec_alloc(1))gen_rtx_fmt_E_stat ((PARALLEL), ((mode)), ((rtvec_alloc(1))) );
4028 | XVECEXP (ret, 0, 0)(((((ret)->u.fld[0]).rt_rtvec))->elem[0])
4029 | = gen_rtx_EXPR_LIST (VOIDmode((void) 0, E_VOIDmode),
4030 | gen_rtx_REG (SImode(scalar_int_mode ((scalar_int_mode::from_int) E_SImode)), FIRST_SSE_REG20),
4031 | GEN_INT (0)gen_rtx_CONST_INT (((void) 0, E_VOIDmode), (0)));
4032 | return ret;
4033 | }
4034 | 
4035 | /* Override FP return register with %xmm0 for local functions when
4036 | SSE math is enabled or for functions with sseregparm attribute. */
4037 | if ((fn || fntype) && (mode == SFmode(scalar_float_mode ((scalar_float_mode::from_int) E_SFmode)) || mode == DFmode(scalar_float_mode ((scalar_float_mode::from_int) E_DFmode))))
4038 | {
4039 | int sse_level = ix86_function_sseregparm (fntype, fn, false);
4040 | if (sse_level == -1)
4041 | {
4042 | error ("calling %qD with SSE calling convention without "
4043 | "SSE/SSE2 enabled", fn);
4044 | sorry ("this is a GCC bug that can be worked around by adding "
4045 | "attribute used to function called");
4046 | }
4047 | else if ((sse_level >= 1 && mode == SFmode(scalar_float_mode ((scalar_float_mode::from_int) E_SFmode)))
4048 | || (sse_level == 2 && mode == DFmode(scalar_float_mode ((scalar_float_mode::from_int) E_DFmode))))
4049 | regno = FIRST_SSE_REG20;
4050 | }
4051 | 
4052 | /* OImode shouldn't be used directly. */
4053 | gcc_assert (mode != OImode)((void)(!(mode != (scalar_int_mode ((scalar_int_mode::from_int ) E_OImode))) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 4053, __FUNCTION__), 0 : 0));
4054 | 
4055 | return gen_rtx_REG (orig_mode, regno);
4056 | }
4057 | |
/* 64-bit SysV return-value selection.  VALTYPE may be NULL for libcalls,
   in which case the register is picked purely from MODE.  */
4058 | static rtx
4059 | function_value_64 (machine_mode orig_mode, machine_mode mode,
4060 | const_tree valtype)
4061 | {
4062 | rtx ret;
4063 | 
4064 | /* Handle libcalls, which don't provide a type node. */
4065 | if (valtype == NULL__null)
4066 | {
4067 | unsigned int regno;
4068 | 
4069 | switch (mode)
4070 | {
4071 | case E_BFmode:
4072 | case E_HFmode:
4073 | case E_HCmode:
4074 | case E_SFmode:
4075 | case E_SCmode:
4076 | case E_DFmode:
4077 | case E_DCmode:
4078 | case E_TFmode:
4079 | case E_SDmode:
4080 | case E_DDmode:
4081 | case E_TDmode:
4082 | regno = FIRST_SSE_REG20;
4083 | break;
4084 | case E_XFmode:
4085 | case E_XCmode:
4086 | regno = FIRST_FLOAT_REG8;
4087 | break;
/* NOTE(review): TCmode gets no register here; presumably such libcall
   values are returned in memory -- confirm against callers.  */
4088 | case E_TCmode:
4089 | return NULL__null;
4090 | default:
4091 | regno = AX_REG0;
4092 | }
4093 | 
4094 | return gen_rtx_REG (mode, regno);
4095 | }
4096 | else if (POINTER_TYPE_P (valtype)(((enum tree_code) (valtype)->base.code) == POINTER_TYPE || ((enum tree_code) (valtype)->base.code) == REFERENCE_TYPE ))
4097 | {
4098 | /* Pointers are always returned in word_mode. */
4099 | mode = word_mode;
4100 | }
4101 | 
4102 | ret = construct_container (mode, orig_mode, valtype, 1,
4103 | X86_64_REGPARM_MAX6, X86_64_SSE_REGPARM_MAX8,
4104 | x86_64_int_return_registers, 0);
4105 | 
4106 | /* For zero sized structures, construct_container returns NULL, but we
4107 | need to keep rest of compiler happy by returning meaningful value. */
4108 | if (!ret)
4109 | ret = gen_rtx_REG (orig_mode, AX_REG0);
4110 | 
4111 | return ret;
4112 | }
4113 | |
/* MS ABI, 32-bit: like function_value_32, except x87 float modes come
   back in %st(0) when larger than 8 bytes or when the value is not an
   aggregate (or has no type node).  */
4114 | static rtx
4115 | function_value_ms_32 (machine_mode orig_mode, machine_mode mode,
4116 | const_tree fntype, const_tree fn, const_tree valtype)
4117 | {
4118 | unsigned int regno;
4119 | 
4120 | /* Floating point return values in %st(0)
4121 | (unless -mno-fp-ret-in-387 or aggregate type of up to 8 bytes). */
4122 | if (X87_FLOAT_MODE_P (mode)(((global_options.x_target_flags & (1U << 1)) != 0) && ((mode) == (scalar_float_mode ((scalar_float_mode ::from_int) E_SFmode)) || (mode) == (scalar_float_mode ((scalar_float_mode ::from_int) E_DFmode)) || (mode) == (scalar_float_mode ((scalar_float_mode ::from_int) E_XFmode)))) && TARGET_FLOAT_RETURNS_IN_80387(((global_options.x_target_flags & (1U << 11)) != 0 ) && ((global_options.x_target_flags & (1U << 1)) != 0) && !((global_options.x_target_flags & ( 1U << 12)) != 0))
4123 | && (GET_MODE_SIZE (mode)((unsigned short) mode_to_bytes (mode).coeffs[0]) > 8
4124 | || valtype == NULL_TREE(tree) __null || !AGGREGATE_TYPE_P (valtype)(((enum tree_code) (valtype)->base.code) == ARRAY_TYPE || ( ((enum tree_code) (valtype)->base.code) == RECORD_TYPE || ( (enum tree_code) (valtype)->base.code) == UNION_TYPE || (( enum tree_code) (valtype)->base.code) == QUAL_UNION_TYPE))))
4125 | {
4126 | regno = FIRST_FLOAT_REG8;
4127 | return gen_rtx_REG (orig_mode, regno);
4128 | }
4129 | else
4130 | return function_value_32(orig_mode, mode, fntype,fn);
4131 | }
4132 | |
/* MS ABI, 64-bit: return-value register selection.  Defaults to %rax;
   with SSE enabled, 16-byte scalar/vector modes and SFmode/DFmode
   scalars come back in %xmm0, subject to the type checks below.  */
4133 | static rtx
4134 | function_value_ms_64 (machine_mode orig_mode, machine_mode mode,
4135 | const_tree valtype)
4136 | {
4137 | unsigned int regno = AX_REG0;
4138 | 
4139 | if (TARGET_SSE((global_options.x_ix86_isa_flags & (1UL << 50)) != 0))
4140 | {
4141 | switch (GET_MODE_SIZE (mode)((unsigned short) mode_to_bytes (mode).coeffs[0]))
4142 | {
4143 | case 16:
/* Only integer/vector/float-vector types may use %xmm0 here; anything
   else 16 bytes wide stays in %rax.  (A duplicated
   !VECTOR_INTEGER_TYPE_P test was removed -- it repeated the identical
   condition twice and had no effect.)  */
4144 | if (valtype != NULL_TREE(tree) __null
4145 | && !VECTOR_INTEGER_TYPE_P (valtype)((((enum tree_code) (valtype)->base.code) == VECTOR_TYPE) && ((enum tree_code) (((contains_struct_check ((valtype), (TS_TYPED ), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 4145, __FUNCTION__))->typed.type))->base.code) == INTEGER_TYPE )
4147 | && !INTEGRAL_TYPE_P (valtype)(((enum tree_code) (valtype)->base.code) == ENUMERAL_TYPE || ((enum tree_code) (valtype)->base.code) == BOOLEAN_TYPE || ((enum tree_code) (valtype)->base.code) == INTEGER_TYPE)
4148 | && !VECTOR_FLOAT_TYPE_P (valtype)((((enum tree_code) (valtype)->base.code) == VECTOR_TYPE) && ((enum tree_code) (((contains_struct_check ((valtype), (TS_TYPED ), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 4148, __FUNCTION__))->typed.type))->base.code) == REAL_TYPE ))
4149 | break;
4150 | if ((SCALAR_INT_MODE_P (mode)(((enum mode_class) mode_class[mode]) == MODE_INT || ((enum mode_class ) mode_class[mode]) == MODE_PARTIAL_INT) || VECTOR_MODE_P (mode)(((enum mode_class) mode_class[mode]) == MODE_VECTOR_BOOL || ( (enum mode_class) mode_class[mode]) == MODE_VECTOR_INT || ((enum mode_class) mode_class[mode]) == MODE_VECTOR_FLOAT || ((enum mode_class) mode_class[mode]) == MODE_VECTOR_FRACT || ((enum mode_class) mode_class[mode]) == MODE_VECTOR_UFRACT || ((enum mode_class) mode_class[mode]) == MODE_VECTOR_ACCUM || ((enum mode_class) mode_class[mode]) == MODE_VECTOR_UACCUM))
4151 | && !COMPLEX_MODE_P (mode)(((enum mode_class) mode_class[mode]) == MODE_COMPLEX_INT || ( (enum mode_class) mode_class[mode]) == MODE_COMPLEX_FLOAT))
4152 | regno = FIRST_SSE_REG20;
4153 | break;
4154 | case 8:
4155 | case 4:
/* Small aggregates are returned in %rax even for FP modes.  */
4156 | if (valtype != NULL_TREE(tree) __null && AGGREGATE_TYPE_P (valtype)(((enum tree_code) (valtype)->base.code) == ARRAY_TYPE || ( ((enum tree_code) (valtype)->base.code) == RECORD_TYPE || ( (enum tree_code) (valtype)->base.code) == UNION_TYPE || (( enum tree_code) (valtype)->base.code) == QUAL_UNION_TYPE)))
4157 | break;
4158 | if (mode == SFmode(scalar_float_mode ((scalar_float_mode::from_int) E_SFmode)) || mode == DFmode(scalar_float_mode ((scalar_float_mode::from_int) E_DFmode)))
4159 | regno = FIRST_SSE_REG20;
4160 | break;
4161 | default:
4162 | break;
4163 | }
4164 | }
4165 | return gen_rtx_REG (orig_mode, regno);
4166 | }
4167 | |
/* Dispatch return-value selection to the ABI- and bitness-specific
   helper.  FNTYPE_OR_DECL may be a FUNCTION_DECL, a function type,
   or NULL (libcalls).  */
4168 | static rtx
4169 | ix86_function_value_1 (const_tree valtype, const_tree fntype_or_decl,
4170 | machine_mode orig_mode, machine_mode mode)
4171 | {
4172 | const_tree fn, fntype;
4173 | 
4174 | fn = NULL_TREE(tree) __null;
4175 | if (fntype_or_decl && DECL_P (fntype_or_decl)(tree_code_type_tmpl <0>::tree_code_type[(int) (((enum tree_code ) (fntype_or_decl)->base.code))] == tcc_declaration))
4176 | fn = fntype_or_decl;
4177 | fntype = fn ? TREE_TYPE (fn)((contains_struct_check ((fn), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 4177, __FUNCTION__))->typed.type) : fntype_or_decl;
4178 | 
4179 | if (ix86_function_type_abi (fntype) == MS_ABI)
4180 | {
4181 | if (TARGET_64BIT((global_options.x_ix86_isa_flags & (1UL << 1)) != 0 ))
4182 | return function_value_ms_64 (orig_mode, mode, valtype);
4183 | else
4184 | return function_value_ms_32 (orig_mode, mode, fntype, fn, valtype);
4185 | }
4186 | else if (TARGET_64BIT((global_options.x_ix86_isa_flags & (1UL << 1)) != 0 ))
4187 | return function_value_64 (orig_mode, mode, valtype);
4188 | else
4189 | return function_value_32 (orig_mode, mode, fntype, fn);
4190 | }
4191 | |
/* Implements the TARGET_FUNCTION_VALUE hook: compute VALTYPE's natural
   mode and delegate to ix86_function_value_1.  */
4192 | static rtx
4193 | ix86_function_value (const_tree valtype, const_tree fntype_or_decl, bool)
4194 | {
4195 | machine_mode mode, orig_mode;
4196 | 
4197 | orig_mode = TYPE_MODE (valtype)((((enum tree_code) ((tree_class_check ((valtype), (tcc_type) , "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 4197, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode (valtype) : (valtype)->type_common.mode);
4198 | mode = type_natural_mode (valtype, NULL__null, true);
4199 | return ix86_function_value_1 (valtype, fntype_or_decl, orig_mode, mode);
4200 | }
4201 | |
4202 | /* Pointer function arguments and return values are promoted to
4203 | word_mode for normal functions. */
4204 | 
4205 | static machine_mode
4206 | ix86_promote_function_mode (const_tree type, machine_mode mode,
4207 | int *punsignedp, const_tree fntype,
4208 | int for_return)
4209 | {
/* Only promote when the current function is a normal one and the value
   is a pointer/reference type; everything else takes the default rule.  */
4210 | if (cfun(cfun + 0)->machine->func_type == TYPE_NORMAL
4211 | && type != NULL_TREE(tree) __null
4212 | && POINTER_TYPE_P (type)(((enum tree_code) (type)->base.code) == POINTER_TYPE || ( (enum tree_code) (type)->base.code) == REFERENCE_TYPE))
4213 | {
4214 | *punsignedp = POINTERS_EXTEND_UNSIGNED1;
4215 | return word_mode;
4216 | }
4217 | return default_promote_function_mode (type, mode, punsignedp, fntype,
4218 | for_return);
4219 | }
4220 | |
4221 | /* Return true if a structure, union or array with MODE containing FIELD
4222 | should be accessed using BLKmode. */
4223 | 
4224 | static bool
4225 | ix86_member_type_forces_blk (const_tree field, machine_mode mode)
4226 | {
4227 | /* Union with XFmode must be in BLKmode. */
/* NOTE(review): presumably because XFmode's 80-bit payload plus padding
   makes whole-mode union access unsafe -- confirm rationale.  */
4228 | return (mode == XFmode(scalar_float_mode ((scalar_float_mode::from_int) E_XFmode))
4229 | && (TREE_CODE (DECL_FIELD_CONTEXT (field))((enum tree_code) (((tree_check ((field), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 4229, __FUNCTION__, (FIELD_DECL)))->decl_minimal.context ))->base.code) == UNION_TYPE
4230 | || TREE_CODE (DECL_FIELD_CONTEXT (field))((enum tree_code) (((tree_check ((field), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 4230, __FUNCTION__, (FIELD_DECL)))->decl_minimal.context ))->base.code) == QUAL_UNION_TYPE));
4231 | }
4232 | |
/* Return the register used for a libcall value of MODE.  Libcalls have
   no type node, so MODE serves as both the original and natural mode.  */
4233 | rtx
4234 | ix86_libcall_value (machine_mode mode)
4235 | {
4236 | return ix86_function_value_1 (NULL__null, NULL__null, mode, mode);
4237 | }
4238 | |
4239 | /* Return true iff type is returned in memory. */
4240 | 
4241 | static bool
4242 | ix86_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED__attribute__ ((__unused__)))
4243 | {
4244 | const machine_mode mode = type_natural_mode (type, NULL__null, true);
4245 | HOST_WIDE_INTlong size;
4246 | 
4247 | if (TARGET_64BIT((global_options.x_ix86_isa_flags & (1UL << 1)) != 0 ))
4248 | {
4249 | if (ix86_function_type_abi (fntype) == MS_ABI)
4250 | {
4251 | size = int_size_in_bytes (type);
4252 | 
4253 | /* __m128 is returned in xmm0. */
4254 | if ((!type || VECTOR_INTEGER_TYPE_P (type)((((enum tree_code) (type)->base.code) == VECTOR_TYPE) && ((enum tree_code) (((contains_struct_check ((type), (TS_TYPED ), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 4254, __FUNCTION__))->typed.type))->base.code) == INTEGER_TYPE )
4255 | || INTEGRAL_TYPE_P (type)(((enum tree_code) (type)->base.code) == ENUMERAL_TYPE || ( (enum tree_code) (type)->base.code) == BOOLEAN_TYPE || ((enum tree_code) (type)->base.code) == INTEGER_TYPE)
4256 | || VECTOR_FLOAT_TYPE_P (type)((((enum tree_code) (type)->base.code) == VECTOR_TYPE) && ((enum tree_code) (((contains_struct_check ((type), (TS_TYPED ), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 4256, __FUNCTION__))->typed.type))->base.code) == REAL_TYPE ))
4257 | && (SCALAR_INT_MODE_P (mode)(((enum mode_class) mode_class[mode]) == MODE_INT || ((enum mode_class ) mode_class[mode]) == MODE_PARTIAL_INT) || VECTOR_MODE_P (mode)(((enum mode_class) mode_class[mode]) == MODE_VECTOR_BOOL || ( (enum mode_class) mode_class[mode]) == MODE_VECTOR_INT || ((enum mode_class) mode_class[mode]) == MODE_VECTOR_FLOAT || ((enum mode_class) mode_class[mode]) == MODE_VECTOR_FRACT || ((enum mode_class) mode_class[mode]) == MODE_VECTOR_UFRACT || ((enum mode_class) mode_class[mode]) == MODE_VECTOR_ACCUM || ((enum mode_class) mode_class[mode]) == MODE_VECTOR_UACCUM))
4258 | && !COMPLEX_MODE_P (mode)(((enum mode_class) mode_class[mode]) == MODE_COMPLEX_INT || ( (enum mode_class) mode_class[mode]) == MODE_COMPLEX_FLOAT)
4259 | && (GET_MODE_SIZE (mode)((unsigned short) mode_to_bytes (mode).coeffs[0]) == 16 || size == 16))
4260 | return false;
4261 | 
4262 | /* Otherwise, the size must be exactly in [1248]. */
4263 | return size != 1 && size != 2 && size != 4 && size != 8;
4264 | }
4265 | else
4266 | {
4267 | int needed_intregs, needed_sseregs;
4268 | 
4269 | return examine_argument (mode, type, 1,
4270 | &needed_intregs, &needed_sseregs);
4271 | }
4272 | }
4273 | else
4274 | {
4275 | size = int_size_in_bytes (type);
4276 | 
4277 | /* Intel MCU psABI returns scalars and aggregates no larger than 8
4278 | bytes in registers. */
4279 | if (TARGET_IAMCU((global_options.x_target_flags & (1U << 12)) != 0))
4280 | return VECTOR_MODE_P (mode)(((enum mode_class) mode_class[mode]) == MODE_VECTOR_BOOL || ( (enum mode_class) mode_class[mode]) == MODE_VECTOR_INT || ((enum mode_class) mode_class[mode]) == MODE_VECTOR_FLOAT || ((enum mode_class) mode_class[mode]) == MODE_VECTOR_FRACT || ((enum mode_class) mode_class[mode]) == MODE_VECTOR_UFRACT || ((enum mode_class) mode_class[mode]) == MODE_VECTOR_ACCUM || ((enum mode_class) mode_class[mode]) == MODE_VECTOR_UACCUM) || size < 0 || size > 8;
4281 | 
4282 | if (mode == BLKmode((void) 0, E_BLKmode))
4283 | return true;
4284 | 
4285 | if (MS_AGGREGATE_RETURN0 && AGGREGATE_TYPE_P (type)(((enum tree_code) (type)->base.code) == ARRAY_TYPE || ((( enum tree_code) (type)->base.code) == RECORD_TYPE || ((enum tree_code) (type)->base.code) == UNION_TYPE || ((enum tree_code ) (type)->base.code) == QUAL_UNION_TYPE)) && size <= 8)
4286 | return false;
4287 | 
4288 | if (VECTOR_MODE_P (mode)(((enum mode_class) mode_class[mode]) == MODE_VECTOR_BOOL || ( (enum mode_class) mode_class[mode]) == MODE_VECTOR_INT || ((enum mode_class) mode_class[mode]) == MODE_VECTOR_FLOAT || ((enum mode_class) mode_class[mode]) == MODE_VECTOR_FRACT || ((enum mode_class) mode_class[mode]) == MODE_VECTOR_UFRACT || ((enum mode_class) mode_class[mode]) == MODE_VECTOR_ACCUM || ((enum mode_class) mode_class[mode]) == MODE_VECTOR_UACCUM) || mode == TImode(scalar_int_mode ((scalar_int_mode::from_int) E_TImode)))
4289 | {
4290 | /* User-created vectors small enough to fit in EAX. */
4291 | if (size < 8)
4292 | return false;
4293 | 
4294 | /* Unless ABI prescribes otherwise,
4295 | MMX/3dNow values are returned in MM0 if available. */
4296 | 
4297 | if (size == 8)
4298 | return TARGET_VECT8_RETURNS((global_options.x_target_flags & (1U << 29)) != 0) || !TARGET_MMX((global_options.x_ix86_isa_flags & (1UL << 36)) != 0);
4299 | 
4300 | /* SSE values are returned in XMM0 if available. */
4301 | if (size == 16)
4302 | return !TARGET_SSE((global_options.x_ix86_isa_flags & (1UL << 50)) != 0);
4303 | 
4304 | /* AVX values are returned in YMM0 if available. */
4305 | if (size == 32)
4306 | return !TARGET_AVX((global_options.x_ix86_isa_flags & (1UL << 8)) != 0 );
4307 | 
4308 | /* AVX512F values are returned in ZMM0 if available. */
4309 | if (size == 64)
4310 | return !TARGET_AVX512F((global_options.x_ix86_isa_flags & (1UL << 15)) != 0);
4311 | }
4312 | 
4313 | if (mode == XFmode(scalar_float_mode ((scalar_float_mode::from_int) E_XFmode)))
4314 | return false;
4315 | 
4316 | if (size > 12)
4317 | return true;
4318 | 
4319 | /* OImode shouldn't be used directly. */
4320 | gcc_assert (mode != OImode)((void)(!(mode != (scalar_int_mode ((scalar_int_mode::from_int ) E_OImode))) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 4320, __FUNCTION__), 0 : 0));
4321 | 
4322 | return false;
4323 | }
4324 | }
4325 | |
4326 | /* Implement TARGET_PUSH_ARGUMENT. */ |
4327 | |
/* Implement TARGET_PUSH_ARGUMENT.
   Return true when an NPUSH-byte argument should be placed on the stack
   with push instructions, false when it should instead be stored into a
   pre-allocated outgoing-argument area.  Pushes are rejected for large
   arguments when SSE2 vector stores are available (>= 16 bytes in 64-bit
   mode, >= 8 bytes in 32-bit mode), when -mno-push-args is in effect, or
   when ACCUMULATE_OUTGOING_ARGS holds for this function.  */
4328 | static bool
4329 | ix86_push_argument (unsigned int npush)
4330 | {
4331 | /* If SSE2 is available, use vector move to put large argument onto
4332 | stack. NB: In 32-bit mode, use 8-byte vector move. */
4333 | return ((!TARGET_SSE2((global_options.x_ix86_isa_flags & (1UL << 51)) != 0) || npush < (TARGET_64BIT((global_options.x_ix86_isa_flags & (1UL << 1)) != 0 ) ? 16 : 8))
4334 | && TARGET_PUSH_ARGS((global_options.x_target_flags & (1U << 19)) == 0)
4335 | && !ACCUMULATE_OUTGOING_ARGS((((global_options.x_target_flags & (1U << 3)) != 0 ) && optimize_function_for_speed_p ((cfun + 0))) || ( (cfun + 0)->machine->func_type != TYPE_NORMAL && (&x_rtl)->stack_realign_needed) || ((global_options.x_target_flags & (1U << 26)) != 0) || (((global_options.x_ix86_isa_flags & (1UL << 1)) != 0) && ix86_cfun_abi () == MS_ABI) || (0 && (&x_rtl)->profile)));
4336 | }
4337 | |
4338 | |
4339 | /* Create the va_list data type. */ |
4340 | |
/* Build the x86-64 SysV __va_list_tag record type:
     unsigned gp_offset;          -- byte offset of next GP reg in save area
     unsigned fp_offset;          -- byte offset of next FP reg in save area
     void *overflow_arg_area;     -- next stack-passed argument
     void *reg_save_area;         -- register save area
   The returned type is an array of one such record (the C-level
   __builtin_va_list).  The record itself is tagged with the
   "sysv_abi va_list" attribute so canonical_va_list_type can identify
   it even across LTO type merging (see comment in
   ix86_build_builtin_va_list).  */
4341 | static tree
4342 | ix86_build_builtin_va_list_64 (void)
4343 | {
4344 | tree f_gpr, f_fpr, f_ovf, f_sav, record, type_decl;
4345 |
4346 | record = lang_hooks.types.make_type (RECORD_TYPE);
4347 | type_decl = build_decl (BUILTINS_LOCATION((location_t) 1),
4348 | TYPE_DECL, get_identifier ("__va_list_tag")(__builtin_constant_p ("__va_list_tag") ? get_identifier_with_length (("__va_list_tag"), strlen ("__va_list_tag")) : get_identifier ("__va_list_tag")), record);
4349 |
4350 | f_gpr = build_decl (BUILTINS_LOCATION((location_t) 1),
4351 | FIELD_DECL, get_identifier ("gp_offset")(__builtin_constant_p ("gp_offset") ? get_identifier_with_length (("gp_offset"), strlen ("gp_offset")) : get_identifier ("gp_offset" )),
4352 | unsigned_type_nodeinteger_types[itk_unsigned_int]);
4353 | f_fpr = build_decl (BUILTINS_LOCATION((location_t) 1),
4354 | FIELD_DECL, get_identifier ("fp_offset")(__builtin_constant_p ("fp_offset") ? get_identifier_with_length (("fp_offset"), strlen ("fp_offset")) : get_identifier ("fp_offset" )),
4355 | unsigned_type_nodeinteger_types[itk_unsigned_int]);
4356 | f_ovf = build_decl (BUILTINS_LOCATION((location_t) 1),
4357 | FIELD_DECL, get_identifier ("overflow_arg_area")(__builtin_constant_p ("overflow_arg_area") ? get_identifier_with_length (("overflow_arg_area"), strlen ("overflow_arg_area")) : get_identifier ("overflow_arg_area")),
4358 | ptr_type_nodeglobal_trees[TI_PTR_TYPE]);
4359 | f_sav = build_decl (BUILTINS_LOCATION((location_t) 1),
4360 | FIELD_DECL, get_identifier ("reg_save_area")(__builtin_constant_p ("reg_save_area") ? get_identifier_with_length (("reg_save_area"), strlen ("reg_save_area")) : get_identifier ("reg_save_area")),
4361 | ptr_type_nodeglobal_trees[TI_PTR_TYPE]);
4362 |
/* Record the counter fields so pass_stdarg can track how many GP/FP
   registers a va_list actually consumes.  */
4363 | va_list_gpr_counter_fieldglobal_trees[TI_VA_LIST_GPR_COUNTER_FIELD] = f_gpr;
4364 | va_list_fpr_counter_fieldglobal_trees[TI_VA_LIST_FPR_COUNTER_FIELD] = f_fpr;
4365 |
4366 | DECL_FIELD_CONTEXT (f_gpr)((tree_check ((f_gpr), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 4366, __FUNCTION__, (FIELD_DECL)))->decl_minimal.context ) = record;
4367 | DECL_FIELD_CONTEXT (f_fpr)((tree_check ((f_fpr), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 4367, __FUNCTION__, (FIELD_DECL)))->decl_minimal.context ) = record;
4368 | DECL_FIELD_CONTEXT (f_ovf)((tree_check ((f_ovf), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 4368, __FUNCTION__, (FIELD_DECL)))->decl_minimal.context ) = record;
4369 | DECL_FIELD_CONTEXT (f_sav)((tree_check ((f_sav), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 4369, __FUNCTION__, (FIELD_DECL)))->decl_minimal.context ) = record;
4370 |
/* Chain the four fields onto the record and lay it out.  */
4371 | TYPE_STUB_DECL (record)(((contains_struct_check (((tree_class_check ((record), (tcc_type ), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 4371, __FUNCTION__))), (TS_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 4371, __FUNCTION__))->common.chain)) = type_decl;
4372 | TYPE_NAME (record)((tree_class_check ((record), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 4372, __FUNCTION__))->type_common.name) = type_decl;
4373 | TYPE_FIELDS (record)((tree_check3 ((record), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 4373, __FUNCTION__, (RECORD_TYPE), (UNION_TYPE), (QUAL_UNION_TYPE )))->type_non_common.values) = f_gpr;
4374 | DECL_CHAIN (f_gpr)(((contains_struct_check (((contains_struct_check ((f_gpr), ( TS_DECL_MINIMAL), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 4374, __FUNCTION__))), (TS_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 4374, __FUNCTION__))->common.chain)) = f_fpr;
4375 | DECL_CHAIN (f_fpr)(((contains_struct_check (((contains_struct_check ((f_fpr), ( TS_DECL_MINIMAL), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 4375, __FUNCTION__))), (TS_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 4375, __FUNCTION__))->common.chain)) = f_ovf;
4376 | DECL_CHAIN (f_ovf)(((contains_struct_check (((contains_struct_check ((f_ovf), ( TS_DECL_MINIMAL), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 4376, __FUNCTION__))), (TS_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 4376, __FUNCTION__))->common.chain)) = f_sav;
4377 |
4378 | layout_type (record);
4379 |
/* Tag the record (not the array) with the identifying attribute; the
   array would decay to a pointer as a parameter and lose it.  */
4380 | TYPE_ATTRIBUTES (record)((tree_class_check ((record), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 4380, __FUNCTION__))->type_common.attributes) = tree_cons (get_identifier ("sysv_abi va_list")(__builtin_constant_p ("sysv_abi va_list") ? get_identifier_with_length (("sysv_abi va_list"), strlen ("sysv_abi va_list")) : get_identifier ("sysv_abi va_list")),
4381 | NULL_TREE(tree) __null, TYPE_ATTRIBUTES (record)((tree_class_check ((record), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 4381, __FUNCTION__))->type_common.attributes));
4382 |
4383 | /* The correct type is an array type of one element. */
4384 | return build_array_type (record, build_index_type (size_zero_nodeglobal_trees[TI_SIZE_ZERO]));
4385 | }
4386 | |
4387 | /* Setup the builtin va_list data type and for 64-bit the additional |
4388 | calling convention specific va_list data types. */ |
4389 | |
/* Implement TARGET_BUILD_BUILTIN_VA_LIST.
   Build the default va_list type for the current ABI.  On 64-bit
   targets this also initializes both ABI-specific va_list types
   (sysv_va_list_type_node and ms_va_list_type_node) and returns
   whichever matches ix86_abi; on 32-bit targets va_list is simply
   'char *'.  */
4390 | static tree
4391 | ix86_build_builtin_va_list (void)
4392 | {
4393 | if (TARGET_64BIT((global_options.x_ix86_isa_flags & (1UL << 1)) != 0 ))
4394 | {
4395 | /* Initialize ABI specific va_list builtin types.
4396 |
4397 | In lto1, we can encounter two va_list types:
4398 | - one as a result of the type-merge across TUs, and
4399 | - the one constructed here.
4400 | These two types will not have the same TYPE_MAIN_VARIANT, and therefore
4401 | a type identity check in canonical_va_list_type based on
4402 | TYPE_MAIN_VARIANT (which we used to have) will not work.
4403 | Instead, we tag each va_list_type_node with its unique attribute, and
4404 | look for the attribute in the type identity check in
4405 | canonical_va_list_type.
4406 |
4407 | Tagging sysv_va_list_type_node directly with the attribute is
4408 | problematic since it's a array of one record, which will degrade into a
4409 | pointer to record when used as parameter (see build_va_arg comments for
4410 | an example), dropping the attribute in the process. So we tag the
4411 | record instead. */
4412 |
4413 | /* For SYSV_ABI we use an array of one record. */
4414 | sysv_va_list_type_node = ix86_build_builtin_va_list_64 ();
4415 |
4416 | /* For MS_ABI we use plain pointer to argument area. */
4417 | tree char_ptr_type = build_pointer_type (char_type_nodeinteger_types[itk_char]);
/* Tag the MS variant with "ms_abi va_list" for the same
   canonical_va_list_type identity check as above.  */
4418 | tree attr = tree_cons (get_identifier ("ms_abi va_list")(__builtin_constant_p ("ms_abi va_list") ? get_identifier_with_length (("ms_abi va_list"), strlen ("ms_abi va_list")) : get_identifier ("ms_abi va_list")), NULL_TREE(tree) __null,
4419 | TYPE_ATTRIBUTES (char_ptr_type)((tree_class_check ((char_ptr_type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 4419, __FUNCTION__))->type_common.attributes));
4420 | ms_va_list_type_node = build_type_attribute_variant (char_ptr_type, attr);
4421 |
4422 | return ((ix86_abiglobal_options.x_ix86_abi == MS_ABI)
4423 | ? ms_va_list_type_node
4424 | : sysv_va_list_type_node);
4425 | }
4426 | else
4427 | {
4428 | /* For i386 we use plain pointer to argument area. */
4429 | return build_pointer_type (char_type_nodeinteger_types[itk_char]);
4430 | }
4431 | }
4432 | |
4433 | /* Worker function for TARGET_SETUP_INCOMING_VARARGS. */ |
4434 | |
/* Worker function for TARGET_SETUP_INCOMING_VARARGS, SysV x86-64 ABI.
   Emit prologue code that dumps the unnamed-argument registers into the
   register save area (addressed off frame_pointer_rtx): up to
   X86_64_REGPARM_MAX general-purpose registers first, then up to
   X86_64_SSE_REGPARM_MAX SSE registers.  CUM describes how many named
   registers have already been consumed, so saving starts at
   cum->regno / cum->sse_regno.  The SSE stores are guarded by a runtime
   test of AL, which the caller sets to the number of SSE registers used
   in the call.  */
4435 | static void
4436 | setup_incoming_varargs_64 (CUMULATIVE_ARGS *cum)
4437 | {
4438 | rtx save_area, mem;
4439 | alias_set_type set;
4440 | int i, max;
4441 |
4442 | /* GPR size of varargs save area. */
4443 | if (cfun(cfun + 0)->va_list_gpr_size)
4444 | ix86_varargs_gpr_size((cfun + 0)->machine->varargs_gpr_size) = X86_64_REGPARM_MAX6 * UNITS_PER_WORD(((global_options.x_ix86_isa_flags & (1UL << 1)) != 0) ? 8 : 4);
4445 | else
4446 | ix86_varargs_gpr_size((cfun + 0)->machine->varargs_gpr_size) = 0;
4447 |
4448 | /* FPR size of varargs save area. We don't need it if we don't pass
4449 | anything in SSE registers. */
4450 | if (TARGET_SSE((global_options.x_ix86_isa_flags & (1UL << 50)) != 0) && cfun(cfun + 0)->va_list_fpr_size)
4451 | ix86_varargs_fpr_size((cfun + 0)->machine->varargs_fpr_size) = X86_64_SSE_REGPARM_MAX8 * 16;
4452 | else
4453 | ix86_varargs_fpr_size((cfun + 0)->machine->varargs_fpr_size) = 0;
4454 |
/* Nothing needs saving (pass_stdarg proved no register-passed varargs
   are read) -- emit no save area at all.  */
4455 | if (! ix86_varargs_gpr_size((cfun + 0)->machine->varargs_gpr_size) && ! ix86_varargs_fpr_size((cfun + 0)->machine->varargs_fpr_size))
4456 | return;
4457 |
4458 | save_area = frame_pointer_rtx((this_target_rtl->x_global_rtl)[GR_FRAME_POINTER]);
4459 | set = get_varargs_alias_set ();
4460 |
/* Save only as many GP registers as the function's va_list usage
   requires, capped at the ABI maximum.  */
4461 | max = cum->regno + cfun(cfun + 0)->va_list_gpr_size / UNITS_PER_WORD(((global_options.x_ix86_isa_flags & (1UL << 1)) != 0) ? 8 : 4);
4462 | if (max > X86_64_REGPARM_MAX6)
4463 | max = X86_64_REGPARM_MAX6;
4464 |
4465 | for (i = cum->regno; i < max; i++)
4466 | {
4467 | mem = gen_rtx_MEM (word_mode,
4468 | plus_constant (Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode ( (scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode ::from_int) E_SImode))), save_area, i * UNITS_PER_WORD(((global_options.x_ix86_isa_flags & (1UL << 1)) != 0) ? 8 : 4)));
4469 | MEM_NOTRAP_P (mem)(__extension__ ({ __typeof ((mem)) const _rtx = ((mem)); if ( ((enum rtx_code) (_rtx)->code) != MEM) rtl_check_failed_flag ("MEM_NOTRAP_P", _rtx, "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 4469, __FUNCTION__); _rtx; })->call) = 1;
4470 | set_mem_alias_set (mem, set);
4471 | emit_move_insn (mem,
4472 | gen_rtx_REG (word_mode,
4473 | x86_64_int_parameter_registers[i]));
4474 | }
4475 |
4476 | if (ix86_varargs_fpr_size((cfun + 0)->machine->varargs_fpr_size))
4477 | {
4478 | machine_mode smode;
4479 | rtx_code_label *label;
4480 | rtx test;
4481 |
4482 | /* Now emit code to save SSE registers. The AX parameter contains number
4483 | of SSE parameter registers used to call this function, though all we
4484 | actually check here is the zero/non-zero status. */
4485 |
4486 | label = gen_label_rtx ();
/* if (AL == 0) skip all the SSE stores.  */
4487 | test = gen_rtx_EQ (VOIDmode, gen_rtx_REG (QImode, AX_REG), const0_rtx)gen_rtx_fmt_ee_stat ((EQ), ((((void) 0, E_VOIDmode))), ((gen_rtx_REG ((scalar_int_mode ((scalar_int_mode::from_int) E_QImode)), 0 ))), (((const_int_rtx[64]))) );
4488 | emit_jump_insn (gen_cbranchqi4 (test, XEXP (test, 0)(((test)->u.fld[0]).rt_rtx), XEXP (test, 1)(((test)->u.fld[1]).rt_rtx),
4489 | label));
4490 |
4491 | /* ??? If !TARGET_SSE_TYPELESS_STORES, would we perform better if
4492 | we used movdqa (i.e. TImode) instead? Perhaps even better would
4493 | be if we could determine the real mode of the data, via a hook
4494 | into pass_stdarg. Ignore all that for now. */
4495 | smode = V4SFmode((void) 0, E_V4SFmode);
/* The 16-byte vector stores require the stack aligned at least to
   the mode's alignment.  */
4496 | if (crtl(&x_rtl)->stack_alignment_needed < GET_MODE_ALIGNMENT (smode)get_mode_alignment (smode))
4497 | crtl(&x_rtl)->stack_alignment_needed = GET_MODE_ALIGNMENT (smode)get_mode_alignment (smode);
4498 |
4499 | max = cum->sse_regno + cfun(cfun + 0)->va_list_fpr_size / 16;
4500 | if (max > X86_64_SSE_REGPARM_MAX8)
4501 | max = X86_64_SSE_REGPARM_MAX8;
4502 |
4503 | for (i = cum->sse_regno; i < max; ++i)
4504 | {
/* SSE slots sit right after the GPR part of the save area.  */
4505 | mem = plus_constant (Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode ( (scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode ::from_int) E_SImode))), save_area,
4506 | i * 16 + ix86_varargs_gpr_size((cfun + 0)->machine->varargs_gpr_size));
4507 | mem = gen_rtx_MEM (smode, mem);
4508 | MEM_NOTRAP_P (mem)(__extension__ ({ __typeof ((mem)) const _rtx = ((mem)); if ( ((enum rtx_code) (_rtx)->code) != MEM) rtl_check_failed_flag ("MEM_NOTRAP_P", _rtx, "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 4508, __FUNCTION__); _rtx; })->call) = 1;
4509 | set_mem_alias_set (mem, set);
4510 | set_mem_align (mem, GET_MODE_ALIGNMENT (smode)get_mode_alignment (smode));
4511 |
4512 | emit_move_insn (mem, gen_rtx_REG (smode, GET_SSE_REGNO (i)((i) < 8 ? 20 + (i) : (i) < 16 ? 44 + (i) - 8 : 52 + (i ) - 16)));
4513 | }
4514 |
4515 | emit_label (label);
4516 | }
4517 | }
4518 | |
/* Worker function for TARGET_SETUP_INCOMING_VARARGS, MS x86-64 ABI.
   The MS ABI has no register save area: each of the (up to
   X86_64_MS_REGPARM_MAX) integer parameter registers has a home slot on
   the caller's stack, so we simply spill the unnamed-argument registers
   into their home slots starting at cum->regno.  */
4519 | static void
4520 | setup_incoming_varargs_ms_64 (CUMULATIVE_ARGS *cum)
4521 | {
4522 | alias_set_type set = get_varargs_alias_set ();
4523 | int i;
4524 |
4525 | /* Reset to zero, as there might be a sysv vaarg used
4526 | before. */
4527 | ix86_varargs_gpr_size((cfun + 0)->machine->varargs_gpr_size) = 0;
4528 | ix86_varargs_fpr_size((cfun + 0)->machine->varargs_fpr_size) = 0;
4529 |
4530 | for (i = cum->regno; i < X86_64_MS_REGPARM_MAX4; i++)
4531 | {
4532 | rtx reg, mem;
4533 |
/* Home slot: i words above the incoming-arguments pointer.  */
4534 | mem = gen_rtx_MEM (Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode ( (scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode ::from_int) E_SImode))),
4535 | plus_constant (Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode ( (scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode ::from_int) E_SImode))), virtual_incoming_args_rtx((this_target_rtl->x_global_rtl)[GR_VIRTUAL_INCOMING_ARGS] ),
4536 | i * UNITS_PER_WORD(((global_options.x_ix86_isa_flags & (1UL << 1)) != 0) ? 8 : 4)));
4537 | MEM_NOTRAP_P (mem)(__extension__ ({ __typeof ((mem)) const _rtx = ((mem)); if ( ((enum rtx_code) (_rtx)->code) != MEM) rtl_check_failed_flag ("MEM_NOTRAP_P", _rtx, "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 4537, __FUNCTION__); _rtx; })->call) = 1;
4538 | set_mem_alias_set (mem, set);
4539 |
4540 | reg = gen_rtx_REG (Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode ( (scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode ::from_int) E_SImode))), x86_64_ms_abi_int_parameter_registers[i]);
4541 | emit_move_insn (mem, reg);
4542 | }
4543 | }
4544 | |
/* Implement TARGET_SETUP_INCOMING_VARARGS.
   CUM_V describes the named arguments already processed; ARG is the last
   named argument.  Advances past the last named argument (unless the
   function is a C23-style (...)-only stdarg function) and dispatches to
   the MS- or SysV-ABI worker.  Only 64-bit targets need any setup.  */
4545 | static void
4546 | ix86_setup_incoming_varargs (cumulative_args_t cum_v,
4547 | const function_arg_info &arg,
4548 | int *, int no_rtl)
4549 | {
4550 | CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
4551 | CUMULATIVE_ARGS next_cum;
4552 | tree fntype;
4553 |
4554 | /* This argument doesn't appear to be used anymore. Which is good,
4555 | because the old code here didn't suppress rtl generation. */
4556 | gcc_assert (!no_rtl)((void)(!(!no_rtl) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 4556, __FUNCTION__), 0 : 0));
4557 |
4558 | if (!TARGET_64BIT((global_options.x_ix86_isa_flags & (1UL << 1)) != 0 ))
4559 | return;
4560 |
4561 | fntype = TREE_TYPE (current_function_decl)((contains_struct_check ((current_function_decl), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 4561, __FUNCTION__))->typed.type);
4562 |
4563 | /* For varargs, we do not want to skip the dummy va_dcl argument.
4564 | For stdargs, we do want to skip the last named argument. */
4565 | next_cum = *cum;
4566 | if (!TYPE_NO_NAMED_ARGS_STDARG_P (TREE_TYPE (current_function_decl))((tree_class_check ((((contains_struct_check ((current_function_decl ), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 4566, __FUNCTION__))->typed.type)), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 4566, __FUNCTION__))->type_common.no_named_args_stdarg_p )
4567 | && stdarg_p (fntype))
4568 | ix86_function_arg_advance (pack_cumulative_args (&next_cum), arg);
4569 |
/* Per-call ABI selection: an MS_ABI function saves home slots, a
   SYSV_ABI function builds the full register save area.  */
4570 | if (cum->call_abi == MS_ABI)
4571 | setup_incoming_varargs_ms_64 (&next_cum);
4572 | else
4573 | setup_incoming_varargs_64 (&next_cum);
4574 | }
4575 | |
4576 | /* Checks if TYPE is of kind va_list char *. */ |
4577 | |
/* Checks if TYPE is of kind va_list char *.
   Return true when TYPE is the plain-pointer flavour of va_list: always
   on 32-bit targets, or on 64-bit targets when TYPE canonicalizes to the
   MS va_list (or to the default va_list while compiling for the MS
   ABI).  Callers use this to fall back to the generic std_* va_arg /
   va_start expanders.  */
4578 | static bool
4579 | is_va_list_char_pointer (tree type)
4580 | {
4581 | tree canonic;
4582 |
4583 | /* For 32-bit it is always true. */
4584 | if (!TARGET_64BIT((global_options.x_ix86_isa_flags & (1UL << 1)) != 0 ))
4585 | return true;
4586 | canonic = ix86_canonical_va_list_type (type);
4587 | return (canonic == ms_va_list_type_node
4588 | || (ix86_abiglobal_options.x_ix86_abi == MS_ABI && canonic == va_list_type_nodeglobal_trees[TI_VA_LIST_TYPE]));
4589 | }
4590 | |
4591 | /* Implement va_start. */ |
4592 | |
/* Implement va_start (TARGET_EXPAND_BUILTIN_VA_START).
   VALIST is the va_list lvalue, NEXTARG the address of the first unnamed
   stack argument.  For char* style va_lists this defers to the generic
   expander (with a -fsplit-stack adjustment); for the SysV x86-64 record
   va_list it initializes gp_offset/fp_offset from the number of named
   register arguments, overflow_arg_area from the incoming-argument
   pointer, and reg_save_area from the frame pointer.  */
4593 | static void
4594 | ix86_va_start (tree valist, rtx nextarg)
4595 | {
4596 | HOST_WIDE_INTlong words, n_gpr, n_fpr;
4597 | tree f_gpr, f_fpr, f_ovf, f_sav;
4598 | tree gpr, fpr, ovf, sav, t;
4599 | tree type;
4600 | rtx ovf_rtx;
4601 |
4602 | if (flag_split_stackglobal_options.x_flag_split_stack
4603 | && cfun(cfun + 0)->machine->split_stack_varargs_pointer == NULL_RTX(rtx) 0)
4604 | {
4605 | unsigned int scratch_regno;
4606 |
4607 | /* When we are splitting the stack, we can't refer to the stack
4608 | arguments using internal_arg_pointer, because they may be on
4609 | the old stack. The split stack prologue will arrange to
4610 | leave a pointer to the old stack arguments in a scratch
4611 | register, which we here copy to a pseudo-register. The split
4612 | stack prologue can't set the pseudo-register directly because
4613 | it (the prologue) runs before any registers have been saved. */
4614 |
4615 | scratch_regno = split_stack_prologue_scratch_regno ();
4616 | if (scratch_regno != INVALID_REGNUM(~(unsigned int) 0))
4617 | {
4618 | rtx reg;
4619 | rtx_insn *seq;
4620 |
4621 | reg = gen_reg_rtx (Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode ( (scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode ::from_int) E_SImode))));
4622 | cfun(cfun + 0)->machine->split_stack_varargs_pointer = reg;
4623 |
4624 | start_sequence ();
4625 | emit_move_insn (reg, gen_rtx_REG (Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode ( (scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode ::from_int) E_SImode))), scratch_regno));
4626 | seq = get_insns ();
4627 | end_sequence ();
4628 |
/* Place the copy at the very start of the function, before the
   function body has had a chance to clobber the scratch reg.  */
4629 | push_topmost_sequence ();
4630 | emit_insn_after (seq, entry_of_function ());
4631 | pop_topmost_sequence ();
4632 | }
4633 | }
4634 |
4635 | /* Only 64bit target needs something special. */
4636 | if (is_va_list_char_pointer (TREE_TYPE (valist)((contains_struct_check ((valist), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 4636, __FUNCTION__))->typed.type)))
4637 | {
4638 | if (cfun(cfun + 0)->machine->split_stack_varargs_pointer == NULL_RTX(rtx) 0)
4639 | std_expand_builtin_va_start (valist, nextarg);
4640 | else
4641 | {
/* Split-stack: valist = split_stack_varargs_pointer + arg_offset.  */
4642 | rtx va_r, next;
4643 |
4644 | va_r = expand_expr (valist, NULL_RTX(rtx) 0, VOIDmode((void) 0, E_VOIDmode), EXPAND_WRITE);
4645 | next = expand_binop (ptr_mode, add_optab,
4646 | cfun(cfun + 0)->machine->split_stack_varargs_pointer,
4647 | crtl(&x_rtl)->args.arg_offset_rtx,
4648 | NULL_RTX(rtx) 0, 0, OPTAB_LIB_WIDEN);
4649 | convert_move (va_r, next, 0);
4650 | }
4651 | return;
4652 | }
4653 |
/* SysV record va_list: fetch the four fields of __va_list_tag.  */
4654 | f_gpr = TYPE_FIELDS (TREE_TYPE (sysv_va_list_type_node))((tree_check3 ((((contains_struct_check ((sysv_va_list_type_node ), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 4654, __FUNCTION__))->typed.type)), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 4654, __FUNCTION__, (RECORD_TYPE), (UNION_TYPE), (QUAL_UNION_TYPE )))->type_non_common.values);
4655 | f_fpr = DECL_CHAIN (f_gpr)(((contains_struct_check (((contains_struct_check ((f_gpr), ( TS_DECL_MINIMAL), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 4655, __FUNCTION__))), (TS_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 4655, __FUNCTION__))->common.chain));
4656 | f_ovf = DECL_CHAIN (f_fpr)(((contains_struct_check (((contains_struct_check ((f_fpr), ( TS_DECL_MINIMAL), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 4656, __FUNCTION__))), (TS_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 4656, __FUNCTION__))->common.chain));
4657 | f_sav = DECL_CHAIN (f_ovf)(((contains_struct_check (((contains_struct_check ((f_ovf), ( TS_DECL_MINIMAL), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 4657, __FUNCTION__))), (TS_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 4657, __FUNCTION__))->common.chain));
4658 |
4659 | valist = build_simple_mem_ref (valist)build_simple_mem_ref_loc (((location_t) 0), valist);
4660 | TREE_TYPE (valist)((contains_struct_check ((valist), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 4660, __FUNCTION__))->typed.type) = TREE_TYPE (sysv_va_list_type_node)((contains_struct_check ((sysv_va_list_type_node), (TS_TYPED) , "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 4660, __FUNCTION__))->typed.type);
4661 | /* The following should be folded into the MEM_REF offset. */
4662 | gpr = build3 (COMPONENT_REF, TREE_TYPE (f_gpr)((contains_struct_check ((f_gpr), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 4662, __FUNCTION__))->typed.type), unshare_expr (valist),
4663 | f_gpr, NULL_TREE(tree) __null);
4664 | fpr = build3 (COMPONENT_REF, TREE_TYPE (f_fpr)((contains_struct_check ((f_fpr), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 4664, __FUNCTION__))->typed.type), unshare_expr (valist),
4665 | f_fpr, NULL_TREE(tree) __null);
4666 | ovf = build3 (COMPONENT_REF, TREE_TYPE (f_ovf)((contains_struct_check ((f_ovf), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 4666, __FUNCTION__))->typed.type), unshare_expr (valist),
4667 | f_ovf, NULL_TREE(tree) __null);
4668 | sav = build3 (COMPONENT_REF, TREE_TYPE (f_sav)((contains_struct_check ((f_sav), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 4668, __FUNCTION__))->typed.type), unshare_expr (valist),
4669 | f_sav, NULL_TREE(tree) __null);
4670 |
4671 | /* Count number of gp and fp argument registers used. */
4672 | words = crtl(&x_rtl)->args.info.words;
4673 | n_gpr = crtl(&x_rtl)->args.info.regno;
4674 | n_fpr = crtl(&x_rtl)->args.info.sse_regno;
4675 |
/* gp_offset = n_gpr * 8 (each GP slot in the save area is 8 bytes).
   Skipped entirely when pass_stdarg proved no GP varargs are read.  */
4676 | if (cfun(cfun + 0)->va_list_gpr_size)
4677 | {
4678 | type = TREE_TYPE (gpr)((contains_struct_check ((gpr), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 4678, __FUNCTION__))->typed.type);
4679 | t = build2 (MODIFY_EXPR, type,
4680 | gpr, build_int_cst (type, n_gpr * 8));
4681 | TREE_SIDE_EFFECTS (t)((non_type_check ((t), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 4681, __FUNCTION__))->base.side_effects_flag) = 1;
4682 | expand_expr (t, const0_rtx(const_int_rtx[64]), VOIDmode((void) 0, E_VOIDmode), EXPAND_NORMAL);
4683 | }
4684 |
/* fp_offset = n_fpr * 16 past the 8 * REGPARM_MAX bytes of GP slots.  */
4685 | if (TARGET_SSE((global_options.x_ix86_isa_flags & (1UL << 50)) != 0) && cfun(cfun + 0)->va_list_fpr_size)
4686 | {
4687 | type = TREE_TYPE (fpr)((contains_struct_check ((fpr), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 4687, __FUNCTION__))->typed.type);
4688 | t = build2 (MODIFY_EXPR, type, fpr,
4689 | build_int_cst (type, n_fpr * 16 + 8*X86_64_REGPARM_MAX6));
4690 | TREE_SIDE_EFFECTS (t)((non_type_check ((t), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 4690, __FUNCTION__))->base.side_effects_flag) = 1;
4691 | expand_expr (t, const0_rtx(const_int_rtx[64]), VOIDmode((void) 0, E_VOIDmode), EXPAND_NORMAL);
4692 | }
4693 |
4694 | /* Find the overflow area. */
4695 | type = TREE_TYPE (ovf)((contains_struct_check ((ovf), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 4695, __FUNCTION__))->typed.type);
4696 | if (cfun(cfun + 0)->machine->split_stack_varargs_pointer == NULL_RTX(rtx) 0)
4697 | ovf_rtx = crtl(&x_rtl)->args.internal_arg_pointer;
4698 | else
4699 | ovf_rtx = cfun(cfun + 0)->machine->split_stack_varargs_pointer;
4700 | t = make_tree (type, ovf_rtx);
/* Skip the named stack arguments (WORDS words) already consumed.  */
4701 | if (words != 0)
4702 | t = fold_build_pointer_plus_hwi (t, words * UNITS_PER_WORD)fold_build_pointer_plus_hwi_loc (((location_t) 0), t, words * (((global_options.x_ix86_isa_flags & (1UL << 1)) != 0) ? 8 : 4));
4703 |
4704 | t = build2 (MODIFY_EXPR, type, ovf, t);
4705 | TREE_SIDE_EFFECTS (t)((non_type_check ((t), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 4705, __FUNCTION__))->base.side_effects_flag) = 1;
4706 | expand_expr (t, const0_rtx(const_int_rtx[64]), VOIDmode((void) 0, E_VOIDmode), EXPAND_NORMAL);
4707 |
4708 | if (ix86_varargs_gpr_size((cfun + 0)->machine->varargs_gpr_size) || ix86_varargs_fpr_size((cfun + 0)->machine->varargs_fpr_size))
4709 | {
4710 | /* Find the register save area.
4711 | Prologue of the function save it right above stack frame. */
4712 | type = TREE_TYPE (sav)((contains_struct_check ((sav), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 4712, __FUNCTION__))->typed.type);
4713 | t = make_tree (type, frame_pointer_rtx((this_target_rtl->x_global_rtl)[GR_FRAME_POINTER]));
/* If only FP registers were saved, the GP part of the save area was
   omitted; bias the base back so fp_offset still indexes correctly.  */
4714 | if (!ix86_varargs_gpr_size((cfun + 0)->machine->varargs_gpr_size))
4715 | t = fold_build_pointer_plus_hwi (t, -8 * X86_64_REGPARM_MAX)fold_build_pointer_plus_hwi_loc (((location_t) 0), t, -8 * 6);
4716 |
4717 | t = build2 (MODIFY_EXPR, type, sav, t);
4718 | TREE_SIDE_EFFECTS (t)((non_type_check ((t), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 4718, __FUNCTION__))->base.side_effects_flag) = 1;
4719 | expand_expr (t, const0_rtx(const_int_rtx[64]), VOIDmode((void) 0, E_VOIDmode), EXPAND_NORMAL);
4720 | }
4721 | }
4722 | |
4723 | /* Implement va_arg. */ |
4724 | |
4725 | static tree |
4726 | ix86_gimplify_va_arg (tree valist, tree type, gimple_seq *pre_p, |
4727 | gimple_seq *post_p) |
4728 | { |
4729 | static const int intreg[6] = { 0, 1, 2, 3, 4, 5 }; |
4730 | tree f_gpr, f_fpr, f_ovf, f_sav; |
4731 | tree gpr, fpr, ovf, sav, t; |
4732 | int size, rsize; |
4733 | tree lab_false, lab_over = NULL_TREE(tree) __null; |
4734 | tree addr, t2; |
4735 | rtx container; |
4736 | int indirect_p = 0; |
4737 | tree ptrtype; |
4738 | machine_mode nat_mode; |
4739 | unsigned int arg_boundary; |
4740 | unsigned int type_align; |
4741 | |
4742 | /* Only 64bit target needs something special. */ |
4743 | if (is_va_list_char_pointer (TREE_TYPE (valist)((contains_struct_check ((valist), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 4743, __FUNCTION__))->typed.type))) |
4744 | return std_gimplify_va_arg_expr (valist, type, pre_p, post_p); |
4745 | |
4746 | f_gpr = TYPE_FIELDS (TREE_TYPE (sysv_va_list_type_node))((tree_check3 ((((contains_struct_check ((sysv_va_list_type_node ), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 4746, __FUNCTION__))->typed.type)), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 4746, __FUNCTION__, (RECORD_TYPE), (UNION_TYPE), (QUAL_UNION_TYPE )))->type_non_common.values); |
4747 | f_fpr = DECL_CHAIN (f_gpr)(((contains_struct_check (((contains_struct_check ((f_gpr), ( TS_DECL_MINIMAL), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 4747, __FUNCTION__))), (TS_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 4747, __FUNCTION__))->common.chain)); |
4748 | f_ovf = DECL_CHAIN (f_fpr)(((contains_struct_check (((contains_struct_check ((f_fpr), ( TS_DECL_MINIMAL), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 4748, __FUNCTION__))), (TS_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 4748, __FUNCTION__))->common.chain)); |
4749 | f_sav = DECL_CHAIN (f_ovf)(((contains_struct_check (((contains_struct_check ((f_ovf), ( TS_DECL_MINIMAL), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 4749, __FUNCTION__))), (TS_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 4749, __FUNCTION__))->common.chain)); |
4750 | |
4751 | gpr = build3 (COMPONENT_REF, TREE_TYPE (f_gpr)((contains_struct_check ((f_gpr), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 4751, __FUNCTION__))->typed.type), |
4752 | valist, f_gpr, NULL_TREE(tree) __null); |
4753 | |
4754 | fpr = build3 (COMPONENT_REF, TREE_TYPE (f_fpr)((contains_struct_check ((f_fpr), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 4754, __FUNCTION__))->typed.type), valist, f_fpr, NULL_TREE(tree) __null); |
4755 | ovf = build3 (COMPONENT_REF, TREE_TYPE (f_ovf)((contains_struct_check ((f_ovf), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 4755, __FUNCTION__))->typed.type), valist, f_ovf, NULL_TREE(tree) __null); |
4756 | sav = build3 (COMPONENT_REF, TREE_TYPE (f_sav)((contains_struct_check ((f_sav), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 4756, __FUNCTION__))->typed.type), valist, f_sav, NULL_TREE(tree) __null); |
4757 | |
4758 | indirect_p = pass_va_arg_by_reference (type); |
4759 | if (indirect_p) |
4760 | type = build_pointer_type (type); |
4761 | size = arg_int_size_in_bytes (type); |
4762 | rsize = CEIL (size, UNITS_PER_WORD)(((size) + ((((global_options.x_ix86_isa_flags & (1UL << 1)) != 0) ? 8 : 4)) - 1) / ((((global_options.x_ix86_isa_flags & (1UL << 1)) != 0) ? 8 : 4))); |
4763 | |
4764 | nat_mode = type_natural_mode (type, NULL__null, false); |
4765 | switch (nat_mode) |
4766 | { |
4767 | case E_V16HFmode: |
4768 | case E_V16BFmode: |
4769 | case E_V8SFmode: |
4770 | case E_V8SImode: |
4771 | case E_V32QImode: |
4772 | case E_V16HImode: |
4773 | case E_V4DFmode: |
4774 | case E_V4DImode: |
4775 | case E_V32HFmode: |
4776 | case E_V32BFmode: |
4777 | case E_V16SFmode: |
4778 | case E_V16SImode: |
4779 | case E_V64QImode: |
4780 | case E_V32HImode: |
4781 | case E_V8DFmode: |
4782 | case E_V8DImode: |
4783 | /* Unnamed 256 and 512bit vector mode parameters are passed on stack. */ |
4784 | if (!TARGET_64BIT_MS_ABI(((global_options.x_ix86_isa_flags & (1UL << 1)) != 0) && ix86_cfun_abi () == MS_ABI)) |
4785 | { |
4786 | container = NULL__null; |
4787 | break; |
4788 | } |
4789 | /* FALLTHRU */ |
4790 | |
4791 | default: |
4792 | container = construct_container (nat_mode, TYPE_MODE (type)((((enum tree_code) ((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 4792, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode (type) : (type)->type_common.mode), |
4793 | type, 0, X86_64_REGPARM_MAX6, |
4794 | X86_64_SSE_REGPARM_MAX8, intreg, |
4795 | 0); |
4796 | break; |
4797 | } |
4798 | |
4799 | /* Pull the value out of the saved registers. */ |
4800 | |
4801 | addr = create_tmp_var (ptr_type_nodeglobal_trees[TI_PTR_TYPE], "addr"); |
4802 | type_align = TYPE_ALIGN (type)(((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 4802, __FUNCTION__))->type_common.align) ? ((unsigned)1) << (((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 4802, __FUNCTION__))->type_common.align) - 1) : 0); |
4803 | |
4804 | if (container) |
4805 | { |
4806 | int needed_intregs, needed_sseregs; |
4807 | bool need_temp; |
4808 | tree int_addr, sse_addr; |
4809 | |
4810 | lab_false = create_artificial_label (UNKNOWN_LOCATION((location_t) 0)); |
4811 | lab_over = create_artificial_label (UNKNOWN_LOCATION((location_t) 0)); |
4812 | |
4813 | examine_argument (nat_mode, type, 0, &needed_intregs, &needed_sseregs); |
4814 | |
4815 | need_temp = (!REG_P (container)(((enum rtx_code) (container)->code) == REG) |
4816 | && ((needed_intregs && TYPE_ALIGN (type)(((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 4816, __FUNCTION__))->type_common.align) ? ((unsigned)1) << (((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 4816, __FUNCTION__))->type_common.align) - 1) : 0) > 64) |
4817 | || TYPE_ALIGN (type)(((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 4817, __FUNCTION__))->type_common.align) ? ((unsigned)1) << (((tree_class_check ((type), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 4817, __FUNCTION__))->type_common.align) - 1) : 0) > 128)); |
4818 | |
4819 | /* In case we are passing structure, verify that it is consecutive block |
4820 | on the register save area. If not we need to do moves. */ |
4821 | if (!need_temp && !REG_P (container)(((enum rtx_code) (container)->code) == REG)) |
4822 | { |
4823 | /* Verify that all registers are strictly consecutive */ |
4824 | if (SSE_REGNO_P (REGNO (XEXP (XVECEXP (container, 0, 0), 0)))(((unsigned long) (((rhs_regno(((((((((container)->u.fld[0 ]).rt_rtvec))->elem[0]))->u.fld[0]).rt_rtx))))) - (unsigned long) (20) <= (unsigned long) (27) - (unsigned long) (20) ) || ((unsigned long) (((rhs_regno(((((((((container)->u.fld [0]).rt_rtvec))->elem[0]))->u.fld[0]).rt_rtx))))) - (unsigned long) (44) <= (unsigned long) (51) - (unsigned long) (44) ) || ((unsigned long) (((rhs_regno(((((((((container)->u.fld [0]).rt_rtvec))->elem[0]))->u.fld[0]).rt_rtx))))) - (unsigned long) (52) <= (unsigned long) (67) - (unsigned long) (52) ))) |
4825 | { |
4826 | int i; |
4827 | |
4828 | for (i = 0; i < XVECLEN (container, 0)(((((container)->u.fld[0]).rt_rtvec))->num_elem) && !need_temp; i++) |
4829 | { |
4830 | rtx slot = XVECEXP (container, 0, i)(((((container)->u.fld[0]).rt_rtvec))->elem[i]); |
4831 | if (REGNO (XEXP (slot, 0))(rhs_regno((((slot)->u.fld[0]).rt_rtx))) != FIRST_SSE_REG20 + (unsigned int) i |
4832 | || INTVAL (XEXP (slot, 1))(((((slot)->u.fld[1]).rt_rtx))->u.hwint[0]) != i * 16) |
4833 | need_temp = true; |
4834 | } |
4835 | } |
4836 | else |
4837 | { |
4838 | int i; |
4839 | |
4840 | for (i = 0; i < XVECLEN (container, 0)(((((container)->u.fld[0]).rt_rtvec))->num_elem) && !need_temp; i++) |
4841 | { |
4842 | rtx slot = XVECEXP (container, 0, i)(((((container)->u.fld[0]).rt_rtvec))->elem[i]); |
4843 | if (REGNO (XEXP (slot, 0))(rhs_regno((((slot)->u.fld[0]).rt_rtx))) != (unsigned int) i |
4844 | || INTVAL (XEXP (slot, 1))(((((slot)->u.fld[1]).rt_rtx))->u.hwint[0]) != i * 8) |
4845 | need_temp = true; |
4846 | } |
4847 | } |
4848 | } |
4849 | if (!need_temp) |
4850 | { |
4851 | int_addr = addr; |
4852 | sse_addr = addr; |
4853 | } |
4854 | else |
4855 | { |
4856 | int_addr = create_tmp_var (ptr_type_nodeglobal_trees[TI_PTR_TYPE], "int_addr"); |
4857 | sse_addr = create_tmp_var (ptr_type_nodeglobal_trees[TI_PTR_TYPE], "sse_addr"); |
4858 | } |
4859 | |
4860 | /* First ensure that we fit completely in registers. */ |
4861 | if (needed_intregs) |
4862 | { |
4863 | t = build_int_cst (TREE_TYPE (gpr)((contains_struct_check ((gpr), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 4863, __FUNCTION__))->typed.type), |
4864 | (X86_64_REGPARM_MAX6 - needed_intregs + 1) * 8); |
4865 | t = build2 (GE_EXPR, boolean_type_nodeglobal_trees[TI_BOOLEAN_TYPE], gpr, t); |
4866 | t2 = build1 (GOTO_EXPR, void_type_nodeglobal_trees[TI_VOID_TYPE], lab_false); |
4867 | t = build3 (COND_EXPR, void_type_nodeglobal_trees[TI_VOID_TYPE], t, t2, NULL_TREE(tree) __null); |
4868 | gimplify_and_add (t, pre_p); |
4869 | } |
4870 | if (needed_sseregs) |
4871 | { |
4872 | t = build_int_cst (TREE_TYPE (fpr)((contains_struct_check ((fpr), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 4872, __FUNCTION__))->typed.type), |
4873 | (X86_64_SSE_REGPARM_MAX8 - needed_sseregs + 1) * 16 |
4874 | + X86_64_REGPARM_MAX6 * 8); |
4875 | t = build2 (GE_EXPR, boolean_type_nodeglobal_trees[TI_BOOLEAN_TYPE], fpr, t); |
4876 | t2 = build1 (GOTO_EXPR, void_type_nodeglobal_trees[TI_VOID_TYPE], lab_false); |
4877 | t = build3 (COND_EXPR, void_type_nodeglobal_trees[TI_VOID_TYPE], t, t2, NULL_TREE(tree) __null); |
4878 | gimplify_and_add (t, pre_p); |
4879 | } |
4880 | |
4881 | /* Compute index to start of area used for integer regs. */ |
4882 | if (needed_intregs) |
4883 | { |
4884 | /* int_addr = gpr + sav; */ |
4885 | t = fold_build_pointer_plus (sav, gpr)fold_build_pointer_plus_loc (((location_t) 0), sav, gpr); |
4886 | gimplify_assign (int_addr, t, pre_p); |
4887 | } |
4888 | if (needed_sseregs) |
4889 | { |
4890 | /* sse_addr = fpr + sav; */ |
4891 | t = fold_build_pointer_plus (sav, fpr)fold_build_pointer_plus_loc (((location_t) 0), sav, fpr); |
4892 | gimplify_assign (sse_addr, t, pre_p); |
4893 | } |
4894 | if (need_temp) |
4895 | { |
4896 | int i, prev_size = 0; |
4897 | tree temp = create_tmp_var (type, "va_arg_tmp"); |
4898 | TREE_ADDRESSABLE (temp)((temp)->base.addressable_flag) = 1; |
4899 | |
4900 | /* addr = &temp; */ |
4901 | t = build1 (ADDR_EXPR, build_pointer_type (type), temp); |
4902 | gimplify_assign (addr, t, pre_p); |
4903 | |
4904 | for (i = 0; i < XVECLEN (container, 0)(((((container)->u.fld[0]).rt_rtvec))->num_elem); i++) |
4905 | { |
4906 | rtx slot = XVECEXP (container, 0, i)(((((container)->u.fld[0]).rt_rtvec))->elem[i]); |
4907 | rtx reg = XEXP (slot, 0)(((slot)->u.fld[0]).rt_rtx); |
4908 | machine_mode mode = GET_MODE (reg)((machine_mode) (reg)->mode); |
4909 | tree piece_type; |
4910 | tree addr_type; |
4911 | tree daddr_type; |
4912 | tree src_addr, src; |
4913 | int src_offset; |
4914 | tree dest_addr, dest; |
4915 | int cur_size = GET_MODE_SIZE (mode)((unsigned short) mode_to_bytes (mode).coeffs[0]); |
4916 | |
4917 | gcc_assert (prev_size <= INTVAL (XEXP (slot, 1)))((void)(!(prev_size <= (((((slot)->u.fld[1]).rt_rtx))-> u.hwint[0])) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 4917, __FUNCTION__), 0 : 0)); |
4918 | prev_size = INTVAL (XEXP (slot, 1))(((((slot)->u.fld[1]).rt_rtx))->u.hwint[0]); |
4919 | if (prev_size + cur_size > size) |
4920 | { |
4921 | cur_size = size - prev_size; |
4922 | unsigned int nbits = cur_size * BITS_PER_UNIT(8); |
4923 | if (!int_mode_for_size (nbits, 1).exists (&mode)) |
4924 | mode = QImode(scalar_int_mode ((scalar_int_mode::from_int) E_QImode)); |
4925 | } |
4926 | piece_type = lang_hooks.types.type_for_mode (mode, 1); |
4927 | if (mode == GET_MODE (reg)((machine_mode) (reg)->mode)) |
4928 | addr_type = build_pointer_type (piece_type); |
4929 | else |
4930 | addr_type = build_pointer_type_for_mode (piece_type, ptr_mode, |
4931 | true); |
4932 | daddr_type = build_pointer_type_for_mode (piece_type, ptr_mode, |
4933 | true); |
4934 | |
4935 | if (SSE_REGNO_P (REGNO (reg))(((unsigned long) (((rhs_regno(reg)))) - (unsigned long) (20) <= (unsigned long) (27) - (unsigned long) (20)) || ((unsigned long) (((rhs_regno(reg)))) - (unsigned long) (44) <= (unsigned long) (51) - (unsigned long) (44)) || ((unsigned long) (((rhs_regno (reg)))) - (unsigned long) (52) <= (unsigned long) (67) - ( unsigned long) (52)))) |
4936 | { |
4937 | src_addr = sse_addr; |
4938 | src_offset = (REGNO (reg)(rhs_regno(reg)) - FIRST_SSE_REG20) * 16; |
4939 | } |
4940 | else |
4941 | { |
4942 | src_addr = int_addr; |
4943 | src_offset = REGNO (reg)(rhs_regno(reg)) * 8; |
4944 | } |
4945 | src_addr = fold_convert (addr_type, src_addr)fold_convert_loc (((location_t) 0), addr_type, src_addr); |
4946 | src_addr = fold_build_pointer_plus_hwi (src_addr, src_offset)fold_build_pointer_plus_hwi_loc (((location_t) 0), src_addr, src_offset ); |
4947 | |
4948 | dest_addr = fold_convert (daddr_type, addr)fold_convert_loc (((location_t) 0), daddr_type, addr); |
4949 | dest_addr = fold_build_pointer_plus_hwi (dest_addr, prev_size)fold_build_pointer_plus_hwi_loc (((location_t) 0), dest_addr, prev_size); |
4950 | if (cur_size == GET_MODE_SIZE (mode)((unsigned short) mode_to_bytes (mode).coeffs[0])) |
4951 | { |
4952 | src = build_va_arg_indirect_ref (src_addr); |
4953 | dest = build_va_arg_indirect_ref (dest_addr); |
4954 | |
4955 | gimplify_assign (dest, src, pre_p); |
4956 | } |
4957 | else |
4958 | { |
4959 | tree copy |
4960 | = build_call_expr (builtin_decl_implicit (BUILT_IN_MEMCPY), |
4961 | 3, dest_addr, src_addr, |
4962 | size_int (cur_size)size_int_kind (cur_size, stk_sizetype)); |
4963 | gimplify_and_add (copy, pre_p); |
4964 | } |
4965 | prev_size += cur_size; |
4966 | } |
4967 | } |
4968 | |
4969 | if (needed_intregs) |
4970 | { |
4971 | t = build2 (PLUS_EXPR, TREE_TYPE (gpr)((contains_struct_check ((gpr), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 4971, __FUNCTION__))->typed.type), gpr, |
4972 | build_int_cst (TREE_TYPE (gpr)((contains_struct_check ((gpr), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 4972, __FUNCTION__))->typed.type), needed_intregs * 8)); |
4973 | gimplify_assign (gpr, t, pre_p); |
4974 | /* The GPR save area guarantees only 8-byte alignment. */ |
4975 | if (!need_temp) |
4976 | type_align = MIN (type_align, 64)((type_align) < (64) ? (type_align) : (64)); |
4977 | } |
4978 | |
4979 | if (needed_sseregs) |
4980 | { |
4981 | t = build2 (PLUS_EXPR, TREE_TYPE (fpr)((contains_struct_check ((fpr), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 4981, __FUNCTION__))->typed.type), fpr, |
4982 | build_int_cst (TREE_TYPE (fpr)((contains_struct_check ((fpr), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 4982, __FUNCTION__))->typed.type), needed_sseregs * 16)); |
4983 | gimplify_assign (unshare_expr (fpr), t, pre_p); |
4984 | } |
4985 | |
4986 | gimple_seq_add_stmt (pre_p, gimple_build_goto (lab_over)); |
4987 | |
4988 | gimple_seq_add_stmt (pre_p, gimple_build_label (lab_false)); |
4989 | } |
4990 | |
4991 | /* ... otherwise out of the overflow area. */ |
4992 | |
4993 | /* When we align parameter on stack for caller, if the parameter |
4994 | alignment is beyond MAX_SUPPORTED_STACK_ALIGNMENT, it will be |
4995 | aligned at MAX_SUPPORTED_STACK_ALIGNMENT. We will match callee |
4996 | here with caller. */ |
4997 | arg_boundary = ix86_function_arg_boundary (VOIDmode((void) 0, E_VOIDmode), type); |
4998 | if ((unsigned int) arg_boundary > MAX_SUPPORTED_STACK_ALIGNMENT(((unsigned int) 1 << 28) * 8)) |
4999 | arg_boundary = MAX_SUPPORTED_STACK_ALIGNMENT(((unsigned int) 1 << 28) * 8); |
5000 | |
5001 | /* Care for on-stack alignment if needed. */ |
5002 | if (arg_boundary <= 64 || size == 0) |
5003 | t = ovf; |
5004 | else |
5005 | { |
5006 | HOST_WIDE_INTlong align = arg_boundary / 8; |
5007 | t = fold_build_pointer_plus_hwi (ovf, align - 1)fold_build_pointer_plus_hwi_loc (((location_t) 0), ovf, align - 1); |
5008 | t = build2 (BIT_AND_EXPR, TREE_TYPE (t)((contains_struct_check ((t), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 5008, __FUNCTION__))->typed.type), t, |
5009 | build_int_cst (TREE_TYPE (t)((contains_struct_check ((t), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 5009, __FUNCTION__))->typed.type), -align)); |
5010 | } |
5011 | |
5012 | gimplify_expr (&t, pre_p, NULL__null, is_gimple_val, fb_rvalue); |
5013 | gimplify_assign (addr, t, pre_p); |
5014 | |
5015 | t = fold_build_pointer_plus_hwi (t, rsize * UNITS_PER_WORD)fold_build_pointer_plus_hwi_loc (((location_t) 0), t, rsize * (((global_options.x_ix86_isa_flags & (1UL << 1)) != 0) ? 8 : 4)); |
5016 | gimplify_assign (unshare_expr (ovf), t, pre_p); |
5017 | |
5018 | if (container) |
5019 | gimple_seq_add_stmt (pre_p, gimple_build_label (lab_over)); |
5020 | |
5021 | type = build_aligned_type (type, type_align); |
5022 | ptrtype = build_pointer_type_for_mode (type, ptr_mode, true); |
5023 | addr = fold_convert (ptrtype, addr)fold_convert_loc (((location_t) 0), ptrtype, addr); |
5024 | |
5025 | if (indirect_p) |
5026 | addr = build_va_arg_indirect_ref (addr); |
5027 | return build_va_arg_indirect_ref (addr); |
5028 | } |
5029 | |
5030 | /* Return true if OPNUM's MEM should be matched |
5031 | in movabs* patterns. */ |
5032 | |
5033 | bool |
5034 | ix86_check_movabs (rtx insn, int opnum) |
5035 | { |
5036 | rtx set, mem; |
5037 | |
5038 | set = PATTERN (insn); |
5039 | if (GET_CODE (set)((enum rtx_code) (set)->code) == PARALLEL) |
5040 | set = XVECEXP (set, 0, 0)(((((set)->u.fld[0]).rt_rtvec))->elem[0]); |
5041 | gcc_assert (GET_CODE (set) == SET)((void)(!(((enum rtx_code) (set)->code) == SET) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 5041, __FUNCTION__), 0 : 0)); |
5042 | mem = XEXP (set, opnum)(((set)->u.fld[opnum]).rt_rtx); |
5043 | while (SUBREG_P (mem)(((enum rtx_code) (mem)->code) == SUBREG)) |
5044 | mem = SUBREG_REG (mem)(((mem)->u.fld[0]).rt_rtx); |
5045 | gcc_assert (MEM_P (mem))((void)(!((((enum rtx_code) (mem)->code) == MEM)) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 5045, __FUNCTION__), 0 : 0)); |
5046 | return volatile_ok || !MEM_VOLATILE_P (mem)(__extension__ ({ __typeof ((mem)) const _rtx = ((mem)); if ( ((enum rtx_code) (_rtx)->code) != MEM && ((enum rtx_code ) (_rtx)->code) != ASM_OPERANDS && ((enum rtx_code ) (_rtx)->code) != ASM_INPUT) rtl_check_failed_flag ("MEM_VOLATILE_P" , _rtx, "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 5046, __FUNCTION__); _rtx; })->volatil); |
5047 | } |
5048 | |
5049 | /* Return false if INSN contains a MEM with a non-default address space. */ |
5050 | bool |
5051 | ix86_check_no_addr_space (rtx insn) |
5052 | { |
5053 | subrtx_var_iterator::array_type array; |
5054 | FOR_EACH_SUBRTX_VAR (iter, array, PATTERN (insn), ALL)for (subrtx_var_iterator iter (array, PATTERN (insn), rtx_all_subrtx_bounds ); !iter.at_end (); iter.next ()) |
5055 | { |
5056 | rtx x = *iter; |
5057 | if (MEM_P (x)(((enum rtx_code) (x)->code) == MEM) && !ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x))(((get_mem_attrs (x)->addrspace)) == 0)) |
5058 | return false; |
5059 | } |
5060 | return true; |
5061 | } |
5062 | |
5063 | /* Initialize the table of extra 80387 mathematical constants. */ |
5064 | |
5065 | static void |
5066 | init_ext_80387_constants (void) |
5067 | { |
5068 | static const char * cst[5] = |
5069 | { |
5070 | "0.3010299956639811952256464283594894482", /* 0: fldlg2 */ |
5071 | "0.6931471805599453094286904741849753009", /* 1: fldln2 */ |
5072 | "1.4426950408889634073876517827983434472", /* 2: fldl2e */ |
5073 | "3.3219280948873623478083405569094566090", /* 3: fldl2t */ |
5074 | "3.1415926535897932385128089594061862044", /* 4: fldpi */ |
5075 | }; |
5076 | int i; |
5077 | |
5078 | for (i = 0; i < 5; i++) |
5079 | { |
5080 | real_from_string (&ext_80387_constants_table[i], cst[i]); |
5081 | /* Ensure each constant is rounded to XFmode precision. */ |
5082 | real_convert (&ext_80387_constants_table[i], |
5083 | XFmode(scalar_float_mode ((scalar_float_mode::from_int) E_XFmode)), &ext_80387_constants_table[i]); |
5084 | } |
5085 | |
5086 | ext_80387_constants_init = 1; |
5087 | } |
5088 | |
5089 | /* Return non-zero if the constant is something that |
5090 | can be loaded with a special instruction. */ |
5091 | |
5092 | int |
5093 | standard_80387_constant_p (rtx x) |
5094 | { |
5095 | machine_mode mode = GET_MODE (x)((machine_mode) (x)->mode); |
5096 | |
5097 | const REAL_VALUE_TYPEstruct real_value *r; |
5098 | |
5099 | if (!(CONST_DOUBLE_P (x)(((enum rtx_code) (x)->code) == CONST_DOUBLE) && X87_FLOAT_MODE_P (mode)(((global_options.x_target_flags & (1U << 1)) != 0) && ((mode) == (scalar_float_mode ((scalar_float_mode ::from_int) E_SFmode)) || (mode) == (scalar_float_mode ((scalar_float_mode ::from_int) E_DFmode)) || (mode) == (scalar_float_mode ((scalar_float_mode ::from_int) E_XFmode)))))) |
5100 | return -1; |
5101 | |
5102 | if (x == CONST0_RTX (mode)(const_tiny_rtx[0][(int) (mode)])) |
5103 | return 1; |
5104 | if (x == CONST1_RTX (mode)(const_tiny_rtx[1][(int) (mode)])) |
5105 | return 2; |
5106 | |
5107 | r = CONST_DOUBLE_REAL_VALUE (x)((const struct real_value *) (&(x)->u.rv)); |
5108 | |
5109 | /* For XFmode constants, try to find a special 80387 instruction when |
5110 | optimizing for size or on those CPUs that benefit from them. */ |
5111 | if (mode == XFmode(scalar_float_mode ((scalar_float_mode::from_int) E_XFmode)) |
5112 | && (optimize_function_for_size_p (cfun(cfun + 0)) || TARGET_EXT_80387_CONSTANTSix86_tune_features[X86_TUNE_EXT_80387_CONSTANTS]) |
5113 | && !flag_rounding_mathglobal_options.x_flag_rounding_math) |
5114 | { |
5115 | int i; |
5116 | |
5117 | if (! ext_80387_constants_init) |
5118 | init_ext_80387_constants (); |
5119 | |
5120 | for (i = 0; i < 5; i++) |
5121 | if (real_identical (r, &ext_80387_constants_table[i])) |
5122 | return i + 3; |
5123 | } |
5124 | |
5125 | /* Load of the constant -0.0 or -1.0 will be split as |
5126 | fldz;fchs or fld1;fchs sequence. */ |
5127 | if (real_isnegzero (r)) |
5128 | return 8; |
5129 | if (real_identical (r, &dconstm1)) |
5130 | return 9; |
5131 | |
5132 | return 0; |
5133 | } |
5134 | |
5135 | /* Return the opcode of the special instruction to be used to load |
5136 | the constant X. */ |
5137 | |
5138 | const char * |
5139 | standard_80387_constant_opcode (rtx x) |
5140 | { |
5141 | switch (standard_80387_constant_p (x)) |
5142 | { |
5143 | case 1: |
5144 | return "fldz"; |
5145 | case 2: |
5146 | return "fld1"; |
5147 | case 3: |
5148 | return "fldlg2"; |
5149 | case 4: |
5150 | return "fldln2"; |
5151 | case 5: |
5152 | return "fldl2e"; |
5153 | case 6: |
5154 | return "fldl2t"; |
5155 | case 7: |
5156 | return "fldpi"; |
5157 | case 8: |
5158 | case 9: |
5159 | return "#"; |
5160 | default: |
5161 | gcc_unreachable ()(fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 5161, __FUNCTION__)); |
5162 | } |
5163 | } |
5164 | |
5165 | /* Return the CONST_DOUBLE representing the 80387 constant that is |
5166 | loaded by the specified special instruction. The argument IDX |
5167 | matches the return value from standard_80387_constant_p. */ |
5168 | |
5169 | rtx |
5170 | standard_80387_constant_rtx (int idx) |
5171 | { |
5172 | int i; |
5173 | |
5174 | if (! ext_80387_constants_init) |
5175 | init_ext_80387_constants (); |
5176 | |
5177 | switch (idx) |
5178 | { |
5179 | case 3: |
5180 | case 4: |
5181 | case 5: |
5182 | case 6: |
5183 | case 7: |
5184 | i = idx - 3; |
5185 | break; |
5186 | |
5187 | default: |
5188 | gcc_unreachable ()(fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 5188, __FUNCTION__)); |
5189 | } |
5190 | |
5191 | return const_double_from_real_value (ext_80387_constants_table[i], |
5192 | XFmode(scalar_float_mode ((scalar_float_mode::from_int) E_XFmode))); |
5193 | } |
5194 | |
5195 | /* Return 1 if X is all bits 0, 2 if X is all bits 1 |
5196 | and 3 if X is all bits 1 with zero extend |
5197 | in supported SSE/AVX vector mode. */ |
5198 | |
5199 | int |
5200 | standard_sse_constant_p (rtx x, machine_mode pred_mode) |
5201 | { |
5202 | machine_mode mode; |
5203 | |
5204 | if (!TARGET_SSE((global_options.x_ix86_isa_flags & (1UL << 50)) != 0)) |
5205 | return 0; |
5206 | |
5207 | mode = GET_MODE (x)((machine_mode) (x)->mode); |
5208 | |
5209 | if (x == const0_rtx(const_int_rtx[64]) || const0_operand (x, mode)) |
5210 | return 1; |
5211 | |
5212 | if (x == constm1_rtx(const_int_rtx[64 -1]) |
5213 | || vector_all_ones_operand (x, mode) |
5214 | || ((GET_MODE_CLASS (mode)((enum mode_class) mode_class[mode]) == MODE_VECTOR_FLOAT |
5215 | || GET_MODE_CLASS (pred_mode)((enum mode_class) mode_class[pred_mode]) == MODE_VECTOR_FLOAT) |
5216 | && float_vector_all_ones_operand (x, mode))) |
5217 | { |
5218 | /* VOIDmode integer constant, get mode from the predicate. */ |
5219 | if (mode == VOIDmode((void) 0, E_VOIDmode)) |
5220 | mode = pred_mode; |
5221 | |
5222 | switch (GET_MODE_SIZE (mode)((unsigned short) mode_to_bytes (mode).coeffs[0])) |
5223 | { |
5224 | case 64: |
5225 | if (TARGET_AVX512F((global_options.x_ix86_isa_flags & (1UL << 15)) != 0)) |
5226 | return 2; |
5227 | break; |
5228 | case 32: |
5229 | if (TARGET_AVX2((global_options.x_ix86_isa_flags & (1UL << 9)) != 0 )) |
5230 | return 2; |
5231 | break; |
5232 | case 16: |
5233 | if (TARGET_SSE2((global_options.x_ix86_isa_flags & (1UL << 51)) != 0)) |
5234 | return 2; |
5235 | break; |
5236 | case 0: |
5237 | /* VOIDmode */ |
5238 | gcc_unreachable ()(fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 5238, __FUNCTION__)); |
5239 | default: |
5240 | break; |
5241 | } |
5242 | } |
5243 | |
5244 | if (vector_all_ones_zero_extend_half_operand (x, mode) |
5245 | || vector_all_ones_zero_extend_quarter_operand (x, mode)) |
5246 | return 3; |
5247 | |
5248 | return 0; |
5249 | } |
5250 | |
5251 | /* Return the opcode of the special instruction to be used to load |
5252 | the constant operands[1] into operands[0]. */ |
5253 | |
5254 | const char * |
5255 | standard_sse_constant_opcode (rtx_insn *insn, rtx *operands) |
5256 | { |
5257 | machine_mode mode; |
5258 | rtx x = operands[1]; |
5259 | |
5260 | gcc_assert (TARGET_SSE)((void)(!(((global_options.x_ix86_isa_flags & (1UL << 50)) != 0)) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 5260, __FUNCTION__), 0 : 0)); |
5261 | |
5262 | mode = GET_MODE (x)((machine_mode) (x)->mode); |
5263 | |
5264 | if (x == const0_rtx(const_int_rtx[64]) || const0_operand (x, mode)) |
5265 | { |
5266 | switch (get_attr_mode (insn)) |
5267 | { |
5268 | case MODE_TI: |
5269 | if (!EXT_REX_SSE_REG_P (operands[0])((((enum rtx_code) (operands[0])->code) == REG) && ((unsigned long) (((rhs_regno(operands[0])))) - (unsigned long ) (52) <= (unsigned long) (67) - (unsigned long) (52)))) |
5270 | return "%vpxor\t%0, %d0"; |
5271 | /* FALLTHRU */ |
5272 | case MODE_XI: |
5273 | case MODE_OI: |
5274 | if (EXT_REX_SSE_REG_P (operands[0])((((enum rtx_code) (operands[0])->code) == REG) && ((unsigned long) (((rhs_regno(operands[0])))) - (unsigned long ) (52) <= (unsigned long) (67) - (unsigned long) (52)))) |
5275 | return (TARGET_AVX512VL((global_options.x_ix86_isa_flags & (1UL << 20)) != 0) |
5276 | ? "vpxord\t%x0, %x0, %x0" |
5277 | : "vpxord\t%g0, %g0, %g0"); |
5278 | return "vpxor\t%x0, %x0, %x0"; |
5279 | |
5280 | case MODE_V2DF: |
5281 | if (!EXT_REX_SSE_REG_P (operands[0])((((enum rtx_code) (operands[0])->code) == REG) && ((unsigned long) (((rhs_regno(operands[0])))) - (unsigned long ) (52) <= (unsigned long) (67) - (unsigned long) (52)))) |
5282 | return "%vxorpd\t%0, %d0"; |
5283 | /* FALLTHRU */ |
5284 | case MODE_V8DF: |
5285 | case MODE_V4DF: |
5286 | if (!EXT_REX_SSE_REG_P (operands[0])((((enum rtx_code) (operands[0])->code) == REG) && ((unsigned long) (((rhs_regno(operands[0])))) - (unsigned long ) (52) <= (unsigned long) (67) - (unsigned long) (52)))) |
5287 | return "vxorpd\t%x0, %x0, %x0"; |
5288 | else if (TARGET_AVX512DQ((global_options.x_ix86_isa_flags & (1UL << 13)) != 0)) |
5289 | return (TARGET_AVX512VL((global_options.x_ix86_isa_flags & (1UL << 20)) != 0) |
5290 | ? "vxorpd\t%x0, %x0, %x0" |
5291 | : "vxorpd\t%g0, %g0, %g0"); |
5292 | else |
5293 | return (TARGET_AVX512VL((global_options.x_ix86_isa_flags & (1UL << 20)) != 0) |
5294 | ? "vpxorq\t%x0, %x0, %x0" |
5295 | : "vpxorq\t%g0, %g0, %g0"); |
5296 | |
5297 | case MODE_V4SF: |
5298 | if (!EXT_REX_SSE_REG_P (operands[0])((((enum rtx_code) (operands[0])->code) == REG) && ((unsigned long) (((rhs_regno(operands[0])))) - (unsigned long ) (52) <= (unsigned long) (67) - (unsigned long) (52)))) |
5299 | return "%vxorps\t%0, %d0"; |
5300 | /* FALLTHRU */ |
5301 | case MODE_V16SF: |
5302 | case MODE_V8SF: |
5303 | if (!EXT_REX_SSE_REG_P (operands[0])((((enum rtx_code) (operands[0])->code) == REG) && ((unsigned long) (((rhs_regno(operands[0])))) - (unsigned long ) (52) <= (unsigned long) (67) - (unsigned long) (52)))) |
5304 | return "vxorps\t%x0, %x0, %x0"; |
5305 | else if (TARGET_AVX512DQ((global_options.x_ix86_isa_flags & (1UL << 13)) != 0)) |
5306 | return (TARGET_AVX512VL((global_options.x_ix86_isa_flags & (1UL << 20)) != 0) |
5307 | ? "vxorps\t%x0, %x0, %x0" |
5308 | : "vxorps\t%g0, %g0, %g0"); |
5309 | else |
5310 | return (TARGET_AVX512VL((global_options.x_ix86_isa_flags & (1UL << 20)) != 0) |
5311 | ? "vpxord\t%x0, %x0, %x0" |
5312 | : "vpxord\t%g0, %g0, %g0"); |
5313 | |
5314 | default: |
5315 | gcc_unreachable ()(fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 5315, __FUNCTION__)); |
5316 | } |
5317 | } |
5318 | else if (x == constm1_rtx(const_int_rtx[64 -1]) |
5319 | || vector_all_ones_operand (x, mode) |
5320 | || (GET_MODE_CLASS (mode)((enum mode_class) mode_class[mode]) == MODE_VECTOR_FLOAT |
5321 | && float_vector_all_ones_operand (x, mode))) |
5322 | { |
5323 | enum attr_mode insn_mode = get_attr_mode (insn); |
5324 | |
5325 | switch (insn_mode) |
5326 | { |
5327 | case MODE_XI: |
5328 | case MODE_V8DF: |
5329 | case MODE_V16SF: |
5330 | gcc_assert (TARGET_AVX512F)((void)(!(((global_options.x_ix86_isa_flags & (1UL << 15)) != 0)) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 5330, __FUNCTION__), 0 : 0)); |
5331 | return "vpternlogd\t{$0xFF, %g0, %g0, %g0|%g0, %g0, %g0, 0xFF}"; |
5332 | |
5333 | case MODE_OI: |
5334 | case MODE_V4DF: |
5335 | case MODE_V8SF: |
5336 | gcc_assert (TARGET_AVX2)((void)(!(((global_options.x_ix86_isa_flags & (1UL << 9)) != 0)) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 5336, __FUNCTION__), 0 : 0)); |
5337 | /* FALLTHRU */ |
5338 | case MODE_TI: |
5339 | case MODE_V2DF: |
5340 | case MODE_V4SF: |
5341 | gcc_assert (TARGET_SSE2)((void)(!(((global_options.x_ix86_isa_flags & (1UL << 51)) != 0)) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 5341, __FUNCTION__), 0 : 0)); |
5342 | if (!EXT_REX_SSE_REG_P (operands[0])((((enum rtx_code) (operands[0])->code) == REG) && ((unsigned long) (((rhs_regno(operands[0])))) - (unsigned long ) (52) <= (unsigned long) (67) - (unsigned long) (52)))) |
5343 | return (TARGET_AVX((global_options.x_ix86_isa_flags & (1UL << 8)) != 0 ) |
5344 | ? "vpcmpeqd\t%0, %0, %0" |
5345 | : "pcmpeqd\t%0, %0"); |
5346 | else if (TARGET_AVX512VL((global_options.x_ix86_isa_flags & (1UL << 20)) != 0)) |
5347 | return "vpternlogd\t{$0xFF, %0, %0, %0|%0, %0, %0, 0xFF}"; |
5348 | else |
5349 | return "vpternlogd\t{$0xFF, %g0, %g0, %g0|%g0, %g0, %g0, 0xFF}"; |
5350 | |
5351 | default: |
5352 | gcc_unreachable ()(fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 5352, __FUNCTION__)); |
5353 | } |
5354 | } |
5355 | else if (vector_all_ones_zero_extend_half_operand (x, mode)) |
5356 | { |
5357 | if (GET_MODE_SIZE (mode)((unsigned short) mode_to_bytes (mode).coeffs[0]) == 64) |
5358 | { |
5359 | gcc_assert (TARGET_AVX512F)((void)(!(((global_options.x_ix86_isa_flags & (1UL << 15)) != 0)) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 5359, __FUNCTION__), 0 : 0)); |
5360 | return "vpcmpeqd \t %t0, %t0, %t0"; |
5361 | } |
5362 | else if (GET_MODE_SIZE (mode)((unsigned short) mode_to_bytes (mode).coeffs[0]) == 32) |
5363 | { |
5364 | gcc_assert (TARGET_AVX)((void)(!(((global_options.x_ix86_isa_flags & (1UL << 8)) != 0)) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 5364, __FUNCTION__), 0 : 0)); |
5365 | return "vpcmpeqd \t %x0, %x0, %x0"; |
5366 | } |
5367 | gcc_unreachable ()(fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 5367, __FUNCTION__)); |
5368 | } |
5369 | else if (vector_all_ones_zero_extend_quarter_operand (x, mode)) |
5370 | { |
5371 | gcc_assert (TARGET_AVX512F)((void)(!(((global_options.x_ix86_isa_flags & (1UL << 15)) != 0)) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 5371, __FUNCTION__), 0 : 0)); |
5372 | return "vpcmpeqd \t %x0, %x0, %x0"; |
5373 | } |
5374 | |
5375 | gcc_unreachable ()(fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 5375, __FUNCTION__)); |
5376 | } |
5377 | |
5378 | /* Returns true if INSN can be transformed from a memory load
5379 | to a supported FP constant load. */
5380 |
5381 | bool
5382 | ix86_standard_x87sse_constant_load_p (const rtx_insn *insn, rtx dst)
5383 | {
/* Presumably extracts the constant being loaded by INSN (directly or via
   the constant pool); NULL when INSN has no constant source — TODO confirm
   against find_constant_src's definition.  */
5384 | rtx src = find_constant_src (insn);
5385 |
/* The destination must already be a register (REG_P), otherwise this
   predicate is being misused — hard assert rather than a false return.  */
5386 | gcc_assert (REG_P (dst))((void)(!((((enum rtx_code) (dst)->code) == REG)) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.cc" , 5386, __FUNCTION__), 0 : 0));
5387 |
/* Reject the transformation when:
   - no constant source was found at all; or
   - DST is an SSE register (the expanded ranges cover xmm0-7, xmm8-15
     and xmm16-31 hard regnos) but SRC is not the one SSE constant kind
     accepted here (standard_sse_constant_p must return exactly 1); or
   - DST is an x87 stack register (regnos 8-15 per the expansion) but SRC
     is not a standard 80387 constant (standard_80387_constant_p < 1).  */
5388 | if (src == NULL__null
5389 | || (SSE_REGNO_P (REGNO (dst))(((unsigned long) (((rhs_regno(dst)))) - (unsigned long) (20) <= (unsigned long) (27) - (unsigned long) (20)) || ((unsigned long) (((rhs_regno(dst)))) - (unsigned long) (44) <= (unsigned long) (51) - (unsigned long) (44)) || ((unsigned long) (((rhs_regno (dst)))) - (unsigned long) (52) <= (unsigned long) (67) - ( unsigned long) (52)))
5390 | && standard_sse_constant_p (src, GET_MODE (dst)((machine_mode) (dst)->mode)) != 1)
5391 | || (STACK_REGNO_P (REGNO (dst))((unsigned long) (((rhs_regno(dst)))) - (unsigned long) (8) <= (unsigned long) (15) - (unsigned long) (8))
5392 | && standard_80387_constant_p (src) < 1))
5393 | return false;
5394 |
/* All checks passed: the memory load can become an FP constant load.  */
5395 | return true;
5396 | }
5397 | |
5398 | /* Predicate for pre-reload splitters with associated instructions,
5399 | which can match any time before the split1 pass (usually combine),
5400 | then are unconditionally split in that pass and should not be
5401 | matched again afterwards. */
5402 |
5403 | bool
5404 | ix86_pre_reload_split (void)
5405 | {
/* True only while new pseudos may still be created
   (can_create_pseudo_p expands to !reload_in_progress && !reload_completed,
   visible in the expansion below) AND the current function has not yet
   been through split1 (PROP_rtl_split_insns, bit 1<<17 of
   cfun->curr_properties, is set by that pass).  Both conditions keep
   pre-reload splitter patterns from matching after they have already
   been split.  */
5406 | return (can_create_pseudo_p ()(!reload_in_progress && !reload_completed)
5407 | && !(cfun(cfun + 0)->curr_properties & PROP_rtl_split_insns(1 << 17)));
5408 | }
5409 | |
5410 | /* Return the opcode of the TYPE_SSEMOV instruction. To move from |
5411 | or to xmm16-xmm31/ymm16-ymm31 registers, we either require |