Bug Summary

File: build/gcc/final.cc
Warning: line 2441, column 8
The result of the left shift is undefined because the right operand is negative
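
For context, the flagged operation is a left shift whose right operand can become negative: the annotated path below shows exact_log2 () returning the value -1, which then reaches the shift reported at line 2441. The following standalone sketch illustrates the same pattern and the kind of guard that avoids it; the helper name, the value 48, and the guard are invented for this illustration and are not taken from GCC's code.

#include <cstdio>

/* Illustrative stand-in for exact_log2: returns log2 (x) when x is a
   power of two, and -1 otherwise -- the -1 seen in the bug path.  */
static int
exact_log2_sketch (unsigned long x)
{
  if (x == 0 || (x & (x - 1)) != 0)
    return -1;
  int log = 0;
  for (; x > 1; x >>= 1)
    log++;
  return log;
}

int
main ()
{
  int log = exact_log2_sketch (48);  /* 48 is not a power of two, so -1.  */
  if (log < 0)                       /* Checking the sign here is what    */
    log = 0;                         /* prevents the undefined shift.     */
  unsigned long bytes = 1UL << log;  /* Without the guard: 1UL << -1, UB. */
  printf ("%lu\n", bytes);
  return 0;
}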

Annotated Source Code

clang -cc1 -cc1 -triple x86_64-suse-linux -analyze -disable-free -clear-ast-before-backend -disable-llvm-verifier -discard-value-names -main-file-name final.cc -analyzer-checker=core -analyzer-checker=apiModeling -analyzer-checker=unix -analyzer-checker=deadcode -analyzer-checker=cplusplus -analyzer-checker=security.insecureAPI.UncheckedReturn -analyzer-checker=security.insecureAPI.getpw -analyzer-checker=security.insecureAPI.gets -analyzer-checker=security.insecureAPI.mktemp -analyzer-checker=security.insecureAPI.mkstemp -analyzer-checker=security.insecureAPI.vfork -analyzer-checker=nullability.NullPassedToNonnull -analyzer-checker=nullability.NullReturnedFromNonnull -analyzer-output plist -w -setup-static-analyzer -analyzer-config-compatibility-mode=true -mrelocation-model static -mframe-pointer=none -fmath-errno -ffp-contract=on -fno-rounding-math -mconstructor-aliases -funwind-tables=2 -target-cpu x86-64 -tune-cpu generic -debugger-tuning=gdb -fcoverage-compilation-dir=/buildworker/marxinbox-gcc-clang-static-analyzer/objdir/gcc -resource-dir /usr/lib64/clang/15.0.7 -D IN_GCC -D HAVE_CONFIG_H -I . -I . -I /buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc -I /buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/. -I /buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../include -I /buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libcpp/include -I /buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libcody -I /buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libdecnumber -I /buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libdecnumber/bid -I ../libdecnumber -I /buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libbacktrace -internal-isystem /usr/bin/../lib64/gcc/x86_64-suse-linux/13/../../../../include/c++/13 -internal-isystem /usr/bin/../lib64/gcc/x86_64-suse-linux/13/../../../../include/c++/13/x86_64-suse-linux -internal-isystem /usr/bin/../lib64/gcc/x86_64-suse-linux/13/../../../../include/c++/13/backward -internal-isystem /usr/lib64/clang/15.0.7/include -internal-isystem /usr/local/include -internal-isystem /usr/bin/../lib64/gcc/x86_64-suse-linux/13/../../../../x86_64-suse-linux/include -internal-externc-isystem /include -internal-externc-isystem /usr/include -O2 -Wno-narrowing -Wwrite-strings -Wno-long-long -Wno-variadic-macros -Wno-overlength-strings -fdeprecated-macro -fdebug-compilation-dir=/buildworker/marxinbox-gcc-clang-static-analyzer/objdir/gcc -ferror-limit 19 -fno-rtti -fgnuc-version=4.2.1 -vectorize-loops -vectorize-slp -analyzer-output=plist-html -analyzer-config silence-checkers=core.NullDereference -faddrsig -D__GCC_HAVE_DWARF2_CFI_ASM=1 -o /buildworker/marxinbox-gcc-clang-static-analyzer/objdir/clang-static-analyzer/2023-03-27-141847-20772-1/report-X1seDU.plist -x c++ /buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/final.cc

/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/final.cc

1/* Convert RTL to assembler code and output it, for GNU compiler.
2 Copyright (C) 1987-2023 Free Software Foundation, Inc.
3
4This file is part of GCC.
5
6GCC is free software; you can redistribute it and/or modify it under
7the terms of the GNU General Public License as published by the Free
8Software Foundation; either version 3, or (at your option) any later
9version.
10
11GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12WARRANTY; without even the implied warranty of MERCHANTABILITY or
13FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14for more details.
15
16You should have received a copy of the GNU General Public License
17along with GCC; see the file COPYING3. If not see
18<http://www.gnu.org/licenses/>. */
19
20/* This is the final pass of the compiler.
21 It looks at the rtl code for a function and outputs assembler code.
22
23 Call `final_start_function' to output the assembler code for function entry,
24 `final' to output assembler code for some RTL code,
25 `final_end_function' to output assembler code for function exit.
26 If a function is compiled in several pieces, each piece is
27 output separately with `final'.
28
29 Some optimizations are also done at this level.
30 Move instructions that were made unnecessary by good register allocation
31 are detected and omitted from the output. (Though most of these
32 are removed by the last jump pass.)
33
34 Instructions to set the condition codes are omitted when it can be
35 seen that the condition codes already had the desired values.
36
37 In some cases it is sufficient if the inherited condition codes
38 have related values, but this may require the following insn
39 (the one that tests the condition codes) to be modified.
40
41 The code for the function prologue and epilogue are generated
42 directly in assembler by the target functions function_prologue and
43 function_epilogue. Those instructions never exist as rtl. */
44
45#include "config.h"
46#define INCLUDE_ALGORITHM /* reverse */
47#include "system.h"
48#include "coretypes.h"
49#include "backend.h"
50#include "target.h"
51#include "rtl.h"
52#include "tree.h"
53#include "cfghooks.h"
54#include "df.h"
55#include "memmodel.h"
56#include "tm_p.h"
57#include "insn-config.h"
58#include "regs.h"
59#include "emit-rtl.h"
60#include "recog.h"
61#include "cgraph.h"
62#include "tree-pretty-print.h" /* for dump_function_header */
63#include "varasm.h"
64#include "insn-attr.h"
65#include "conditions.h"
66#include "flags.h"
67#include "output.h"
68#include "except.h"
69#include "rtl-error.h"
70#include "toplev.h" /* exact_log2, floor_log2 */
71#include "reload.h"
72#include "intl.h"
73#include "cfgrtl.h"
74#include "debug.h"
75#include "tree-pass.h"
76#include "tree-ssa.h"
77#include "cfgloop.h"
78#include "stringpool.h"
79#include "attribs.h"
80#include "asan.h"
81#include "rtl-iter.h"
82#include "print-rtl.h"
83#include "function-abi.h"
84#include "common/common-target.h"
85
86#include "dwarf2out.h"
87
88/* Most ports don't need to define CC_STATUS_INIT.
89 So define a null default for it to save conditionalization later. */
90#ifndef CC_STATUS_INIT
91#define CC_STATUS_INIT
92#endif
93
94/* Is the given character a logical line separator for the assembler? */
95#ifndef IS_ASM_LOGICAL_LINE_SEPARATOR
96#define IS_ASM_LOGICAL_LINE_SEPARATOR(C, STR)((C) == ';') ((C) == ';')
97#endif
98
99#ifndef JUMP_TABLES_IN_TEXT_SECTION(global_options.x_flag_pic && !(((global_options.x_ix86_isa_flags & (1UL << 1)) != 0) || 1))
100#define JUMP_TABLES_IN_TEXT_SECTION(global_options.x_flag_pic && !(((global_options.x_ix86_isa_flags & (1UL << 1)) != 0) || 1)) 0
101#endif
102
103/* Bitflags used by final_scan_insn. */
104#define SEEN_NOTE1 1
105#define SEEN_EMITTED2 2
106#define SEEN_NEXT_VIEW4 4
107
108/* Last insn processed by final_scan_insn. */
109static rtx_insn *debug_insn;
110rtx_insn *current_output_insn;
111
112/* Line number of last NOTE. */
113static int last_linenum;
114
115/* Column number of last NOTE. */
116static int last_columnnum;
117
118/* Discriminator written to assembly. */
119static int last_discriminator;
120
121/* Compute discriminator to be written to assembly for current instruction.
122 Note: actual usage depends on loc_discriminator_kind setting. */
123static inline int compute_discriminator (location_t loc);
124
125/* Highest line number in current block. */
126static int high_block_linenum;
127
128/* Likewise for function. */
129static int high_function_linenum;
130
131/* Filename of last NOTE. */
132static const char *last_filename;
133
134/* Override filename, line and column number. */
135static const char *override_filename;
136static int override_linenum;
137static int override_columnnum;
138static int override_discriminator;
139
140/* Whether to force emission of a line note before the next insn. */
141static bool force_source_line = false;
142
143extern const int length_unit_log; /* This is defined in insn-attrtab.cc. */
144
145/* Nonzero while outputting an `asm' with operands.
146 This means that inconsistencies are the user's fault, so don't die.
147 The precise value is the insn being output, to pass to error_for_asm. */
148const rtx_insn *this_is_asm_operands;
149
150/* Number of operands of this insn, for an `asm' with operands. */
151static unsigned int insn_noperands;
152
153/* Compare optimization flag. */
154
155static rtx last_ignored_compare = 0;
156
157/* Assign a unique number to each insn that is output.
158 This can be used to generate unique local labels. */
159
160static int insn_counter = 0;
161
162/* Number of unmatched NOTE_INSN_BLOCK_BEG notes we have seen. */
163
164static int block_depth;
165
166/* Nonzero if have enabled APP processing of our assembler output. */
167
168static int app_on;
169
170/* If we are outputting an insn sequence, this contains the sequence rtx.
171 Zero otherwise. */
172
173rtx_sequence *final_sequence;
174
175#ifdef ASSEMBLER_DIALECT(global_options.x_ix86_asm_dialect)
176
177/* Number of the assembler dialect to use, starting at 0. */
178static int dialect_number;
179#endif
180
181/* Nonnull if the insn currently being emitted was a COND_EXEC pattern. */
182rtx current_insn_predicate;
183
184/* True if printing into -fdump-final-insns= dump. */
185bool final_insns_dump_p;
186
187/* True if profile_function should be called, but hasn't been called yet. */
188static bool need_profile_function;
189
190static int asm_insn_count (rtx);
191static void profile_function (FILE *);
192static void profile_after_prologue (FILE *);
193static bool notice_source_line (rtx_insn *, bool *);
194static rtx walk_alter_subreg (rtx *, bool *);
195static void output_asm_name (void);
196static void output_alternate_entry_point (FILE *, rtx_insn *);
197static tree get_mem_expr_from_op (rtx, int *);
198static void output_asm_operand_names (rtx *, int *, int);
199#ifdef LEAF_REGISTERS
200static void leaf_renumber_regs (rtx_insn *);
201#endif
202static int align_fuzz (rtx, rtx, int, unsigned);
203static void collect_fn_hard_reg_usage (void);
204
205/* Initialize data in final at the beginning of a compilation. */
206
207void
208init_final (const char *filename ATTRIBUTE_UNUSED__attribute__ ((__unused__)))
209{
210 app_on = 0;
211 final_sequence = 0;
212
213#ifdef ASSEMBLER_DIALECT(global_options.x_ix86_asm_dialect)
214 dialect_number = ASSEMBLER_DIALECT(global_options.x_ix86_asm_dialect);
215#endif
216}
217
218/* Default target function prologue and epilogue assembler output.
219
220 If not overridden for epilogue code, then the function body itself
221 contains return instructions wherever needed. */
222void
223default_function_pro_epilogue (FILE *)
224{
225}
226
227void
228default_function_switched_text_sections (FILE *file ATTRIBUTE_UNUSED__attribute__ ((__unused__)),
229 tree decl ATTRIBUTE_UNUSED__attribute__ ((__unused__)),
230 bool new_is_cold ATTRIBUTE_UNUSED__attribute__ ((__unused__)))
231{
232}
233
234/* Default target hook that outputs nothing to a stream. */
235void
236no_asm_to_stream (FILE *file ATTRIBUTE_UNUSED__attribute__ ((__unused__)))
237{
238}
239
240/* Enable APP processing of subsequent output.
241 Used before the output from an `asm' statement. */
242
243void
244app_enable (void)
245{
246 if (! app_on)
247 {
248 fputs (ASM_APP_ON"#APP\n", asm_out_file);
249 app_on = 1;
250 }
251}
252
253/* Disable APP processing of subsequent output.
254 Called from varasm.cc before most kinds of output. */
255
256void
257app_disable (void)
258{
259 if (app_on)
260 {
261 fputs (ASM_APP_OFF"#NO_APP\n", asm_out_file);
262 app_on = 0;
263 }
264}
265
266/* Return the number of slots filled in the current
267 delayed branch sequence (we don't count the insn needing the
268 delay slot). Zero if not in a delayed branch sequence. */
269
270int
271dbr_sequence_length (void)
272{
273 if (final_sequence != 0)
274 return XVECLEN (final_sequence, 0)(((((final_sequence)->u.fld[0]).rt_rtvec))->num_elem) - 1;
275 else
276 return 0;
277}
278
279/* The next two pages contain routines used to compute the length of an insn
280 and to shorten branches. */
281
282/* Arrays for insn lengths, and addresses. The latter is referenced by
283 `insn_current_length'. */
284
285static int *insn_lengths;
286
287vec<int> insn_addresses_;
288
289/* Max uid for which the above arrays are valid. */
290static int insn_lengths_max_uid;
291
292/* Address of insn being processed. Used by `insn_current_length'. */
293int insn_current_address;
294
295/* Address of insn being processed in previous iteration. */
296int insn_last_address;
297
298/* known invariant alignment of insn being processed. */
299int insn_current_align;
300
301/* After shorten_branches, for any insn, uid_align[INSN_UID (insn)]
302 gives the next following alignment insn that increases the known
303 alignment, or NULL_RTX if there is no such insn.
304 For any alignment obtained this way, we can again index uid_align with
305 its uid to obtain the next following align that in turn increases the
306 alignment, till we reach NULL_RTX; the sequence obtained this way
307 for each insn we'll call the alignment chain of this insn in the following
308 comments. */
309
310static rtx *uid_align;
311static int *uid_shuid;
312static vec<align_flags> label_align;
313
314/* Indicate that branch shortening hasn't yet been done. */
315
316void
317init_insn_lengths (void)
318{
319 if (uid_shuid)
320 {
321 free (uid_shuid);
322 uid_shuid = 0;
323 }
324 if (insn_lengths)
325 {
326 free (insn_lengths);
327 insn_lengths = 0;
328 insn_lengths_max_uid = 0;
329 }
330 if (HAVE_ATTR_length1)
331 INSN_ADDRESSES_FREE ()(insn_addresses_.release ());
332 if (uid_align)
333 {
334 free (uid_align);
335 uid_align = 0;
336 }
337}
338
339/* Obtain the current length of an insn. If branch shortening has been done,
340 get its actual length. Otherwise, use FALLBACK_FN to calculate the
341 length. */
342static int
343get_attr_length_1 (rtx_insn *insn, int (*fallback_fn) (rtx_insn *))
344{
345 rtx body;
346 int i;
347 int length = 0;
348
349 if (!HAVE_ATTR_length1)
350 return 0;
351
352 if (insn_lengths_max_uid > INSN_UID (insn))
353 return insn_lengths[INSN_UID (insn)];
354 else
355 switch (GET_CODE (insn)((enum rtx_code) (insn)->code))
356 {
357 case NOTE:
358 case BARRIER:
359 case CODE_LABEL:
360 case DEBUG_INSN:
361 return 0;
362
363 case CALL_INSN:
364 case JUMP_INSN:
365 length = fallback_fn (insn);
366 break;
367
368 case INSN:
369 body = PATTERN (insn);
370 if (GET_CODE (body)((enum rtx_code) (body)->code) == USE || GET_CODE (body)((enum rtx_code) (body)->code) == CLOBBER)
371 return 0;
372
373 else if (GET_CODE (body)((enum rtx_code) (body)->code) == ASM_INPUT || asm_noperands (body) >= 0)
374 length = asm_insn_count (body) * fallback_fn (insn);
375 else if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (body))
376 for (i = 0; i < seq->len (); i++)
377 length += get_attr_length_1 (seq->insn (i), fallback_fn);
378 else
379 length = fallback_fn (insn);
380 break;
381
382 default:
383 break;
384 }
385
386#ifdef ADJUST_INSN_LENGTH
387 ADJUST_INSN_LENGTH (insn, length);
388#endif
389 return length;
390}
391
392/* Obtain the current length of an insn. If branch shortening has been done,
393 get its actual length. Otherwise, get its maximum length. */
394int
395get_attr_length (rtx_insn *insn)
396{
397 return get_attr_length_1 (insn, insn_default_length);
398}
399
400/* Obtain the current length of an insn. If branch shortening has been done,
401 get its actual length. Otherwise, get its minimum length. */
402int
403get_attr_min_length (rtx_insn *insn)
404{
405 return get_attr_length_1 (insn, insn_min_length);
406}
407
408/* Code to handle alignment inside shorten_branches. */
409
410/* Here is an explanation how the algorithm in align_fuzz can give
411 proper results:
412
413 Call a sequence of instructions beginning with alignment point X
414 and continuing until the next alignment point `block X'. When `X'
415 is used in an expression, it means the alignment value of the
416 alignment point.
417
418 Call the distance between the start of the first insn of block X, and
419 the end of the last insn of block X `IX', for the `inner size of X'.
420 This is clearly the sum of the instruction lengths.
421
422 Likewise with the next alignment-delimited block following X, which we
423 shall call block Y.
424
425 Call the distance between the start of the first insn of block X, and
426 the start of the first insn of block Y `OX', for the `outer size of X'.
427
428 The estimated padding is then OX - IX.
429
430 OX can be safely estimated as
431
432 if (X >= Y)
433 OX = round_up(IX, Y)
434 else
435 OX = round_up(IX, X) + Y - X
436
437 Clearly est(IX) >= real(IX), because that only depends on the
438 instruction lengths, and those being overestimated is a given.
439
440 Clearly round_up(foo, Z) >= round_up(bar, Z) if foo >= bar, so
441 we needn't worry about that when thinking about OX.
442
443 When X >= Y, the alignment provided by Y adds no uncertainty factor
444 for branch ranges starting before X, so we can just round what we have.
445 But when X < Y, we don't know anything about the, so to speak,
446 `middle bits', so we have to assume the worst when aligning up from an
447 address mod X to one mod Y, which is Y - X. */
448
449#ifndef LABEL_ALIGN
450#define LABEL_ALIGN(LABEL)(this_target_flag_state->x_align_labels) align_labels(this_target_flag_state->x_align_labels)
451#endif
452
453#ifndef LOOP_ALIGN
454#define LOOP_ALIGN(LABEL)(this_target_flag_state->x_align_loops) align_loops(this_target_flag_state->x_align_loops)
455#endif
456
457#ifndef LABEL_ALIGN_AFTER_BARRIER
458#define LABEL_ALIGN_AFTER_BARRIER(LABEL)0 0
459#endif
460
461#ifndef JUMP_ALIGN
462#define JUMP_ALIGN(LABEL)(this_target_flag_state->x_align_jumps) align_jumps(this_target_flag_state->x_align_jumps)
463#endif
464
465#ifndef ADDR_VEC_ALIGN
466static int
467final_addr_vec_align (rtx_jump_table_data *addr_vec)
468{
469 int align = GET_MODE_SIZE (addr_vec->get_data_mode ());
470
471 if (align > BIGGEST_ALIGNMENT(((global_options.x_target_flags & (1U << 12)) != 0) ? 32 : (((global_options.x_ix86_isa_flags & (1UL << 15)) != 0) ? 512 : (((global_options.x_ix86_isa_flags & (1UL << 8)) != 0) ? 256 : 128))) / BITS_PER_UNIT(8))
29: Assuming the condition is true
30: '?' condition is true
31: Assuming the condition is false
32: Taking false branch
472 align = BIGGEST_ALIGNMENT(((global_options.x_target_flags & (1U << 12)) != 0) ? 32 : (((global_options.x_ix86_isa_flags & (1UL << 15)) != 0) ? 512 : (((global_options.x_ix86_isa_flags & (1UL << 8)) != 0) ? 256 : 128))) / BITS_PER_UNIT(8);
473 return exact_log2 (align);
33: Calling 'exact_log2'
36: Returning from 'exact_log2'
37: Returning the value -1
474
475}
476
477#define ADDR_VEC_ALIGN(ADDR_VEC)final_addr_vec_align (ADDR_VEC) final_addr_vec_align (ADDR_VEC)
478#endif
479
480#ifndef INSN_LENGTH_ALIGNMENT
481#define INSN_LENGTH_ALIGNMENT(INSN)length_unit_log length_unit_log
482#endif
483
484#define INSN_SHUID(INSN)(uid_shuid[INSN_UID (INSN)]) (uid_shuid[INSN_UID (INSN)])
485
486static int min_labelno, max_labelno;
487
488#define LABEL_TO_ALIGNMENT(LABEL)(label_align[(((LABEL)->u.fld[5]).rt_int) - min_labelno]) \
489 (label_align[CODE_LABEL_NUMBER (LABEL)(((LABEL)->u.fld[5]).rt_int) - min_labelno])
490
491/* For the benefit of port specific code do this also as a function. */
492
493align_flags
494label_to_alignment (rtx label)
495{
496 if (CODE_LABEL_NUMBER (label)(((label)->u.fld[5]).rt_int) <= max_labelno)
497 return LABEL_TO_ALIGNMENT (label)(label_align[(((label)->u.fld[5]).rt_int) - min_labelno]);
498 return align_flags ();
499}
500
501/* The differences in addresses
502 between a branch and its target might grow or shrink depending on
503 the alignment the start insn of the range (the branch for a forward
504 branch or the label for a backward branch) starts out on; if these
505 differences are used naively, they can even oscillate infinitely.
506 We therefore want to compute a 'worst case' address difference that
507 is independent of the alignment the start insn of the range end
508 up on, and that is at least as large as the actual difference.
509 The function align_fuzz calculates the amount we have to add to the
510 naively computed difference, by traversing the part of the alignment
511 chain of the start insn of the range that is in front of the end insn
512 of the range, and considering for each alignment the maximum amount
513 that it might contribute to a size increase.
514
515 For casesi tables, we also want to know worst case minimum amounts of
516 address difference, in case a machine description wants to introduce
517 some common offset that is added to all offsets in a table.
518 For this purpose, align_fuzz with a growth argument of 0 computes the
519 appropriate adjustment. */
520
521/* Compute the maximum delta by which the difference of the addresses of
522 START and END might grow / shrink due to a different address for start
523 which changes the size of alignment insns between START and END.
524 KNOWN_ALIGN_LOG is the alignment known for START.
525 GROWTH should be ~0 if the objective is to compute potential code size
526 increase, and 0 if the objective is to compute potential shrink.
527 The return value is undefined for any other value of GROWTH. */
528
529static int
530align_fuzz (rtx start, rtx end, int known_align_log, unsigned int growth)
531{
532 int uid = INSN_UID (start);
533 rtx align_label;
534 int known_align = 1 << known_align_log;
535 int end_shuid = INSN_SHUID (end)(uid_shuid[INSN_UID (end)]);
536 int fuzz = 0;
537
538 for (align_label = uid_align[uid]; align_label; align_label = uid_align[uid])
539 {
540 int align_addr, new_align;
541
542 uid = INSN_UID (align_label);
543 align_addr = INSN_ADDRESSES (uid)(insn_addresses_[uid]) - insn_lengths[uid];
544 if (uid_shuid[uid] > end_shuid)
545 break;
546 align_flags alignment = LABEL_TO_ALIGNMENT (align_label)(label_align[(((align_label)->u.fld[5]).rt_int) - min_labelno]);
547 new_align = 1 << alignment.levels[0].log;
548 if (new_align < known_align)
549 continue;
550 fuzz += (-align_addr ^ growth) & (new_align - known_align);
551 known_align = new_align;
552 }
553 return fuzz;
554}
555
556/* Compute a worst-case reference address of a branch so that it
557 can be safely used in the presence of aligned labels. Since the
558 size of the branch itself is unknown, the size of the branch is
559 not included in the range. I.e. for a forward branch, the reference
560 address is the end address of the branch as known from the previous
561 branch shortening pass, minus a value to account for possible size
562 increase due to alignment. For a backward branch, it is the start
563 address of the branch as known from the current pass, plus a value
564 to account for possible size increase due to alignment.
565 NB.: Therefore, the maximum offset allowed for backward branches needs
566 to exclude the branch size. */
567
568int
569insn_current_reference_address (rtx_insn *branch)
570{
571 rtx dest;
572 int seq_uid;
573
574 if (! INSN_ADDRESSES_SET_P ()(insn_addresses_.exists ()))
575 return 0;
576
577 rtx_insn *seq = NEXT_INSN (PREV_INSN (branch));
578 seq_uid = INSN_UID (seq);
579 if (!jump_to_label_p (branch))
580 /* This can happen for example on the PA; the objective is to know the
581 offset to address something in front of the start of the function.
582 Thus, we can treat it like a backward branch.
583 We assume here that FUNCTION_BOUNDARY / BITS_PER_UNIT is larger than
584 any alignment we'd encounter, so we skip the call to align_fuzz. */
585 return insn_current_address;
586 dest = JUMP_LABEL (branch)(((branch)->u.fld[7]).rt_rtx);
587
588 /* BRANCH has no proper alignment chain set, so use SEQ.
589 BRANCH also has no INSN_SHUID. */
590 if (INSN_SHUID (seq)(uid_shuid[INSN_UID (seq)]) < INSN_SHUID (dest)(uid_shuid[INSN_UID (dest)]))
591 {
592 /* Forward branch. */
593 return (insn_last_address + insn_lengths[seq_uid]
594 - align_fuzz (seq, dest, length_unit_log, ~0));
595 }
596 else
597 {
598 /* Backward branch. */
599 return (insn_current_address
600 + align_fuzz (dest, seq, length_unit_log, ~0));
601 }
602}
603
604/* Compute branch alignments based on CFG profile. */
605
606unsigned int
607compute_alignments (void)
608{
609 basic_block bb;
610 align_flags max_alignment;
611
612 label_align.truncate (0);
613
614 max_labelno = max_label_num ();
615 min_labelno = get_first_label_num ();
616 label_align.safe_grow_cleared (max_labelno - min_labelno + 1, true);
617
618 /* If not optimizing or optimizing for size, don't assign any alignments. */
619 if (! optimizeglobal_options.x_optimize || optimize_function_for_size_p (cfun(cfun + 0)))
620 return 0;
621
622 if (dump_file)
623 {
624 dump_reg_info (dump_file);
625 dump_flow_info (dump_file, TDF_DETAILS);
626 flow_loops_dump (dump_file, NULL__null, 1);
627 }
628 loop_optimizer_init (AVOID_CFG_MODIFICATIONS(LOOPS_MAY_HAVE_MULTIPLE_LATCHES));
629 profile_count count_threshold = cfun(cfun + 0)->cfg->count_max / param_align_thresholdglobal_options.x_param_align_threshold;
630
631 if (dump_file)
632 {
633 fprintf (dump_file, "count_max: ");
634 cfun(cfun + 0)->cfg->count_max.dump (dump_file);
635 fprintf (dump_file, "\n");
636 }
637 FOR_EACH_BB_FN (bb, cfun)for (bb = ((cfun + 0))->cfg->x_entry_block_ptr->next_bb; bb != ((cfun + 0))->cfg->x_exit_block_ptr; bb = bb->next_bb)
638 {
639 rtx_insn *label = BB_HEAD (bb)(bb)->il.x.head_;
640 bool has_fallthru = 0;
641 edge e;
642 edge_iterator ei;
643
644 if (!LABEL_P (label)(((enum rtx_code) (label)->code) == CODE_LABEL)
645 || optimize_bb_for_size_p (bb))
646 {
647 if (dump_file)
648 fprintf (dump_file,
649 "BB %4i loop %2i loop_depth %2i skipped.\n",
650 bb->index,
651 bb->loop_father->num,
652 bb_loop_depth (bb));
653 continue;
654 }
655 max_alignment = LABEL_ALIGN (label)(this_target_flag_state->x_align_labels);
656 profile_count fallthru_count = profile_count::zero ();
657 profile_count branch_count = profile_count::zero ();
658
659 FOR_EACH_EDGE (e, ei, bb->preds)for ((ei) = ei_start_1 (&((bb->preds))); ei_cond ((ei), &(e)); ei_next (&(ei)))
660 {
661 if (e->flags & EDGE_FALLTHRU)
662 has_fallthru = 1, fallthru_count += e->count ();
663 else
664 branch_count += e->count ();
665 }
666 if (dump_file)
667 {
668 fprintf (dump_file, "BB %4i loop %2i loop_depth"
669 " %2i fall ",
670 bb->index, bb->loop_father->num,
671 bb_loop_depth (bb));
672 fallthru_count.dump (dump_file);
673 fprintf (dump_file, " branch ");
674 branch_count.dump (dump_file);
675 if (!bb->loop_father->inner && bb->loop_father->num)
676 fprintf (dump_file, " inner_loop");
677 if (bb->loop_father->header == bb)
678 fprintf (dump_file, " loop_header");
679 fprintf (dump_file, "\n");
680 }
681 if (!fallthru_count.initialized_p () || !branch_count.initialized_p ())
682 continue;
683
684 /* There are two purposes to align block with no fallthru incoming edge:
685 1) to avoid fetch stalls when branch destination is near cache boundary
686 2) to improve cache efficiency in case the previous block is not executed
687 (so it does not need to be in the cache).
688
689 To catch the first case, we align frequently executed blocks.
690 To catch the second, we align blocks that are executed more frequently
691 than the predecessor and the predecessor is likely to not be executed
692 when function is called. */
693
694 if (!has_fallthru
695 && (branch_count > count_threshold
696 || (bb->count > bb->prev_bb->count * 10
697 && (bb->prev_bb->count
698 <= ENTRY_BLOCK_PTR_FOR_FN (cfun)(((cfun + 0))->cfg->x_entry_block_ptr)->count / 2))))
699 {
700 align_flags alignment = JUMP_ALIGN (label)(this_target_flag_state->x_align_jumps);
701 if (dump_file)
702 fprintf (dump_file, " jump alignment added.\n");
703 max_alignment = align_flags::max (max_alignment, alignment);
704 }
705 /* In case block is frequent and reached mostly by non-fallthru edge,
706 align it. It is most likely a first block of loop. */
707 if (has_fallthru
708 && !(single_succ_p (bb)
709 && single_succ (bb) == EXIT_BLOCK_PTR_FOR_FN (cfun)(((cfun + 0))->cfg->x_exit_block_ptr))
710 && optimize_bb_for_speed_p (bb)
711 && branch_count + fallthru_count > count_threshold
712 && (branch_count > fallthru_count * param_align_loop_iterationsglobal_options.x_param_align_loop_iterations))
713 {
714 align_flags alignment = LOOP_ALIGN (label)(this_target_flag_state->x_align_loops);
715 if (dump_file)
716 fprintf (dump_file, " internal loop alignment added.\n");
717 max_alignment = align_flags::max (max_alignment, alignment);
718 }
719 LABEL_TO_ALIGNMENT (label)(label_align[(((label)->u.fld[5]).rt_int) - min_labelno]) = max_alignment;
720 }
721
722 loop_optimizer_finalize ();
723 free_dominance_info (CDI_DOMINATORS);
724 return 0;
725}
726
727/* Grow the LABEL_ALIGN array after new labels are created. */
728
729static void
730grow_label_align (void)
731{
732 int old = max_labelno;
733 int n_labels;
734 int n_old_labels;
735
736 max_labelno = max_label_num ();
737
738 n_labels = max_labelno - min_labelno + 1;
739 n_old_labels = old - min_labelno + 1;
740
741 label_align.safe_grow_cleared (n_labels, true);
742
743 /* Range of labels grows monotonically in the function. Failing here
744 means that the initialization of array got lost. */
745 gcc_assert (n_old_labels <= n_labels)((void)(!(n_old_labels <= n_labels) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/final.cc", 745, __FUNCTION__), 0 : 0));
746}
747
748/* Update the already computed alignment information. LABEL_PAIRS is a vector
749 made up of pairs of labels for which the alignment information of the first
750 element will be copied from that of the second element. */
751
752void
753update_alignments (vec<rtx> &label_pairs)
754{
755 unsigned int i = 0;
756 rtx iter, label = NULL_RTX(rtx) 0;
757
758 if (max_labelno != max_label_num ())
759 grow_label_align ();
760
761 FOR_EACH_VEC_ELT (label_pairs, i, iter)for (i = 0; (label_pairs).iterate ((i), &(iter)); ++(i))
762 if (i & 1)
763 LABEL_TO_ALIGNMENT (label)(label_align[(((label)->u.fld[5]).rt_int) - min_labelno]) = LABEL_TO_ALIGNMENT (iter)(label_align[(((iter)->u.fld[5]).rt_int) - min_labelno]);
764 else
765 label = iter;
766}
767
768namespace {
769
770const pass_data pass_data_compute_alignments =
771{
772 RTL_PASS, /* type */
773 "alignments", /* name */
774 OPTGROUP_NONE, /* optinfo_flags */
775 TV_NONE, /* tv_id */
776 0, /* properties_required */
777 0, /* properties_provided */
778 0, /* properties_destroyed */
779 0, /* todo_flags_start */
780 0, /* todo_flags_finish */
781};
782
783class pass_compute_alignments : public rtl_opt_pass
784{
785public:
786 pass_compute_alignments (gcc::context *ctxt)
787 : rtl_opt_pass (pass_data_compute_alignments, ctxt)
788 {}
789
790 /* opt_pass methods: */
791 unsigned int execute (function *) final override
792 {
793 return compute_alignments ();
794 }
795
796}; // class pass_compute_alignments
797
798} // anon namespace
799
800rtl_opt_pass *
801make_pass_compute_alignments (gcc::context *ctxt)
802{
803 return new pass_compute_alignments (ctxt);
804}
805
806
807/* Make a pass over all insns and compute their actual lengths by shortening
808 any branches of variable length if possible. */
809
810/* shorten_branches might be called multiple times: for example, the SH
811 port splits out-of-range conditional branches in MACHINE_DEPENDENT_REORG.
812 In order to do this, it needs proper length information, which it obtains
813 by calling shorten_branches. This cannot be collapsed with
814 shorten_branches itself into a single pass unless we also want to integrate
815 reorg.cc, since the branch splitting exposes new instructions with delay
816 slots. */
817
818void
819shorten_branches (rtx_insn *first)
820{
821 rtx_insn *insn;
822 int max_uid;
823 int i;
824 rtx_insn *seq;
825 int something_changed = 1;
826 char *varying_length;
827 rtx body;
828 int uid;
829 rtx align_tab[MAX_CODE_ALIGN16 + 1];
830
831 /* Compute maximum UID and allocate label_align / uid_shuid. */
832 max_uid = get_max_uid ();
833
834 /* Free uid_shuid before reallocating it. */
835 free (uid_shuid);
836
837 uid_shuid = XNEWVEC (int, max_uid)((int *) xmalloc (sizeof (int) * (max_uid)));
838
839 if (max_labelno != max_label_num ())
840 grow_label_align ();
841
842 /* Initialize label_align and set up uid_shuid to be strictly
843 monotonically rising with insn order. */
844 /* We use alignment here to keep track of the maximum alignment we want to
845 impose on the next CODE_LABEL (or the current one if we are processing
846 the CODE_LABEL itself). */
847
848 align_flags max_alignment;
849
850 for (insn = get_insns (), i = 1; insn; insn = NEXT_INSN (insn))
851 {
852 INSN_SHUID (insn)(uid_shuid[INSN_UID (insn)]) = i++;
853 if (INSN_P (insn)(((((enum rtx_code) (insn)->code) == INSN) || (((enum rtx_code) (insn)->code) == JUMP_INSN) || (((enum rtx_code) (insn)->code) == CALL_INSN)) || (((enum rtx_code) (insn)->code) == DEBUG_INSN)))
854 continue;
855
856 if (rtx_code_label *label = dyn_cast <rtx_code_label *> (insn))
857 {
858 /* Merge in alignments computed by compute_alignments. */
859 align_flags alignment = LABEL_TO_ALIGNMENT (label)(label_align[(((label)->u.fld[5]).rt_int) - min_labelno]);
860 max_alignment = align_flags::max (max_alignment, alignment);
861
862 rtx_jump_table_data *table = jump_table_for_label (label);
863 if (!table)
864 {
865 align_flags alignment = LABEL_ALIGN (label)(this_target_flag_state->x_align_labels);
866 max_alignment = align_flags::max (max_alignment, alignment);
867 }
868 /* ADDR_VECs only take room if read-only data goes into the text
869 section. */
870 if ((JUMP_TABLES_IN_TEXT_SECTION(global_options.x_flag_pic && !(((global_options.x_ix86_isa_flags & (1UL << 1)) != 0) || 1))
871 || readonly_data_section == text_section)
872 && table)
873 {
874 align_flags alignment = align_flags (ADDR_VEC_ALIGN (table)final_addr_vec_align (table));
875 max_alignment = align_flags::max (max_alignment, alignment);
876 }
877 LABEL_TO_ALIGNMENT (label)(label_align[(((label)->u.fld[5]).rt_int) - min_labelno]) = max_alignment;
878 max_alignment = align_flags ();
879 }
880 else if (BARRIER_P (insn)(((enum rtx_code) (insn)->code) == BARRIER))
881 {
882 rtx_insn *label;
883
884 for (label = insn; label && ! INSN_P (label)(((((enum rtx_code) (label)->code) == INSN) || (((enum rtx_code) (label)->code) == JUMP_INSN) || (((enum rtx_code) (label)->code) == CALL_INSN)) || (((enum rtx_code) (label)->code) == DEBUG_INSN));
885 label = NEXT_INSN (label))
886 if (LABEL_P (label)(((enum rtx_code) (label)->code) == CODE_LABEL))
887 {
888 align_flags alignment
889 = align_flags (LABEL_ALIGN_AFTER_BARRIER (insn)0);
890 max_alignment = align_flags::max (max_alignment, alignment);
891 break;
892 }
893 }
894 }
895 if (!HAVE_ATTR_length1)
896 return;
897
898 /* Allocate the rest of the arrays. */
899 insn_lengths = XNEWVEC (int, max_uid)((int *) xmalloc (sizeof (int) * (max_uid)));
900 insn_lengths_max_uid = max_uid;
901 /* Syntax errors can lead to labels being outside of the main insn stream.
902 Initialize insn_addresses, so that we get reproducible results. */
903 INSN_ADDRESSES_ALLOC (max_uid)do { insn_addresses_.create (max_uid); insn_addresses_.safe_grow_cleared (max_uid, true); memset (insn_addresses_.address (), 0, sizeof (int) * max_uid); } while (0);
904
905 varying_length = XCNEWVEC (char, max_uid)((char *) xcalloc ((max_uid), sizeof (char)));
906
907 /* Initialize uid_align. We scan instructions
908 from end to start, and keep in align_tab[n] the last seen insn
909 that does an alignment of at least n+1, i.e. the successor
910 in the alignment chain for an insn that does / has a known
911 alignment of n. */
912 uid_align = XCNEWVEC (rtx, max_uid)((rtx *) xcalloc ((max_uid), sizeof (rtx)));
913
914 for (i = MAX_CODE_ALIGN16 + 1; --i >= 0;)
915 align_tab[i] = NULL_RTX(rtx) 0;
916 seq = get_last_insn ();
917 for (; seq; seq = PREV_INSN (seq))
918 {
919 int uid = INSN_UID (seq);
920 int log;
921 log = (LABEL_P (seq)(((enum rtx_code) (seq)->code) == CODE_LABEL) ? LABEL_TO_ALIGNMENT (seq)(label_align[(((seq)->u.fld[5]).rt_int) - min_labelno]).levels[0].log : 0);
922 uid_align[uid] = align_tab[0];
923 if (log)
924 {
925 /* Found an alignment label. */
926 gcc_checking_assert (log < MAX_CODE_ALIGN + 1)((void)(!(log < 16 + 1) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/final.cc", 926, __FUNCTION__), 0 : 0));
927 uid_align[uid] = align_tab[log];
928 for (i = log - 1; i >= 0; i--)
929 align_tab[i] = seq;
930 }
931 }
932
933 /* When optimizing, we start assuming minimum length, and keep increasing
934 lengths as we find the need for this, till nothing changes.
935 When not optimizing, we start assuming maximum lengths, and
936 do a single pass to update the lengths. */
937 bool increasing = optimizeglobal_options.x_optimize != 0;
938
939#ifdef CASE_VECTOR_SHORTEN_MODE
940 if (optimizeglobal_options.x_optimize)
941 {
942 /* Look for ADDR_DIFF_VECs, and initialize their minimum and maximum
943 label fields. */
944
945 int min_shuid = INSN_SHUID (get_insns ())(uid_shuid[INSN_UID (get_insns ())]) - 1;
946 int max_shuid = INSN_SHUID (get_last_insn ())(uid_shuid[INSN_UID (get_last_insn ())]) + 1;
947 int rel;
948
949 for (insn = first; insn != 0; insn = NEXT_INSN (insn))
950 {
951 rtx min_lab = NULL_RTX(rtx) 0, max_lab = NULL_RTX(rtx) 0, pat;
952 int len, i, min, max, insn_shuid;
953 int min_align;
954 addr_diff_vec_flags flags;
955
956 if (! JUMP_TABLE_DATA_P (insn)(((enum rtx_code) (insn)->code) == JUMP_TABLE_DATA)
957 || GET_CODE (PATTERN (insn))((enum rtx_code) (PATTERN (insn))->code) != ADDR_DIFF_VEC)
958 continue;
959 pat = PATTERN (insn);
960 len = XVECLEN (pat, 1)(((((pat)->u.fld[1]).rt_rtvec))->num_elem);
961 gcc_assert (len > 0)((void)(!(len > 0) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/final.cc", 961, __FUNCTION__), 0 : 0));
962 min_align = MAX_CODE_ALIGN16;
963 for (min = max_shuid, max = min_shuid, i = len - 1; i >= 0; i--)
964 {
965 rtx lab = XEXP (XVECEXP (pat, 1, i), 0)((((((((pat)->u.fld[1]).rt_rtvec))->elem[i]))->u.fld[0]).rt_rtx);
966 int shuid = INSN_SHUID (lab)(uid_shuid[INSN_UID (lab)]);
967 if (shuid < min)
968 {
969 min = shuid;
970 min_lab = lab;
971 }
972 if (shuid > max)
973 {
974 max = shuid;
975 max_lab = lab;
976 }
977
978 int label_alignment = LABEL_TO_ALIGNMENT (lab)(label_align[(((lab)->u.fld[5]).rt_int) - min_labelno]).levels[0].log;
979 if (min_align > label_alignment)
980 min_align = label_alignment;
981 }
982 XEXP (pat, 2)(((pat)->u.fld[2]).rt_rtx) = gen_rtx_LABEL_REF (Pmode, min_lab)gen_rtx_fmt_u_stat ((LABEL_REF), (((global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode ((scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode::from_int) E_SImode))))), ((min_lab)) );
983 XEXP (pat, 3)(((pat)->u.fld[3]).rt_rtx) = gen_rtx_LABEL_REF (Pmode, max_lab)gen_rtx_fmt_u_stat ((LABEL_REF), (((global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode ((scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode::from_int) E_SImode))))), ((max_lab)) );
984 insn_shuid = INSN_SHUID (insn)(uid_shuid[INSN_UID (insn)]);
985 rel = INSN_SHUID (XEXP (XEXP (pat, 0), 0))(uid_shuid[INSN_UID (((((((pat)->u.fld[0]).rt_rtx))->u.fld[0]).rt_rtx))]);
986 memset (&flags, 0, sizeof (flags));
987 flags.min_align = min_align;
988 flags.base_after_vec = rel > insn_shuid;
989 flags.min_after_vec = min > insn_shuid;
990 flags.max_after_vec = max > insn_shuid;
991 flags.min_after_base = min > rel;
992 flags.max_after_base = max > rel;
993 ADDR_DIFF_VEC_FLAGS (pat)(((pat)->u.fld[4]).rt_addr_diff_vec_flags) = flags;
994
995 if (increasing)
996 PUT_MODE (pat, CASE_VECTOR_SHORTEN_MODE (0, 0, pat));
997 }
998 }
999#endif /* CASE_VECTOR_SHORTEN_MODE */
1000
1001 /* Compute initial lengths, addresses, and varying flags for each insn. */
1002 int (*length_fun) (rtx_insn *) = increasing ? insn_min_length : insn_default_length;
1003
1004 for (insn_current_address = 0, insn = first;
1005 insn != 0;
1006 insn_current_address += insn_lengths[uid], insn = NEXT_INSN (insn))
1007 {
1008 uid = INSN_UID (insn);
1009
1010 insn_lengths[uid] = 0;
1011
1012 if (LABEL_P (insn)(((enum rtx_code) (insn)->code) == CODE_LABEL))
1013 {
1014 int log = LABEL_TO_ALIGNMENT (insn)(label_align[(((insn)->u.fld[5]).rt_int) - min_labelno]).levels[0].log;
1015 if (log)
1016 {
1017 int align = 1 << log;
1018 int new_address = (insn_current_address + align - 1) & -align;
1019 insn_lengths[uid] = new_address - insn_current_address;
1020 }
1021 }
1022
1023 INSN_ADDRESSES (uid)(insn_addresses_[uid]) = insn_current_address + insn_lengths[uid];
1024
1025 if (NOTE_P (insn)(((enum rtx_code) (insn)->code) == NOTE) || BARRIER_P (insn)(((enum rtx_code) (insn)->code) == BARRIER)
1026 || LABEL_P (insn)(((enum rtx_code) (insn)->code) == CODE_LABEL) || DEBUG_INSN_P (insn)(((enum rtx_code) (insn)->code) == DEBUG_INSN))
1027 continue;
1028 if (insn->deleted ())
1029 continue;
1030
1031 body = PATTERN (insn);
1032 if (rtx_jump_table_data *table = dyn_cast <rtx_jump_table_data *> (insn))
1033 {
1034 /* This only takes room if read-only data goes into the text
1035 section. */
1036 if (JUMP_TABLES_IN_TEXT_SECTION(global_options.x_flag_pic && !(((global_options.x_ix86_isa_flags & (1UL << 1)) != 0) || 1))
1037 || readonly_data_section == text_section)
1038 insn_lengths[uid] = (XVECLEN (body,(((((body)->u.fld[((enum rtx_code) (body)->code) == ADDR_DIFF_VEC]).rt_rtvec))->num_elem)
1039 GET_CODE (body) == ADDR_DIFF_VEC)(((((body)->u.fld[((enum rtx_code) (body)->code) == ADDR_DIFF_VEC]).rt_rtvec))->num_elem)
1040 * GET_MODE_SIZE (table->get_data_mode ()));
1041 /* Alignment is handled by ADDR_VEC_ALIGN. */
1042 }
1043 else if (GET_CODE (body)((enum rtx_code) (body)->code) == ASM_INPUT || asm_noperands (body) >= 0)
1044 insn_lengths[uid] = asm_insn_count (body) * insn_default_length (insn);
1045 else if (rtx_sequence *body_seq = dyn_cast <rtx_sequence *> (body))
1046 {
1047 int i;
1048 int const_delay_slots;
1049 if (DELAY_SLOTS0)
1050 const_delay_slots = const_num_delay_slots (body_seq->insn (0));
1051 else
1052 const_delay_slots = 0;
1053
1054 int (*inner_length_fun) (rtx_insn *)
1055 = const_delay_slots ? length_fun : insn_default_length;
1056 /* Inside a delay slot sequence, we do not do any branch shortening
1057 if the shortening could change the number of delay slots
1058 of the branch. */
1059 for (i = 0; i < body_seq->len (); i++)
1060 {
1061 rtx_insn *inner_insn = body_seq->insn (i);
1062 int inner_uid = INSN_UID (inner_insn);
1063 int inner_length;
1064
1065 if (GET_CODE (PATTERN (inner_insn))((enum rtx_code) (PATTERN (inner_insn))->code) == ASM_INPUT
1066 || asm_noperands (PATTERN (inner_insn)) >= 0)
1067 inner_length = (asm_insn_count (PATTERN (inner_insn))
1068 * insn_default_length (inner_insn));
1069 else
1070 inner_length = inner_length_fun (inner_insn);
1071
1072 insn_lengths[inner_uid] = inner_length;
1073 if (const_delay_slots)
1074 {
1075 if ((varying_length[inner_uid]
1076 = insn_variable_length_p (inner_insn)) != 0)
1077 varying_length[uid] = 1;
1078 INSN_ADDRESSES (inner_uid)(insn_addresses_[inner_uid]) = (insn_current_address
1079 + insn_lengths[uid]);
1080 }
1081 else
1082 varying_length[inner_uid] = 0;
1083 insn_lengths[uid] += inner_length;
1084 }
1085 }
1086 else if (GET_CODE (body)((enum rtx_code) (body)->code) != USE && GET_CODE (body)((enum rtx_code) (body)->code) != CLOBBER)
1087 {
1088 insn_lengths[uid] = length_fun (insn);
1089 varying_length[uid] = insn_variable_length_p (insn);
1090 }
1091
1092 /* If needed, do any adjustment. */
1093#ifdef ADJUST_INSN_LENGTH
1094 ADJUST_INSN_LENGTH (insn, insn_lengths[uid]);
1095 if (insn_lengths[uid] < 0)
1096 fatal_insn ("negative insn length", insn)_fatal_insn ("negative insn length", insn, "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/final.cc", 1096, __FUNCTION__);
1097#endif
1098 }
1099
1100 /* Now loop over all the insns finding varying length insns. For each,
1101 get the current insn length. If it has changed, reflect the change.
1102 When nothing changes for a full pass, we are done. */
1103
1104 while (something_changed)
1105 {
1106 something_changed = 0;
1107 insn_current_align = MAX_CODE_ALIGN16 - 1;
1108 for (insn_current_address = 0, insn = first;
1109 insn != 0;
1110 insn = NEXT_INSN (insn))
1111 {
1112 int new_length;
1113#ifdef ADJUST_INSN_LENGTH
1114 int tmp_length;
1115#endif
1116 int length_align;
1117
1118 uid = INSN_UID (insn);
1119
1120 if (rtx_code_label *label = dyn_cast <rtx_code_label *> (insn))
1121 {
1122 int log = LABEL_TO_ALIGNMENT (label)(label_align[(((label)->u.fld[5]).rt_int) - min_labelno]).levels[0].log;
1123
1124#ifdef CASE_VECTOR_SHORTEN_MODE
1125 /* If the mode of a following jump table was changed, we
1126 may need to update the alignment of this label. */
1127
1128 if (JUMP_TABLES_IN_TEXT_SECTION(global_options.x_flag_pic && !(((global_options.x_ix86_isa_flags & (1UL << 1)) != 0) || 1))
1129 || readonly_data_section == text_section)
1130 {
1131 rtx_jump_table_data *table = jump_table_for_label (label);
1132 if (table)
1133 {
1134 int newlog = ADDR_VEC_ALIGN (table)final_addr_vec_align (table);
1135 if (newlog != log)
1136 {
1137 log = newlog;
1138 LABEL_TO_ALIGNMENT (insn)(label_align[(((insn)->u.fld[5]).rt_int) - min_labelno]) = log;
1139 something_changed = 1;
1140 }
1141 }
1142 }
1143#endif
1144
1145 if (log > insn_current_align)
1146 {
1147 int align = 1 << log;
1148 int new_address= (insn_current_address + align - 1) & -align;
1149 insn_lengths[uid] = new_address - insn_current_address;
1150 insn_current_align = log;
1151 insn_current_address = new_address;
1152 }
1153 else
1154 insn_lengths[uid] = 0;
1155 INSN_ADDRESSES (uid)(insn_addresses_[uid]) = insn_current_address;
1156 continue;
1157 }
1158
1159 length_align = INSN_LENGTH_ALIGNMENT (insn)length_unit_log;
1160 if (length_align < insn_current_align)
1161 insn_current_align = length_align;
1162
1163 insn_last_address = INSN_ADDRESSES (uid)(insn_addresses_[uid]);
1164 INSN_ADDRESSES (uid)(insn_addresses_[uid]) = insn_current_address;
1165
1166#ifdef CASE_VECTOR_SHORTEN_MODE
1167 if (optimizeglobal_options.x_optimize
1168 && JUMP_TABLE_DATA_P (insn)(((enum rtx_code) (insn)->code) == JUMP_TABLE_DATA)
1169 && GET_CODE (PATTERN (insn))((enum rtx_code) (PATTERN (insn))->code) == ADDR_DIFF_VEC)
1170 {
1171 rtx_jump_table_data *table = as_a <rtx_jump_table_data *> (insn);
1172 rtx body = PATTERN (insn);
1173 int old_length = insn_lengths[uid];
1174 rtx_insn *rel_lab =
1175 safe_as_a <rtx_insn *> (XEXP (XEXP (body, 0), 0)((((((body)->u.fld[0]).rt_rtx))->u.fld[0]).rt_rtx));
1176 rtx min_lab = XEXP (XEXP (body, 2), 0)((((((body)->u.fld[2]).rt_rtx))->u.fld[0]).rt_rtx);
1177 rtx max_lab = XEXP (XEXP (body, 3), 0)((((((body)->u.fld[3]).rt_rtx))->u.fld[0]).rt_rtx);
1178 int rel_addr = INSN_ADDRESSES (INSN_UID (rel_lab))(insn_addresses_[INSN_UID (rel_lab)]);
1179 int min_addr = INSN_ADDRESSES (INSN_UID (min_lab))(insn_addresses_[INSN_UID (min_lab)]);
1180 int max_addr = INSN_ADDRESSES (INSN_UID (max_lab))(insn_addresses_[INSN_UID (max_lab)]);
1181 rtx_insn *prev;
1182 int rel_align = 0;
1183 addr_diff_vec_flags flags;
1184 scalar_int_mode vec_mode;
1185
1186 /* Avoid automatic aggregate initialization. */
1187 flags = ADDR_DIFF_VEC_FLAGS (body)(((body)->u.fld[4]).rt_addr_diff_vec_flags);
1188
1189 /* Try to find a known alignment for rel_lab. */
1190 for (prev = rel_lab;
1191 prev
1192 && ! insn_lengths[INSN_UID (prev)]
1193 && ! (varying_length[INSN_UID (prev)] & 1);
1194 prev = PREV_INSN (prev))
1195 if (varying_length[INSN_UID (prev)] & 2)
1196 {
1197 rel_align = LABEL_TO_ALIGNMENT (prev)(label_align[(((prev)->u.fld[5]).rt_int) - min_labelno]).levels[0].log;
1198 break;
1199 }
1200
1201 /* See the comment on addr_diff_vec_flags in rtl.h for the
1202 meaning of the flags values. base: REL_LAB vec: INSN */
1203 /* Anything after INSN has still addresses from the last
1204 pass; adjust these so that they reflect our current
1205 estimate for this pass. */
1206 if (flags.base_after_vec)
1207 rel_addr += insn_current_address - insn_last_address;
1208 if (flags.min_after_vec)
1209 min_addr += insn_current_address - insn_last_address;
1210 if (flags.max_after_vec)
1211 max_addr += insn_current_address - insn_last_address;
1212 /* We want to know the worst case, i.e. lowest possible value
1213 for the offset of MIN_LAB. If MIN_LAB is after REL_LAB,
1214 its offset is positive, and we have to be wary of code shrink;
1215 otherwise, it is negative, and we have to be vary of code
1216 size increase. */
1217 if (flags.min_after_base)
1218 {
1219 /* If INSN is between REL_LAB and MIN_LAB, the size
1220 changes we are about to make can change the alignment
1221 within the observed offset, therefore we have to break
1222 it up into two parts that are independent. */
1223 if (! flags.base_after_vec && flags.min_after_vec)
1224 {
1225 min_addr -= align_fuzz (rel_lab, insn, rel_align, 0);
1226 min_addr -= align_fuzz (insn, min_lab, 0, 0);
1227 }
1228 else
1229 min_addr -= align_fuzz (rel_lab, min_lab, rel_align, 0);
1230 }
1231 else
1232 {
1233 if (flags.base_after_vec && ! flags.min_after_vec)
1234 {
1235 min_addr -= align_fuzz (min_lab, insn, 0, ~0);
1236 min_addr -= align_fuzz (insn, rel_lab, 0, ~0);
1237 }
1238 else
1239 min_addr -= align_fuzz (min_lab, rel_lab, 0, ~0);
1240 }
1241 /* Likewise, determine the highest lowest possible value
1242 for the offset of MAX_LAB. */
1243 if (flags.max_after_base)
1244 {
1245 if (! flags.base_after_vec && flags.max_after_vec)
1246 {
1247 max_addr += align_fuzz (rel_lab, insn, rel_align, ~0);
1248 max_addr += align_fuzz (insn, max_lab, 0, ~0);
1249 }
1250 else
1251 max_addr += align_fuzz (rel_lab, max_lab, rel_align, ~0);
1252 }
1253 else
1254 {
1255 if (flags.base_after_vec && ! flags.max_after_vec)
1256 {
1257 max_addr += align_fuzz (max_lab, insn, 0, 0);
1258 max_addr += align_fuzz (insn, rel_lab, 0, 0);
1259 }
1260 else
1261 max_addr += align_fuzz (max_lab, rel_lab, 0, 0);
1262 }
1263 vec_mode = CASE_VECTOR_SHORTEN_MODE (min_addr - rel_addr,
1264 max_addr - rel_addr, body);
1265 if (!increasing
1266 || (GET_MODE_SIZE (vec_mode)
1267 >= GET_MODE_SIZE (table->get_data_mode ())))
1268 PUT_MODE (body, vec_mode);
1269 if (JUMP_TABLES_IN_TEXT_SECTION(global_options.x_flag_pic && !(((global_options.x_ix86_isa_flags & (1UL << 1)) != 0) || 1))
1270 || readonly_data_section == text_section)
1271 {
1272 insn_lengths[uid]
1273 = (XVECLEN (body, 1)(((((body)->u.fld[1]).rt_rtvec))->num_elem)
1274 * GET_MODE_SIZE (table->get_data_mode ()));
1275 insn_current_address += insn_lengths[uid];
1276 if (insn_lengths[uid] != old_length)
1277 something_changed = 1;
1278 }
1279
1280 continue;
1281 }
1282#endif /* CASE_VECTOR_SHORTEN_MODE */
1283
1284 if (! (varying_length[uid]))
1285 {
1286 if (NONJUMP_INSN_P (insn)(((enum rtx_code) (insn)->code) == INSN)
1287 && GET_CODE (PATTERN (insn))((enum rtx_code) (PATTERN (insn))->code) == SEQUENCE)
1288 {
1289 int i;
1290
1291 body = PATTERN (insn);
1292 for (i = 0; i < XVECLEN (body, 0)(((((body)->u.fld[0]).rt_rtvec))->num_elem); i++)
1293 {
1294 rtx inner_insn = XVECEXP (body, 0, i)(((((body)->u.fld[0]).rt_rtvec))->elem[i]);
1295 int inner_uid = INSN_UID (inner_insn);
1296
1297 INSN_ADDRESSES (inner_uid)(insn_addresses_[inner_uid]) = insn_current_address;
1298
1299 insn_current_address += insn_lengths[inner_uid];
1300 }
1301 }
1302 else
1303 insn_current_address += insn_lengths[uid];
1304
1305 continue;
1306 }
1307
1308 if (NONJUMP_INSN_P (insn)(((enum rtx_code) (insn)->code) == INSN) && GET_CODE (PATTERN (insn))((enum rtx_code) (PATTERN (insn))->code) == SEQUENCE)
1309 {
1310 rtx_sequence *seqn = as_a <rtx_sequence *> (PATTERN (insn));
1311 int i;
1312
1313 body = PATTERN (insn);
1314 new_length = 0;
1315 for (i = 0; i < seqn->len (); i++)
1316 {
1317 rtx_insn *inner_insn = seqn->insn (i);
1318 int inner_uid = INSN_UID (inner_insn);
1319 int inner_length;
1320
1321 INSN_ADDRESSES (inner_uid)(insn_addresses_[inner_uid]) = insn_current_address;
1322
1323 /* insn_current_length returns 0 for insns with a
1324 non-varying length. */
1325 if (! varying_length[inner_uid])
1326 inner_length = insn_lengths[inner_uid];
1327 else
1328 inner_length = insn_current_length (inner_insn);
1329
1330 if (inner_length != insn_lengths[inner_uid])
1331 {
1332 if (!increasing || inner_length > insn_lengths[inner_uid])
1333 {
1334 insn_lengths[inner_uid] = inner_length;
1335 something_changed = 1;
1336 }
1337 else
1338 inner_length = insn_lengths[inner_uid];
1339 }
1340 insn_current_address += inner_length;
1341 new_length += inner_length;
1342 }
1343 }
1344 else
1345 {
1346 new_length = insn_current_length (insn);
1347 insn_current_address += new_length;
1348 }
1349
1350#ifdef ADJUST_INSN_LENGTH
1351 /* If needed, do any adjustment. */
1352 tmp_length = new_length;
1353 ADJUST_INSN_LENGTH (insn, new_length);
1354 insn_current_address += (new_length - tmp_length);
1355#endif
1356
1357 if (new_length != insn_lengths[uid]
1358 && (!increasing || new_length > insn_lengths[uid]))
1359 {
1360 insn_lengths[uid] = new_length;
1361 something_changed = 1;
1362 }
1363 else
1364 insn_current_address += insn_lengths[uid] - new_length;
1365 }
1366 /* For a non-optimizing compile, do only a single pass. */
1367 if (!increasing)
1368 break;
1369 }
1370 crtl(&x_rtl)->max_insn_address = insn_current_address;
1371 free (varying_length);
1372}
1373
1374/* Given the body of an INSN known to be generated by an ASM statement, return
1375 the number of machine instructions likely to be generated for this insn.
1376 This is used to compute its length. */
1377
1378static int
1379asm_insn_count (rtx body)
1380{
1381 const char *templ;
1382
1383 if (GET_CODE (body)((enum rtx_code) (body)->code) == ASM_INPUT)
1384 templ = XSTR (body, 0)(((body)->u.fld[0]).rt_str);
1385 else
1386 templ = decode_asm_operands (body, NULL__null, NULL__null, NULL__null, NULL__null, NULL__null);
1387
1388 return asm_str_count (templ);
1389}
1390
1391/* Return the number of machine instructions likely to be generated for the
1392 inline-asm template. */
1393int
1394asm_str_count (const char *templ)
1395{
1396 int count = 1;
1397
1398 if (!*templ)
1399 return 0;
1400
1401 for (; *templ; templ++)
1402 if (IS_ASM_LOGICAL_LINE_SEPARATOR (*templ, templ)((*templ) == ';')
1403 || *templ == '\n')
1404 count++;
1405
1406 return count;
1407}
1408
1409/* Return true if DWARF2 debug info can be emitted for DECL. */
1410
1411static bool
1412dwarf2_debug_info_emitted_p (tree decl)
1413{
1414 /* When DWARF2 debug info is not generated internally. */
1415 if (!dwarf_debuginfo_p () && !dwarf_based_debuginfo_p ())
1416 return false;
1417
1418 if (DECL_IGNORED_P (decl)((contains_struct_check ((decl), (TS_DECL_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/final.cc", 1418, __FUNCTION__))->decl_common.ignored_flag))
1419 return false;
1420
1421 return true;
1422}
1423
1424/* Return scope resulting from combination of S1 and S2. */
1425static tree
1426choose_inner_scope (tree s1, tree s2)
1427{
1428 if (!s1)
1429 return s2;
1430 if (!s2)
1431 return s1;
1432 if (BLOCK_NUMBER (s1)((tree_check ((s1), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/final.cc", 1432, __FUNCTION__, (BLOCK)))->block.block_num) > BLOCK_NUMBER (s2)((tree_check ((s2), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/final.cc", 1432, __FUNCTION__, (BLOCK)))->block.block_num))
1433 return s1;
1434 return s2;
1435}
1436
1437/* Emit lexical block notes needed to change scope from S1 to S2. */
1438
1439static void
1440change_scope (rtx_insn *orig_insn, tree s1, tree s2)
1441{
1442 rtx_insn *insn = orig_insn;
1443 tree com = NULL_TREE(tree) __null;
1444 tree ts1 = s1, ts2 = s2;
1445 tree s;
1446
1447 while (ts1 != ts2)
1448 {
1449 gcc_assert (ts1 && ts2)((void)(!(ts1 && ts2) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/final.cc", 1449, __FUNCTION__), 0 : 0));
1450 if (BLOCK_NUMBER (ts1)((tree_check ((ts1), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/final.cc", 1450, __FUNCTION__, (BLOCK)))->block.block_num) > BLOCK_NUMBER (ts2)((tree_check ((ts2), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/final.cc", 1450, __FUNCTION__, (BLOCK)))->block.block_num))
1451 ts1 = BLOCK_SUPERCONTEXT (ts1)((tree_check ((ts1), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/final.cc", 1451, __FUNCTION__, (BLOCK)))->block.supercontext);
1452 else if (BLOCK_NUMBER (ts1)((tree_check ((ts1), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/final.cc", 1452, __FUNCTION__, (BLOCK)))->block.block_num) < BLOCK_NUMBER (ts2)((tree_check ((ts2), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/final.cc", 1452, __FUNCTION__, (BLOCK)))->block.block_num))
1453 ts2 = BLOCK_SUPERCONTEXT (ts2)((tree_check ((ts2), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/final.cc", 1453, __FUNCTION__, (BLOCK)))->block.supercontext);
1454 else
1455 {
1456 ts1 = BLOCK_SUPERCONTEXT (ts1)((tree_check ((ts1), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/final.cc", 1456, __FUNCTION__, (BLOCK)))->block.supercontext);
1457 ts2 = BLOCK_SUPERCONTEXT (ts2)((tree_check ((ts2), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/final.cc", 1457, __FUNCTION__, (BLOCK)))->block.supercontext);
1458 }
1459 }
1460 com = ts1;
1461
1462 /* Close scopes. */
1463 s = s1;
1464 while (s != com)
1465 {
1466 rtx_note *note = emit_note_before (NOTE_INSN_BLOCK_END, insn);
1467 NOTE_BLOCK (note)(((note)->u.fld[3]).rt_tree) = s;
1468 s = BLOCK_SUPERCONTEXT (s)((tree_check ((s), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/final.cc"
, 1468, __FUNCTION__, (BLOCK)))->block.supercontext)
;
1469 }
1470
1471 /* Open scopes. */
1472 s = s2;
1473 while (s != com)
1474 {
1475 insn = emit_note_before (NOTE_INSN_BLOCK_BEG, insn);
1476 NOTE_BLOCK (insn)(((insn)->u.fld[3]).rt_tree) = s;
1477 s = BLOCK_SUPERCONTEXT (s)((tree_check ((s), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/final.cc"
, 1477, __FUNCTION__, (BLOCK)))->block.supercontext)
;
1478 }
1479}
1480
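Reading aid, not code from this file: change_scope first finds the nearest common ancestor of S1 and S2 by repeatedly lifting whichever block has the larger BLOCK_NUMBER (the walk relies on every block being numbered after its supercontext), then emits NOTE_INSN_BLOCK_END notes from S1 up to that ancestor and NOTE_INSN_BLOCK_BEG notes from S2 up to it. A hedged sketch of the ancestor walk in isolation, using hypothetical names:

/* Minimal model of the common-ancestor walk, assuming each node is
   numbered after its parent, as BLOCK_NUMBER is once number_blocks has
   run.  */
struct scope_node { int number; scope_node *parent; };

static scope_node *
common_ancestor (scope_node *a, scope_node *b)
{
  while (a != b)
    {
      if (a->number > b->number)
        a = a->parent;          /* lift the later-numbered (deeper) node */
      else if (a->number < b->number)
        b = b->parent;
      else
        {
          a = a->parent;        /* same number, distinct nodes: lift both */
          b = b->parent;
        }
    }
  return a;
}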
1481/* Rebuild all the NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes based
1482 on the scope tree and the newly reordered instructions. */
1483
1484static void
1485reemit_insn_block_notes (void)
1486{
1487 tree cur_block = DECL_INITIAL (cfun->decl)((contains_struct_check (((cfun + 0)->decl), (TS_DECL_COMMON
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/final.cc"
, 1487, __FUNCTION__))->decl_common.initial)
;
1488 rtx_insn *insn;
1489
1490 insn = get_insns ();
1491 for (; insn; insn = NEXT_INSN (insn))
1492 {
1493 tree this_block;
1494
1495 /* Prevent lexical blocks from straddling section boundaries. */
1496 if (NOTE_P (insn)(((enum rtx_code) (insn)->code) == NOTE))
1497 switch (NOTE_KIND (insn)(((insn)->u.fld[4]).rt_int))
1498 {
1499 case NOTE_INSN_SWITCH_TEXT_SECTIONS:
1500 {
1501 for (tree s = cur_block; s != DECL_INITIAL (cfun->decl)((contains_struct_check (((cfun + 0)->decl), (TS_DECL_COMMON
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/final.cc"
, 1501, __FUNCTION__))->decl_common.initial)
;
1502 s = BLOCK_SUPERCONTEXT (s)((tree_check ((s), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/final.cc"
, 1502, __FUNCTION__, (BLOCK)))->block.supercontext)
)
1503 {
1504 rtx_note *note = emit_note_before (NOTE_INSN_BLOCK_END, insn);
1505 NOTE_BLOCK (note)(((note)->u.fld[3]).rt_tree) = s;
1506 note = emit_note_after (NOTE_INSN_BLOCK_BEG, insn);
1507 NOTE_BLOCK (note)(((note)->u.fld[3]).rt_tree) = s;
1508 }
1509 }
1510 break;
1511
1512 case NOTE_INSN_BEGIN_STMT:
1513 case NOTE_INSN_INLINE_ENTRY:
1514 this_block = LOCATION_BLOCK (NOTE_MARKER_LOCATION (insn))((tree) ((IS_ADHOC_LOC ((((insn)->u.fld[3]).rt_uint))) ? get_data_from_adhoc_loc
(line_table, ((((insn)->u.fld[3]).rt_uint))) : __null))
;
1515 goto set_cur_block_to_this_block;
1516
1517 default:
1518 continue;
1519 }
1520
1521 if (!active_insn_p (insn))
1522 continue;
1523
1524 /* Avoid putting scope notes between a jump table and its label. */
1525 if (JUMP_TABLE_DATA_P (insn)(((enum rtx_code) (insn)->code) == JUMP_TABLE_DATA))
1526 continue;
1527
1528 this_block = insn_scope (insn);
1529 /* For sequences compute scope resulting from merging all scopes
1530 of instructions nested inside. */
1531 if (rtx_sequence *body = dyn_cast <rtx_sequence *> (PATTERN (insn)))
1532 {
1533 int i;
1534
1535 this_block = NULL__null;
1536 for (i = 0; i < body->len (); i++)
1537 this_block = choose_inner_scope (this_block,
1538 insn_scope (body->insn (i)));
1539 }
1540 set_cur_block_to_this_block:
1541 if (! this_block)
1542 {
1543 if (INSN_LOCATION (insn) == UNKNOWN_LOCATION((location_t) 0))
1544 continue;
1545 else
1546 this_block = DECL_INITIAL (cfun->decl)((contains_struct_check (((cfun + 0)->decl), (TS_DECL_COMMON
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/final.cc"
, 1546, __FUNCTION__))->decl_common.initial)
;
1547 }
1548
1549 if (this_block != cur_block)
1550 {
1551 change_scope (insn, cur_block, this_block);
1552 cur_block = this_block;
1553 }
1554 }
1555
1556 /* change_scope emits before the insn, not after. */
1557 rtx_note *note = emit_note (NOTE_INSN_DELETED);
1558 change_scope (note, cur_block, DECL_INITIAL (cfun->decl)((contains_struct_check (((cfun + 0)->decl), (TS_DECL_COMMON
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/final.cc"
, 1558, __FUNCTION__))->decl_common.initial)
);
1559 delete_insn (note);
1560
1561 reorder_blocks ();
1562}
1563
1564static const char *some_local_dynamic_name;
1565
1566/* Locate some local-dynamic symbol still in use by this function
1567 so that we can print its name in local-dynamic base patterns.
1568 Return null if there are no local-dynamic references. */
1569
1570const char *
1571get_some_local_dynamic_name ()
1572{
1573 subrtx_iterator::array_type array;
1574 rtx_insn *insn;
1575
1576 if (some_local_dynamic_name)
1577 return some_local_dynamic_name;
1578
1579 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
1580 if (NONDEBUG_INSN_P (insn)((((enum rtx_code) (insn)->code) == INSN) || (((enum rtx_code
) (insn)->code) == JUMP_INSN) || (((enum rtx_code) (insn)->
code) == CALL_INSN))
)
1581 FOR_EACH_SUBRTX (iter, array, PATTERN (insn), ALL)for (subrtx_iterator iter (array, PATTERN (insn), rtx_all_subrtx_bounds
); !iter.at_end (); iter.next ())
1582 {
1583 const_rtx x = *iter;
1584 if (GET_CODE (x)((enum rtx_code) (x)->code) == SYMBOL_REF)
1585 {
1586 if (SYMBOL_REF_TLS_MODEL (x)((enum tls_model) (((__extension__ ({ __typeof ((x)) const _rtx
= ((x)); if (((enum rtx_code) (_rtx)->code) != SYMBOL_REF
) rtl_check_failed_flag ("SYMBOL_REF_FLAGS", _rtx, "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/final.cc"
, 1586, __FUNCTION__); _rtx; }) ->u2.symbol_ref_flags) >>
3) & 7))
== TLS_MODEL_LOCAL_DYNAMIC)
1587 return some_local_dynamic_name = XSTR (x, 0)(((x)->u.fld[0]).rt_str);
1588 if (CONSTANT_POOL_ADDRESS_P (x)(__extension__ ({ __typeof ((x)) const _rtx = ((x)); if (((enum
rtx_code) (_rtx)->code) != SYMBOL_REF) rtl_check_failed_flag
("CONSTANT_POOL_ADDRESS_P", _rtx, "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/final.cc"
, 1588, __FUNCTION__); _rtx; })->unchanging)
)
1589 iter.substitute (get_pool_constant (x));
1590 }
1591 }
1592
1593 return 0;
1594}
1595
1596/* Arrange for us to emit a source location note before any further
1597 real insns or section changes, by setting the SEEN_NEXT_VIEW bit in
1598 *SEEN, as long as we are keeping track of location views. The bit
1599 indicates we have referenced the next view at the current PC, so we
1600 have to emit it. This should be called next to the var_location
1601 debug hook. */
1602
1603static inline void
1604set_next_view_needed (int *seen)
1605{
1606 if (debug_variable_location_viewsglobal_options.x_debug_variable_location_views)
1607 *seen |= SEEN_NEXT_VIEW4;
1608}
1609
1610/* Clear the flag in *SEEN indicating we need to emit the next view.
1611 This should be called next to the source_line debug hook. */
1612
1613static inline void
1614clear_next_view_needed (int *seen)
1615{
1616 *seen &= ~SEEN_NEXT_VIEW4;
1617}
1618
1619/* Test whether we have a pending request to emit the next view in
1620 *SEEN, and emit it if needed, clearing the request bit. */
1621
1622static inline void
1623maybe_output_next_view (int *seen)
1624{
1625 if ((*seen & SEEN_NEXT_VIEW4) != 0)
1626 {
1627 clear_next_view_needed (seen);
1628 (*debug_hooks->source_line) (last_linenum, last_columnnum,
1629 last_filename, last_discriminator,
1630 false);
1631 }
1632}
1633
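Hedged sketch of the *SEEN bit protocol these three helpers implement; the numeric values 1, 2 and 4 come from the macro expansions above (SEEN_NOTE, SEEN_EMITTED, SEEN_NEXT_VIEW), while everything below is illustrative rather than the definitions final.cc actually uses.

/* Only the pending-view bit is modelled here; the other two bits are
   consulted further down when NOTE_INSN_PROLOGUE_END and
   NOTE_INSN_FUNCTION_BEG are scanned.  */
enum { SKETCH_SEEN_NOTE = 1, SKETCH_SEEN_EMITTED = 2, SKETCH_SEEN_NEXT_VIEW = 4 };

static void
sketch_request_next_view (int *seen, bool tracking_views)
{
  if (tracking_views)                    /* mirrors set_next_view_needed */
    *seen |= SKETCH_SEEN_NEXT_VIEW;
}

static bool
sketch_consume_next_view (int *seen)     /* mirrors maybe_output_next_view */
{
  if ((*seen & SKETCH_SEEN_NEXT_VIEW) == 0)
    return false;
  *seen &= ~SKETCH_SEEN_NEXT_VIEW;       /* mirrors clear_next_view_needed */
  return true;                           /* the caller then emits the source_line hook */
}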
1634/* We want to emit param bindings (before the first begin_stmt) in the
1635 initial view, if we are emitting views. To that end, we may
1636 consume initial notes in the function, processing them in
1637 final_start_function, before signaling the beginning of the
1638 prologue, rather than in final.
1639
1640 We don't test whether the DECLs are PARM_DECLs: the assumption is
1641 that there will be a NOTE_INSN_BEGIN_STMT marker before any
1642 non-parameter NOTE_INSN_VAR_LOCATION. It's ok if the marker is not
1643 there; we'll just have more variable locations bound in the initial
1644 view, which is consistent with their being bound without any code
1645 that would give them a value. */
1646
1647static inline bool
1648in_initial_view_p (rtx_insn *insn)
1649{
1650 return (!DECL_IGNORED_P (current_function_decl)((contains_struct_check ((current_function_decl), (TS_DECL_COMMON
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/final.cc"
, 1650, __FUNCTION__))->decl_common.ignored_flag)
1651 && debug_variable_location_viewsglobal_options.x_debug_variable_location_views
1652 && insn && GET_CODE (insn)((enum rtx_code) (insn)->code) == NOTE
1653 && (NOTE_KIND (insn)(((insn)->u.fld[4]).rt_int) == NOTE_INSN_VAR_LOCATION
1654 || NOTE_KIND (insn)(((insn)->u.fld[4]).rt_int) == NOTE_INSN_DELETED));
1655}
1656
1657/* Output assembler code for the start of a function,
1658 and initialize some of the variables in this file
1659 for the new function. The label for the function and associated
1660 assembler pseudo-ops have already been output in `assemble_start_function'.
1661
1662 FIRST is the first insn of the rtl for the function being compiled.
1663 FILE is the file to write assembler code to.
1664 SEEN should be initially set to zero, and it may be updated to
1665 indicate we have references to the next location view, that would
1666 require us to emit it at the current PC.
1667 OPTIMIZE_P is nonzero if we should eliminate redundant
1668 test and compare insns. */
1669
1670static void
1671final_start_function_1 (rtx_insn **firstp, FILE *file, int *seen,
1672 int optimize_p ATTRIBUTE_UNUSED__attribute__ ((__unused__)))
1673{
1674 block_depth = 0;
1675
1676 this_is_asm_operands = 0;
1677
1678 need_profile_function = false;
1679
1680 last_filename = LOCATION_FILE (prologue_location)((expand_location (prologue_location)).file);
1681 last_linenum = LOCATION_LINE (prologue_location)((expand_location (prologue_location)).line);
1682 last_columnnum = LOCATION_COLUMN (prologue_location)((expand_location (prologue_location)).column);
1683 last_discriminator = 0;
1684 force_source_line = false;
1685
1686 high_block_linenum = high_function_linenum = last_linenum;
1687
1688 if (flag_sanitizeglobal_options.x_flag_sanitize & SANITIZE_ADDRESS)
1689 asan_function_start ();
1690
1691 rtx_insn *first = *firstp;
1692 if (in_initial_view_p (first))
1693 {
1694 do
1695 {
1696 final_scan_insn (first, file, 0, 0, seen);
1697 first = NEXT_INSN (first);
1698 }
1699 while (in_initial_view_p (first));
1700 *firstp = first;
1701 }
1702
1703 if (!DECL_IGNORED_P (current_function_decl)((contains_struct_check ((current_function_decl), (TS_DECL_COMMON
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/final.cc"
, 1703, __FUNCTION__))->decl_common.ignored_flag)
)
1704 debug_hooks->begin_prologue (last_linenum, last_columnnum,
1705 last_filename);
1706
1707 if (!dwarf2_debug_info_emitted_p (current_function_decl))
1708 dwarf2out_begin_prologue (0, 0, NULL__null);
1709
1710 if (DECL_IGNORED_P (current_function_decl)((contains_struct_check ((current_function_decl), (TS_DECL_COMMON
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/final.cc"
, 1710, __FUNCTION__))->decl_common.ignored_flag)
&& last_linenum && last_filename)
1711 debug_hooks->set_ignored_loc (last_linenum, last_columnnum, last_filename);
1712
1713#ifdef LEAF_REG_REMAP
1714 if (crtl(&x_rtl)->uses_only_leaf_regs)
1715 leaf_renumber_regs (first);
1716#endif
1717
1718 /* The Sun386i and perhaps other machines don't work right
1719 if the profiling code comes after the prologue. */
1720 if (targetm.profile_before_prologue () && crtl(&x_rtl)->profile)
1721 {
1722 if (targetm.asm_out.function_prologue == default_function_pro_epilogue
1723 && targetm.have_prologue ())
1724 {
1725 rtx_insn *insn;
1726 for (insn = first; insn; insn = NEXT_INSN (insn))
1727 if (!NOTE_P (insn)(((enum rtx_code) (insn)->code) == NOTE))
1728 {
1729 insn = NULL__null;
1730 break;
1731 }
1732 else if (NOTE_KIND (insn)(((insn)->u.fld[4]).rt_int) == NOTE_INSN_BASIC_BLOCK
1733 || NOTE_KIND (insn)(((insn)->u.fld[4]).rt_int) == NOTE_INSN_FUNCTION_BEG)
1734 break;
1735 else if (NOTE_KIND (insn)(((insn)->u.fld[4]).rt_int) == NOTE_INSN_DELETED
1736 || NOTE_KIND (insn)(((insn)->u.fld[4]).rt_int) == NOTE_INSN_VAR_LOCATION)
1737 continue;
1738 else
1739 {
1740 insn = NULL__null;
1741 break;
1742 }
1743
1744 if (insn)
1745 need_profile_function = true;
1746 else
1747 profile_function (file);
1748 }
1749 else
1750 profile_function (file);
1751 }
1752
1753 /* If debugging, assign block numbers to all of the blocks in this
1754 function. */
1755 if (write_symbolsglobal_options.x_write_symbols)
1756 {
1757 reemit_insn_block_notes ();
1758 number_blocks (current_function_decl);
1759 /* We never actually put out begin/end notes for the top-level
1760 block in the function. But, conceptually, that block is
1761 always needed. */
1762 TREE_ASM_WRITTEN (DECL_INITIAL (current_function_decl))((((contains_struct_check ((current_function_decl), (TS_DECL_COMMON
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/final.cc"
, 1762, __FUNCTION__))->decl_common.initial))->base.asm_written_flag
)
= 1;
1763 }
1764
1765 unsigned HOST_WIDE_INTlong min_frame_size
1766 = constant_lower_bound (get_frame_size ());
1767 if (min_frame_size > (unsigned HOST_WIDE_INTlong) warn_frame_larger_than_sizeglobal_options.x_warn_frame_larger_than_size)
1768 {
1769 /* Issue a warning */
1770 warning (OPT_Wframe_larger_than_,
1771 "the frame size of %wu bytes is larger than %wu bytes",
1772 min_frame_size, warn_frame_larger_than_sizeglobal_options.x_warn_frame_larger_than_size);
1773 }
1774
1775 /* First output the function prologue: code to set up the stack frame. */
1776 targetm.asm_out.function_prologue (file);
1777
1778 /* If the machine represents the prologue as RTL, the profiling code must
1779 be emitted when NOTE_INSN_PROLOGUE_END is scanned. */
1780 if (! targetm.have_prologue ())
1781 profile_after_prologue (file);
1782}
1783
1784/* This is an exported final_start_function_1, callable without SEEN. */
1785
1786void
1787final_start_function (rtx_insn *first, FILE *file,
1788 int optimize_p ATTRIBUTE_UNUSED__attribute__ ((__unused__)))
1789{
1790 int seen = 0;
1791 final_start_function_1 (&first, file, &seen, optimize_p);
1792 gcc_assert (seen == 0)((void)(!(seen == 0) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/final.cc"
, 1792, __FUNCTION__), 0 : 0))
;
1793}
1794
1795static void
1796profile_after_prologue (FILE *file ATTRIBUTE_UNUSED__attribute__ ((__unused__)))
1797{
1798 if (!targetm.profile_before_prologue () && crtl(&x_rtl)->profile)
1799 profile_function (file);
1800}
1801
1802static void
1803profile_function (FILE *file ATTRIBUTE_UNUSED__attribute__ ((__unused__)))
1804{
1805#ifndef NO_PROFILE_COUNTERS1
1806# define NO_PROFILE_COUNTERS1 0
1807#endif
1808#ifdef ASM_OUTPUT_REG_PUSH
1809 rtx sval = NULL__null, chain = NULL__null;
1810
1811 if (cfun(cfun + 0)->returns_struct)
1812 sval = targetm.calls.struct_value_rtx (TREE_TYPE (current_function_decl)((contains_struct_check ((current_function_decl), (TS_TYPED),
"/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/final.cc"
, 1812, __FUNCTION__))->typed.type)
,
1813 true);
1814 if (cfun(cfun + 0)->static_chain_decl)
1815 chain = targetm.calls.static_chain (current_function_decl, true);
1816#endif /* ASM_OUTPUT_REG_PUSH */
1817
1818 if (! NO_PROFILE_COUNTERS1)
1819 {
1820 int align = MIN (BIGGEST_ALIGNMENT, LONG_TYPE_SIZE)(((((global_options.x_target_flags & (1U << 12)) !=
0) ? 32 : (((global_options.x_ix86_isa_flags & (1UL <<
15)) != 0) ? 512 : (((global_options.x_ix86_isa_flags & (
1UL << 8)) != 0) ? 256 : 128)))) < ((((global_options
.x_ix86_isa_flags & (1UL << 58)) != 0) ? 32 : ((8) *
(((global_options.x_ix86_isa_flags & (1UL << 1)) !=
0) ? 8 : 4)))) ? ((((global_options.x_target_flags & (1U
<< 12)) != 0) ? 32 : (((global_options.x_ix86_isa_flags
& (1UL << 15)) != 0) ? 512 : (((global_options.x_ix86_isa_flags
& (1UL << 8)) != 0) ? 256 : 128)))) : ((((global_options
.x_ix86_isa_flags & (1UL << 58)) != 0) ? 32 : ((8) *
(((global_options.x_ix86_isa_flags & (1UL << 1)) !=
0) ? 8 : 4)))))
;
1821 switch_to_section (data_section);
1822 ASM_OUTPUT_ALIGN (file, floor_log2 (align / BITS_PER_UNIT))if ((floor_log2 (align / (8))) != 0) fprintf ((file), "\t.align %d\n"
, 1 << (floor_log2 (align / (8))))
;
1823 targetm.asm_out.internal_label (file, "LP", current_function_funcdef_no((cfun + 0)->funcdef_no));
1824 assemble_integer (const0_rtx(const_int_rtx[64]), LONG_TYPE_SIZE(((global_options.x_ix86_isa_flags & (1UL << 58)) !=
0) ? 32 : ((8) * (((global_options.x_ix86_isa_flags & (1UL
<< 1)) != 0) ? 8 : 4)))
/ BITS_PER_UNIT(8), align, 1);
1825 }
1826
1827 switch_to_section (current_function_section ());
1828
1829#ifdef ASM_OUTPUT_REG_PUSH
1830 if (sval && REG_P (sval)(((enum rtx_code) (sval)->code) == REG))
1831 ASM_OUTPUT_REG_PUSH (file, REGNO (sval))asm_fprintf ((file), "\tpush%z\t%%%r\n", ((rhs_regno(sval))));
1832 if (chain && REG_P (chain)(((enum rtx_code) (chain)->code) == REG))
1833 ASM_OUTPUT_REG_PUSH (file, REGNO (chain))asm_fprintf ((file), "\tpush%z\t%%%r\n", ((rhs_regno(chain)))
)
;
1834#endif
1835
1836 FUNCTION_PROFILER (file, current_function_funcdef_no)x86_function_profiler ((file), (((cfun + 0)->funcdef_no)));
1837
1838#ifdef ASM_OUTPUT_REG_PUSH
1839 if (chain && REG_P (chain)(((enum rtx_code) (chain)->code) == REG))
1840 ASM_OUTPUT_REG_POP (file, REGNO (chain))asm_fprintf ((file), "\tpop%z\t%%%r\n", ((rhs_regno(chain))));
1841 if (sval && REG_P (sval)(((enum rtx_code) (sval)->code) == REG))
1842 ASM_OUTPUT_REG_POP (file, REGNO (sval))asm_fprintf ((file), "\tpop%z\t%%%r\n", ((rhs_regno(sval))));
1843#endif
1844}
1845
1846/* Output assembler code for the end of a function.
1847 For clarity, args are same as those of `final_start_function'
1848 even though not all of them are needed. */
1849
1850void
1851final_end_function (void)
1852{
1853 app_disable ();
1854
1855 if (!DECL_IGNORED_P (current_function_decl)((contains_struct_check ((current_function_decl), (TS_DECL_COMMON
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/final.cc"
, 1855, __FUNCTION__))->decl_common.ignored_flag)
)
1856 debug_hooks->end_function (high_function_linenum);
1857
1858 /* Finally, output the function epilogue:
1859 code to restore the stack frame and return to the caller. */
1860 targetm.asm_out.function_epilogue (asm_out_file);
1861
1862 /* And debug output. */
1863 if (!DECL_IGNORED_P (current_function_decl)((contains_struct_check ((current_function_decl), (TS_DECL_COMMON
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/final.cc"
, 1863, __FUNCTION__))->decl_common.ignored_flag)
)
1864 debug_hooks->end_epilogue (last_linenum, last_filename);
1865
1866 if (!dwarf2_debug_info_emitted_p (current_function_decl)
1867 && dwarf2out_do_frame ())
1868 dwarf2out_end_epilogue (last_linenum, last_filename);
1869
1870 some_local_dynamic_name = 0;
1871}
1872
1873
1874/* Dumper helper for basic block information. FILE is the assembly
1875 output file, and INSN is the instruction being emitted. */
1876
1877static void
1878dump_basic_block_info (FILE *file, rtx_insn *insn, basic_block *start_to_bb,
1879 basic_block *end_to_bb, int bb_map_size, int *bb_seqn)
1880{
1881 basic_block bb;
1882
1883 if (!flag_debug_asmglobal_options.x_flag_debug_asm)
1884 return;
1885
1886 if (INSN_UID (insn) < bb_map_size
1887 && (bb = start_to_bb[INSN_UID (insn)]) != NULL__null)
1888 {
1889 edge e;
1890 edge_iterator ei;
1891
1892 fprintf (file, "%s BLOCK %d", ASM_COMMENT_START"#", bb->index);
1893 if (bb->count.initialized_p ())
1894 {
1895 fprintf (file, ", count:");
1896 bb->count.dump (file);
1897 }
1898 fprintf (file, " seq:%d", (*bb_seqn)++);
1899 fprintf (file, "\n%s PRED:", ASM_COMMENT_START"#");
1900 FOR_EACH_EDGE (e, ei, bb->preds)for ((ei) = ei_start_1 (&((bb->preds))); ei_cond ((ei)
, &(e)); ei_next (&(ei)))
1901 {
1902 dump_edge_info (file, e, TDF_DETAILS, 0);
1903 }
1904 fprintf (file, "\n");
1905 }
1906 if (INSN_UID (insn) < bb_map_size
1907 && (bb = end_to_bb[INSN_UID (insn)]) != NULL__null)
1908 {
1909 edge e;
1910 edge_iterator ei;
1911
1912 fprintf (asm_out_file, "%s SUCC:", ASM_COMMENT_START"#");
1913 FOR_EACH_EDGE (e, ei, bb->succs)for ((ei) = ei_start_1 (&((bb->succs))); ei_cond ((ei)
, &(e)); ei_next (&(ei)))
1914 {
1915 dump_edge_info (asm_out_file, e, TDF_DETAILS, 1);
1916 }
1917 fprintf (file, "\n");
1918 }
1919}
1920
1921/* Output assembler code for some insns: all or part of a function.
1922 For description of args, see `final_start_function', above. */
1923
1924static void
1925final_1 (rtx_insn *first, FILE *file, int seen, int optimize_p)
1926{
1927 rtx_insn *insn, *next;
1928
1929 /* Used for -dA dump. */
1930 basic_block *start_to_bb = NULL__null;
1931 basic_block *end_to_bb = NULL__null;
1932 int bb_map_size = 0;
1933 int bb_seqn = 0;
1934
1935 last_ignored_compare = 0;
1936
1937 init_recog ();
1938
1939 CC_STATUS_INIT;
1940
1941 if (flag_debug_asmglobal_options.x_flag_debug_asm)
1942 {
1943 basic_block bb;
1944
1945 bb_map_size = get_max_uid () + 1;
1946 start_to_bb = XCNEWVEC (basic_block, bb_map_size)((basic_block *) xcalloc ((bb_map_size), sizeof (basic_block)
))
;
1947 end_to_bb = XCNEWVEC (basic_block, bb_map_size)((basic_block *) xcalloc ((bb_map_size), sizeof (basic_block)
))
;
1948
1949 /* There is no cfg for a thunk. */
1950 if (!cfun(cfun + 0)->is_thunk)
1951 FOR_EACH_BB_REVERSE_FN (bb, cfun)for (bb = ((cfun + 0))->cfg->x_exit_block_ptr->prev_bb
; bb != ((cfun + 0))->cfg->x_entry_block_ptr; bb = bb->
prev_bb)
1952 {
1953 start_to_bb[INSN_UID (BB_HEAD (bb)(bb)->il.x.head_)] = bb;
1954 end_to_bb[INSN_UID (BB_END (bb)(bb)->il.x.rtl->end_)] = bb;
1955 }
1956 }
1957
1958 /* Output the insns. */
1959 for (insn = first; insn;)
1960 {
1961 if (HAVE_ATTR_length1)
1962 {
1963 if ((unsigned) INSN_UID (insn) >= INSN_ADDRESSES_SIZE ()(insn_addresses_.length ()))
1964 {
1965 /* This can be triggered by bugs elsewhere in the compiler if
1966 new insns are created after init_insn_lengths is called. */
1967 gcc_assert (NOTE_P (insn))((void)(!((((enum rtx_code) (insn)->code) == NOTE)) ? fancy_abort
("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/final.cc"
, 1967, __FUNCTION__), 0 : 0))
;
1968 insn_current_address = -1;
1969 }
1970 else
1971 insn_current_address = INSN_ADDRESSES (INSN_UID (insn))(insn_addresses_[INSN_UID (insn)]);
1972 /* final can be seen as an iteration of shorten_branches that
1973 does nothing (since a fixed point has already been reached). */
1974 insn_last_address = insn_current_address;
1975 }
1976
1977 dump_basic_block_info (file, insn, start_to_bb, end_to_bb,
1978 bb_map_size, &bb_seqn);
1979 insn = final_scan_insn (insn, file, optimize_p, 0, &seen);
1980 }
1981
1982 maybe_output_next_view (&seen);
1983
1984 if (flag_debug_asmglobal_options.x_flag_debug_asm)
1985 {
1986 free (start_to_bb);
1987 free (end_to_bb);
1988 }
1989
1990 /* Remove CFI notes, to avoid compare-debug failures. */
1991 for (insn = first; insn; insn = next)
1992 {
1993 next = NEXT_INSN (insn);
1994 if (NOTE_P (insn)(((enum rtx_code) (insn)->code) == NOTE)
1995 && (NOTE_KIND (insn)(((insn)->u.fld[4]).rt_int) == NOTE_INSN_CFI
1996 || NOTE_KIND (insn)(((insn)->u.fld[4]).rt_int) == NOTE_INSN_CFI_LABEL))
1997 delete_insn (insn);
1998 }
1999}
2000
2001/* This is an exported final_1, callable without SEEN. */
2002
2003void
2004final (rtx_insn *first, FILE *file, int optimize_p)
2005{
2006 /* Those that use the internal final_start_function_1/final_1 API
2007 skip initial debug bind notes in final_start_function_1, and pass
2008 the modified FIRST to final_1. But with the public
2009 final_start_function/final APIs, final_start_function can't move
2010 FIRST because it's not passed by reference, so if the notes were
2011 skipped there, skip them again here. */
2012 while (in_initial_view_p (first))
2013 first = NEXT_INSN (first);
2014
2015 final_1 (first, file, 0, optimize_p);
2016}
2017
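Hedged usage sketch of the two entry-point pairs; the wrapper names below are hypothetical, and only the calls to final_start_function_1/final_1 and final_start_function/final come from this file. The internal pair threads SEEN and the possibly advanced FIRST between the two calls; the public pair hides both and relies on final re-skipping the initial-view notes, as the comment above explains.

/* Internal pair: FIRST is passed by reference and SEEN carries the
   pending-view state from the prologue scan into the body scan.  */
static void
emit_body_via_internal_api (rtx_insn *first, FILE *file, int optimize_p)
{
  int seen = 0;
  final_start_function_1 (&first, file, &seen, optimize_p);
  final_1 (first, file, seen, optimize_p);
}

/* Public pair: FIRST cannot be moved by final_start_function, so final
   skips the initial-view notes again itself.  */
static void
emit_body_via_public_api (rtx_insn *first, FILE *file, int optimize_p)
{
  final_start_function (first, file, optimize_p);
  final (first, file, optimize_p);
}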
2018const char *
2019get_insn_template (int code, rtx_insn *insn)
2020{
2021 switch (insn_data[code].output_format)
2022 {
2023 case INSN_OUTPUT_FORMAT_SINGLE1:
2024 return insn_data[code].output.single;
2025 case INSN_OUTPUT_FORMAT_MULTI2:
2026 return insn_data[code].output.multi[which_alternative];
2027 case INSN_OUTPUT_FORMAT_FUNCTION3:
2028 gcc_assert (insn)((void)(!(insn) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/final.cc"
, 2028, __FUNCTION__), 0 : 0))
;
2029 return (*insn_data[code].output.function) (recog_data.operand, insn);
2030
2031 default:
2032 gcc_unreachable ()(fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/final.cc"
, 2032, __FUNCTION__))
;
2033 }
2034}
2035
2036/* Emit the appropriate declaration for an alternate-entry-point
2037 symbol represented by INSN, to FILE. INSN is a CODE_LABEL with
2038 LABEL_KIND != LABEL_NORMAL.
2039
2040 The case fall-through in this function is intentional. */
2041static void
2042output_alternate_entry_point (FILE *file, rtx_insn *insn)
2043{
2044 const char *name = LABEL_NAME (insn)(((insn)->u.fld[6]).rt_str);
2045
2046 switch (LABEL_KIND (insn)__extension__ ({ __typeof (insn) const _label = (insn); if (!
(((enum rtx_code) (_label)->code) == CODE_LABEL)) rtl_check_failed_flag
("LABEL_KIND", _label, "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/final.cc"
, 2046, __FUNCTION__); (enum label_kind) ((_label->jump <<
1) | _label->call); })
)
2047 {
2048 case LABEL_WEAK_ENTRY:
2049#ifdef ASM_WEAKEN_LABEL
2050 ASM_WEAKEN_LABEL (file, name)do { fputs ("\t.weak\t", (file)); assemble_name ((file), (name
)); fputc ('\n', (file)); } while (0)
;
2051 gcc_fallthrough ();
2052#endif
2053 case LABEL_GLOBAL_ENTRY:
2054 targetm.asm_out.globalize_label (file, name);
2055 gcc_fallthrough ();
2056 case LABEL_STATIC_ENTRY:
2057#ifdef ASM_OUTPUT_TYPE_DIRECTIVE
2058 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function")do { fputs ("\t.type\t", file); assemble_name (file, name); fputs
(", ", file); fprintf (file, "@%s", "function"); putc ('\n',
file); } while (0)
;
2059#endif
2060 ASM_OUTPUT_LABEL (file, name)do { assemble_name ((file), (name)); fputs (":\n", (file)); }
while (0)
;
2061 break;
2062
2063 case LABEL_NORMAL:
2064 default:
2065 gcc_unreachable ()(fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/final.cc"
, 2065, __FUNCTION__))
;
2066 }
2067}
2068
2069/* Given a CALL_INSN, find and return the nested CALL. */
2070static rtx
2071call_from_call_insn (rtx_call_insn *insn)
2072{
2073 rtx x;
2074 gcc_assert (CALL_P (insn))((void)(!((((enum rtx_code) (insn)->code) == CALL_INSN)) ?
fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/final.cc"
, 2074, __FUNCTION__), 0 : 0))
;
2075 x = PATTERN (insn);
2076
2077 while (GET_CODE (x)((enum rtx_code) (x)->code) != CALL)
2078 {
2079 switch (GET_CODE (x)((enum rtx_code) (x)->code))
2080 {
2081 default:
2082 gcc_unreachable ()(fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/final.cc"
, 2082, __FUNCTION__))
;
2083 case COND_EXEC:
2084 x = COND_EXEC_CODE (x)(((x)->u.fld[1]).rt_rtx);
2085 break;
2086 case PARALLEL:
2087 x = XVECEXP (x, 0, 0)(((((x)->u.fld[0]).rt_rtvec))->elem[0]);
2088 break;
2089 case SET:
2090 x = XEXP (x, 1)(((x)->u.fld[1]).rt_rtx);
2091 break;
2092 }
2093 }
2094 return x;
2095}
2096
2097/* Print a comment into the asm showing FILENAME, LINENUM, and the
2098 corresponding source line, if available. */
2099
2100static void
2101asm_show_source (const char *filename, int linenum)
2102{
2103 if (!filename)
2104 return;
2105
2106 char_span line = location_get_source_line (filename, linenum);
2107 if (!line)
2108 return;
2109
2110 fprintf (asm_out_file, "%s %s:%i: ", ASM_COMMENT_START"#", filename, linenum);
2111 /* "line" is not 0-terminated, so we must use its length. */
2112 fwrite (line.get_buffer (), 1, line.length (), asm_out_file);
2113 fputc ('\n', asm_out_file);
2114}
2115
2116 /* Determine whether an absolute jump table is relocatable. */
2117
2118bool
2119jumptable_relocatable (void)
2120{
2121 bool relocatable = false;
2122
2123 if (!CASE_VECTOR_PC_RELATIVE0
2124 && !targetm.asm_out.generate_pic_addr_diff_vec ()
2125 && targetm_common.have_named_sections)
2126 relocatable = targetm.asm_out.reloc_rw_mask ();
2127
2128 return relocatable;
2129}
2130
2131/* The final scan for one insn, INSN.
2132 Args are same as in `final', except that INSN
2133 is the insn being scanned.
2134 Value returned is the next insn to be scanned.
2135
2136 NOPEEPHOLES is the flag to disallow peephole processing (currently
2137 used for within delayed branch sequence output).
2138
2139 SEEN is used to track the end of the prologue, for emitting
2140 debug information. We force the emission of a line note after
2141 both NOTE_INSN_PROLOGUE_END and NOTE_INSN_FUNCTION_BEG. */
2142
2143static rtx_insn *
2144final_scan_insn_1 (rtx_insn *insn, FILE *file, int optimize_p ATTRIBUTE_UNUSED__attribute__ ((__unused__)),
2145 int nopeepholes ATTRIBUTE_UNUSED__attribute__ ((__unused__)), int *seen)
2146{
2147 rtx_insn *next;
2148 rtx_jump_table_data *table;
2149
2150 insn_counter++;
2151
2152 /* Ignore deleted insns. These can occur when we split insns (due to a
2153 template of "#") while not optimizing. */
2154 if (insn->deleted ())
1
Assuming the condition is false
2
Taking false branch
2155 return NEXT_INSN (insn);
2156
2157 switch (GET_CODE (insn)((enum rtx_code) (insn)->code))
3
Control jumps to 'case CODE_LABEL:' at line 2387
2158 {
2159 case NOTE:
2160 switch (NOTE_KIND (insn)(((insn)->u.fld[4]).rt_int))
2161 {
2162 case NOTE_INSN_DELETED:
2163 case NOTE_INSN_UPDATE_SJLJ_CONTEXT:
2164 break;
2165
2166 case NOTE_INSN_SWITCH_TEXT_SECTIONS:
2167 maybe_output_next_view (seen);
2168
2169 output_function_exception_table (0);
2170
2171 if (targetm.asm_out.unwind_emit)
2172 targetm.asm_out.unwind_emit (asm_out_file, insn);
2173
2174 in_cold_section_p = !in_cold_section_p;
2175
2176 gcc_checking_assert (in_cold_section_p)((void)(!(in_cold_section_p) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/final.cc"
, 2176, __FUNCTION__), 0 : 0))
;
2177 if (in_cold_section_p)
2178 cold_function_name
2179 = clone_function_name (current_function_decl, "cold");
2180
2181 if (dwarf2out_do_frame ())
2182 {
2183 dwarf2out_switch_text_section ();
2184 if (!dwarf2_debug_info_emitted_p (current_function_decl)
2185 && !DECL_IGNORED_P (current_function_decl)((contains_struct_check ((current_function_decl), (TS_DECL_COMMON
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/final.cc"
, 2185, __FUNCTION__))->decl_common.ignored_flag)
)
2186 debug_hooks->switch_text_section ();
2187 }
2188 else if (!DECL_IGNORED_P (current_function_decl)((contains_struct_check ((current_function_decl), (TS_DECL_COMMON
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/final.cc"
, 2188, __FUNCTION__))->decl_common.ignored_flag)
)
2189 debug_hooks->switch_text_section ();
2190 if (DECL_IGNORED_P (current_function_decl)((contains_struct_check ((current_function_decl), (TS_DECL_COMMON
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/final.cc"
, 2190, __FUNCTION__))->decl_common.ignored_flag)
&& last_linenum
2191 && last_filename)
2192 debug_hooks->set_ignored_loc (last_linenum, last_columnnum,
2193 last_filename);
2194
2195 switch_to_section (current_function_section ());
2196 targetm.asm_out.function_switched_text_sections (asm_out_file,
2197 current_function_decl,
2198 in_cold_section_p);
2199 /* Emit a label for the split cold section. Form label name by
2200 suffixing "cold" to the original function's name. */
2201 if (in_cold_section_p)
2202 {
2203#ifdef ASM_DECLARE_COLD_FUNCTION_NAME
2204 ASM_DECLARE_COLD_FUNCTION_NAME (asm_out_file,do { do { fputs ("\t.type\t", asm_out_file); assemble_name (asm_out_file
, ((const char *) (tree_check ((cold_function_name), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/final.cc"
, 2206, __FUNCTION__, (IDENTIFIER_NODE)))->identifier.id.str
)); fputs (", ", asm_out_file); fprintf (asm_out_file, "@%s",
"function"); putc ('\n', asm_out_file); } while (0); ; ix86_asm_output_function_label
((asm_out_file), (((const char *) (tree_check ((cold_function_name
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/final.cc"
, 2206, __FUNCTION__, (IDENTIFIER_NODE)))->identifier.id.str
)), (current_function_decl)); } while (0)
2205 IDENTIFIER_POINTERdo { do { fputs ("\t.type\t", asm_out_file); assemble_name (asm_out_file
, ((const char *) (tree_check ((cold_function_name), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/final.cc"
, 2206, __FUNCTION__, (IDENTIFIER_NODE)))->identifier.id.str
)); fputs (", ", asm_out_file); fprintf (asm_out_file, "@%s",
"function"); putc ('\n', asm_out_file); } while (0); ; ix86_asm_output_function_label
((asm_out_file), (((const char *) (tree_check ((cold_function_name
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/final.cc"
, 2206, __FUNCTION__, (IDENTIFIER_NODE)))->identifier.id.str
)), (current_function_decl)); } while (0)
2206 (cold_function_name),do { do { fputs ("\t.type\t", asm_out_file); assemble_name (asm_out_file
, ((const char *) (tree_check ((cold_function_name), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/final.cc"
, 2206, __FUNCTION__, (IDENTIFIER_NODE)))->identifier.id.str
)); fputs (", ", asm_out_file); fprintf (asm_out_file, "@%s",
"function"); putc ('\n', asm_out_file); } while (0); ; ix86_asm_output_function_label
((asm_out_file), (((const char *) (tree_check ((cold_function_name
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/final.cc"
, 2206, __FUNCTION__, (IDENTIFIER_NODE)))->identifier.id.str
)), (current_function_decl)); } while (0)
2207 current_function_decl)do { do { fputs ("\t.type\t", asm_out_file); assemble_name (asm_out_file
, ((const char *) (tree_check ((cold_function_name), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/final.cc"
, 2206, __FUNCTION__, (IDENTIFIER_NODE)))->identifier.id.str
)); fputs (", ", asm_out_file); fprintf (asm_out_file, "@%s",
"function"); putc ('\n', asm_out_file); } while (0); ; ix86_asm_output_function_label
((asm_out_file), (((const char *) (tree_check ((cold_function_name
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/final.cc"
, 2206, __FUNCTION__, (IDENTIFIER_NODE)))->identifier.id.str
)), (current_function_decl)); } while (0)
;
2208#else
2209 ASM_OUTPUT_LABEL (asm_out_file,do { assemble_name ((asm_out_file), (((const char *) (tree_check
((cold_function_name), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/final.cc"
, 2210, __FUNCTION__, (IDENTIFIER_NODE)))->identifier.id.str
))); fputs (":\n", (asm_out_file)); } while (0)
2210 IDENTIFIER_POINTER (cold_function_name))do { assemble_name ((asm_out_file), (((const char *) (tree_check
((cold_function_name), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/final.cc"
, 2210, __FUNCTION__, (IDENTIFIER_NODE)))->identifier.id.str
))); fputs (":\n", (asm_out_file)); } while (0)
;
2211#endif
2212 if (dwarf2out_do_frame ()
2213 && cfun(cfun + 0)->fde->dw_fde_second_begin != NULL__null)
2214 ASM_OUTPUT_LABEL (asm_out_file, cfun->fde->dw_fde_second_begin)do { assemble_name ((asm_out_file), ((cfun + 0)->fde->dw_fde_second_begin
)); fputs (":\n", (asm_out_file)); } while (0)
;
2215 }
2216 break;
2217
2218 case NOTE_INSN_BASIC_BLOCK:
2219 if (need_profile_function)
2220 {
2221 profile_function (asm_out_file);
2222 need_profile_function = false;
2223 }
2224
2225 if (targetm.asm_out.unwind_emit)
2226 targetm.asm_out.unwind_emit (asm_out_file, insn);
2227
2228 break;
2229
2230 case NOTE_INSN_EH_REGION_BEG:
2231 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LEHB",(*targetm.asm_out.internal_label) (asm_out_file, "LEHB", (((insn
)->u.fld[3]).rt_int))
2232 NOTE_EH_HANDLER (insn))(*targetm.asm_out.internal_label) (asm_out_file, "LEHB", (((insn
)->u.fld[3]).rt_int))
;
2233 break;
2234
2235 case NOTE_INSN_EH_REGION_END:
2236 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LEHE",(*targetm.asm_out.internal_label) (asm_out_file, "LEHE", (((insn
)->u.fld[3]).rt_int))
2237 NOTE_EH_HANDLER (insn))(*targetm.asm_out.internal_label) (asm_out_file, "LEHE", (((insn
)->u.fld[3]).rt_int))
;
2238 break;
2239
2240 case NOTE_INSN_PROLOGUE_END:
2241 targetm.asm_out.function_end_prologue (file);
2242 profile_after_prologue (file);
2243
2244 if ((*seen & (SEEN_EMITTED2 | SEEN_NOTE1)) == SEEN_NOTE1)
2245 {
2246 *seen |= SEEN_EMITTED2;
2247 force_source_line = true;
2248 }
2249 else
2250 *seen |= SEEN_NOTE1;
2251
2252 break;
2253
2254 case NOTE_INSN_EPILOGUE_BEG:
2255 if (!DECL_IGNORED_P (current_function_decl)((contains_struct_check ((current_function_decl), (TS_DECL_COMMON
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/final.cc"
, 2255, __FUNCTION__))->decl_common.ignored_flag)
)
2256 (*debug_hooks->begin_epilogue) (last_linenum, last_filename);
2257 targetm.asm_out.function_begin_epilogue (file);
2258 break;
2259
2260 case NOTE_INSN_CFI:
2261 dwarf2out_emit_cfi (NOTE_CFI (insn)(((insn)->u.fld[3]).rt_cfi));
2262 break;
2263
2264 case NOTE_INSN_CFI_LABEL:
2265 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LCFI",(*targetm.asm_out.internal_label) (asm_out_file, "LCFI", (((insn
)->u.fld[3]).rt_int))
2266 NOTE_LABEL_NUMBER (insn))(*targetm.asm_out.internal_label) (asm_out_file, "LCFI", (((insn
)->u.fld[3]).rt_int))
;
2267 break;
2268
2269 case NOTE_INSN_FUNCTION_BEG:
2270 if (need_profile_function)
2271 {
2272 profile_function (asm_out_file);
2273 need_profile_function = false;
2274 }
2275
2276 app_disable ();
2277 if (!DECL_IGNORED_P (current_function_decl)((contains_struct_check ((current_function_decl), (TS_DECL_COMMON
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/final.cc"
, 2277, __FUNCTION__))->decl_common.ignored_flag)
)
2278 debug_hooks->end_prologue (last_linenum, last_filename);
2279
2280 if ((*seen & (SEEN_EMITTED2 | SEEN_NOTE1)) == SEEN_NOTE1)
2281 {
2282 *seen |= SEEN_EMITTED2;
2283 force_source_line = true;
2284 }
2285 else
2286 *seen |= SEEN_NOTE1;
2287
2288 break;
2289
2290 case NOTE_INSN_BLOCK_BEG:
2291 if (debug_info_levelglobal_options.x_debug_info_level >= DINFO_LEVEL_NORMAL
2292 || dwarf_debuginfo_p ()
2293 || write_symbolsglobal_options.x_write_symbols == VMS_DEBUG(1U << DINFO_TYPE_VMS))
2294 {
2295 int n = BLOCK_NUMBER (NOTE_BLOCK (insn))((tree_check (((((insn)->u.fld[3]).rt_tree)), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/final.cc"
, 2295, __FUNCTION__, (BLOCK)))->block.block_num)
;
2296
2297 app_disable ();
2298 ++block_depth;
2299 high_block_linenum = last_linenum;
2300
2301 /* Output debugging info about the symbol-block beginning. */
2302 if (!DECL_IGNORED_P (current_function_decl)((contains_struct_check ((current_function_decl), (TS_DECL_COMMON
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/final.cc"
, 2302, __FUNCTION__))->decl_common.ignored_flag)
)
2303 debug_hooks->begin_block (last_linenum, n);
2304
2305 /* Mark this block as output. */
2306 TREE_ASM_WRITTEN (NOTE_BLOCK (insn))(((((insn)->u.fld[3]).rt_tree))->base.asm_written_flag) = 1;
2307 BLOCK_IN_COLD_SECTION_P (NOTE_BLOCK (insn))((tree_check (((((insn)->u.fld[3]).rt_tree)), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/final.cc"
, 2307, __FUNCTION__, (BLOCK)))->base.u.bits.atomic_flag)
= in_cold_section_p;
2308 }
2309 break;
2310
2311 case NOTE_INSN_BLOCK_END:
2312 maybe_output_next_view (seen);
2313
2314 if (debug_info_levelglobal_options.x_debug_info_level >= DINFO_LEVEL_NORMAL
2315 || dwarf_debuginfo_p ()
2316 || write_symbolsglobal_options.x_write_symbols == VMS_DEBUG(1U << DINFO_TYPE_VMS))
2317 {
2318 int n = BLOCK_NUMBER (NOTE_BLOCK (insn))((tree_check (((((insn)->u.fld[3]).rt_tree)), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/final.cc"
, 2318, __FUNCTION__, (BLOCK)))->block.block_num)
;
2319
2320 app_disable ();
2321
2322 /* End of a symbol-block. */
2323 --block_depth;
2324 gcc_assert (block_depth >= 0)((void)(!(block_depth >= 0) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/final.cc"
, 2324, __FUNCTION__), 0 : 0))
;
2325
2326 if (!DECL_IGNORED_P (current_function_decl)((contains_struct_check ((current_function_decl), (TS_DECL_COMMON
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/final.cc"
, 2326, __FUNCTION__))->decl_common.ignored_flag)
)
2327 debug_hooks->end_block (high_block_linenum, n);
2328 gcc_assert (BLOCK_IN_COLD_SECTION_P (NOTE_BLOCK (insn))((void)(!(((tree_check (((((insn)->u.fld[3]).rt_tree)), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/final.cc"
, 2328, __FUNCTION__, (BLOCK)))->base.u.bits.atomic_flag) ==
in_cold_section_p) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/final.cc"
, 2329, __FUNCTION__), 0 : 0))
2329 == in_cold_section_p)((void)(!(((tree_check (((((insn)->u.fld[3]).rt_tree)), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/final.cc"
, 2328, __FUNCTION__, (BLOCK)))->base.u.bits.atomic_flag) ==
in_cold_section_p) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/final.cc"
, 2329, __FUNCTION__), 0 : 0))
;
2330 }
2331 break;
2332
2333 case NOTE_INSN_DELETED_LABEL:
2334 /* Emit the label. We may have deleted the CODE_LABEL because
2335 the label could be proved to be unreachable, though still
2336 referenced (in the form of having its address taken). */
2337 ASM_OUTPUT_DEBUG_LABEL (file, "L", CODE_LABEL_NUMBER (insn))(*targetm.asm_out.internal_label) (file, "L", (((insn)->u.
fld[5]).rt_int))
;
2338 break;
2339
2340 case NOTE_INSN_DELETED_DEBUG_LABEL:
2341 /* Similarly, but need to use different namespace for it. */
2342 if (CODE_LABEL_NUMBER (insn)(((insn)->u.fld[5]).rt_int) != -1)
2343 ASM_OUTPUT_DEBUG_LABEL (file, "LDL", CODE_LABEL_NUMBER (insn))(*targetm.asm_out.internal_label) (file, "LDL", (((insn)->
u.fld[5]).rt_int))
;
2344 break;
2345
2346 case NOTE_INSN_VAR_LOCATION:
2347 if (!DECL_IGNORED_P (current_function_decl)((contains_struct_check ((current_function_decl), (TS_DECL_COMMON
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/final.cc"
, 2347, __FUNCTION__))->decl_common.ignored_flag)
)
2348 {
2349 debug_hooks->var_location (insn);
2350 set_next_view_needed (seen);
2351 }
2352 break;
2353
2354 case NOTE_INSN_BEGIN_STMT:
2355 gcc_checking_assert (cfun->debug_nonbind_markers)((void)(!((cfun + 0)->debug_nonbind_markers) ? fancy_abort
("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/final.cc"
, 2355, __FUNCTION__), 0 : 0))
;
2356 if (!DECL_IGNORED_P (current_function_decl)((contains_struct_check ((current_function_decl), (TS_DECL_COMMON
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/final.cc"
, 2356, __FUNCTION__))->decl_common.ignored_flag)
2357 && notice_source_line (insn, NULL__null))
2358 {
2359 output_source_line:
2360 (*debug_hooks->source_line) (last_linenum, last_columnnum,
2361 last_filename, last_discriminator,
2362 true);
2363 clear_next_view_needed (seen);
2364 }
2365 break;
2366
2367 case NOTE_INSN_INLINE_ENTRY:
2368 gcc_checking_assert (cfun->debug_nonbind_markers)((void)(!((cfun + 0)->debug_nonbind_markers) ? fancy_abort
("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/final.cc"
, 2368, __FUNCTION__), 0 : 0))
;
2369 if (!DECL_IGNORED_P (current_function_decl)((contains_struct_check ((current_function_decl), (TS_DECL_COMMON
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/final.cc"
, 2369, __FUNCTION__))->decl_common.ignored_flag)
2370 && notice_source_line (insn, NULL__null))
2371 {
2372 (*debug_hooks->inline_entry) (LOCATION_BLOCK((tree) ((IS_ADHOC_LOC ((((insn)->u.fld[3]).rt_uint))) ? get_data_from_adhoc_loc
(line_table, ((((insn)->u.fld[3]).rt_uint))) : __null))
2373 (NOTE_MARKER_LOCATION (insn))((tree) ((IS_ADHOC_LOC ((((insn)->u.fld[3]).rt_uint))) ? get_data_from_adhoc_loc
(line_table, ((((insn)->u.fld[3]).rt_uint))) : __null))
);
2374 goto output_source_line;
2375 }
2376 break;
2377
2378 default:
2379 gcc_unreachable ()(fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/final.cc"
, 2379, __FUNCTION__))
;
2380 break;
2381 }
2382 break;
2383
2384 case BARRIER:
2385 break;
2386
2387 case CODE_LABEL:
2388 /* The target port might emit labels in the output function for
2389 some insn, e.g. sh.cc output_branchy_insn. */
2390 if (CODE_LABEL_NUMBER (insn)(((insn)->u.fld[5]).rt_int) <= max_labelno)
4
Assuming 'max_labelno' is < field 'rt_int'
2391 {
2392 align_flags alignment = LABEL_TO_ALIGNMENT (insn)(label_align[(((insn)->u.fld[5]).rt_int) - min_labelno]);
2393 if (alignment.levels[0].log && NEXT_INSN (insn))
2394 {
2395#ifdef ASM_OUTPUT_MAX_SKIP_ALIGN
2396 /* Output both primary and secondary alignment. */
2397 ASM_OUTPUT_MAX_SKIP_ALIGN (file, alignment.levels[0].log,do { if ((alignment.levels[0].log) != 0) { if ((alignment.levels
[0].maxskip) == 0 || (alignment.levels[0].maxskip) >= (1 <<
(alignment.levels[0].log)) - 1) fprintf ((file), "\t.p2align %d\n"
, (alignment.levels[0].log)); else fprintf ((file), "\t.p2align %d,,%d\n"
, (alignment.levels[0].log), (alignment.levels[0].maxskip)); }
} while (0)
2398 alignment.levels[0].maxskip)do { if ((alignment.levels[0].log) != 0) { if ((alignment.levels
[0].maxskip) == 0 || (alignment.levels[0].maxskip) >= (1 <<
(alignment.levels[0].log)) - 1) fprintf ((file), "\t.p2align %d\n"
, (alignment.levels[0].log)); else fprintf ((file), "\t.p2align %d,,%d\n"
, (alignment.levels[0].log), (alignment.levels[0].maxskip)); }
} while (0)
;
2399 ASM_OUTPUT_MAX_SKIP_ALIGN (file, alignment.levels[1].log,do { if ((alignment.levels[1].log) != 0) { if ((alignment.levels
[1].maxskip) == 0 || (alignment.levels[1].maxskip) >= (1 <<
(alignment.levels[1].log)) - 1) fprintf ((file), "\t.p2align %d\n"
, (alignment.levels[1].log)); else fprintf ((file), "\t.p2align %d,,%d\n"
, (alignment.levels[1].log), (alignment.levels[1].maxskip)); }
} while (0)
2400 alignment.levels[1].maxskip)do { if ((alignment.levels[1].log) != 0) { if ((alignment.levels
[1].maxskip) == 0 || (alignment.levels[1].maxskip) >= (1 <<
(alignment.levels[1].log)) - 1) fprintf ((file), "\t.p2align %d\n"
, (alignment.levels[1].log)); else fprintf ((file), "\t.p2align %d,,%d\n"
, (alignment.levels[1].log), (alignment.levels[1].maxskip)); }
} while (0)
;
2401#else
2402#ifdef ASM_OUTPUT_ALIGN_WITH_NOP
2403 ASM_OUTPUT_ALIGN_WITH_NOP (file, alignment.levels[0].log);
2404#else
2405 ASM_OUTPUT_ALIGN (file, alignment.levels[0].log)if ((alignment.levels[0].log) != 0) fprintf ((file), "\t.align %d\n"
, 1 << (alignment.levels[0].log))
;
2406#endif
2407#endif
2408 }
2409 }
2410 CC_STATUS_INIT;
2411
2412 if (!DECL_IGNORED_P (current_function_decl)((contains_struct_check ((current_function_decl), (TS_DECL_COMMON
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/final.cc"
, 2412, __FUNCTION__))->decl_common.ignored_flag)
&& LABEL_NAME (insn)(((insn)->u.fld[6]).rt_str))
5
Assuming field 'ignored_flag' is not equal to 0
2413 debug_hooks->label (as_a <rtx_code_label *> (insn));
2414
2415 app_disable ();
2416
2417 /* If this label is followed by a jump-table, make sure we put
2418 the label in the read-only section. Also possibly write the
2419 label and jump table together. */
2420 table = jump_table_for_label (as_a <rtx_code_label *> (insn));
6
Calling 'jump_table_for_label'
25
Returning from 'jump_table_for_label'
2421 if (table
25.1
'table' is non-null
)
2422 {
2423#if defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC)
2424 /* In this case, the case vector is being moved by the
2425 target, so don't output the label at all. Leave that
2426 to the back end macros. */
2427#else
2428 if (! JUMP_TABLES_IN_TEXT_SECTION(global_options.x_flag_pic && !(((global_options.x_ix86_isa_flags
& (1UL << 1)) != 0) || 1))
)
26
Assuming field 'x_flag_pic' is 0
27
Taking true branch
2429 {
2430 int log_align;
2431
2432 switch_to_section (targetm.asm_out.function_rodata_section
2433 (current_function_decl,
2434 jumptable_relocatable ()));
2435
2436#ifdef ADDR_VEC_ALIGN
2437 log_align = ADDR_VEC_ALIGN (table)final_addr_vec_align (table);
28
Calling 'final_addr_vec_align'
38
Returning from 'final_addr_vec_align'
39
The value -1 is assigned to 'log_align'
2438#else
2439 log_align = exact_log2 (BIGGEST_ALIGNMENT(((global_options.x_target_flags & (1U << 12)) != 0
) ? 32 : (((global_options.x_ix86_isa_flags & (1UL <<
15)) != 0) ? 512 : (((global_options.x_ix86_isa_flags & (
1UL << 8)) != 0) ? 256 : 128)))
/ BITS_PER_UNIT(8));
2440#endif
2441 ASM_OUTPUT_ALIGN (file, log_align)if ((log_align) != 0) fprintf ((file), "\t.align %d\n", 1 <<
(log_align))
;
40
Taking true branch
41
The result of the left shift is undefined because the right operand is negative
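This is the defect the report flags: along the path shown, final_addr_vec_align returned -1, so the expansion of ASM_OUTPUT_ALIGN passes the if ((log_align) != 0) guard and evaluates 1 << (log_align) with a negative shift count, which is undefined behaviour. A minimal guard at this call site, offered only as a hedged sketch of one possible fix and not as the change adopted upstream, would be to align only for positive values:

/* Hypothetical guard (sketch only): skip the .align directive unless the
   log2 alignment is strictly positive, so the 1 << (log_align) in the
   macro expansion never sees a negative shift count.  */
if (log_align > 0)
  ASM_OUTPUT_ALIGN (file, log_align);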
2442 }
2443 else
2444 switch_to_section (current_function_section ());
2445
2446#ifdef ASM_OUTPUT_CASE_LABEL
2447 ASM_OUTPUT_CASE_LABEL (file, "L", CODE_LABEL_NUMBER (insn), table)do { if ((2) != 0) fprintf (((file)), "\t.align %d\n", 1 <<
(2)); (*targetm.asm_out.internal_label) (file, "L", (((insn)
->u.fld[5]).rt_int)); } while (0)
;
2448#else
2449 targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (insn)(((insn)->u.fld[5]).rt_int));
2450#endif
2451#endif
2452 break;
2453 }
2454 if (LABEL_ALT_ENTRY_P (insn)(__extension__ ({ __typeof (insn) const _label = (insn); if (
! (((enum rtx_code) (_label)->code) == CODE_LABEL)) rtl_check_failed_flag
("LABEL_KIND", _label, "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/final.cc"
, 2454, __FUNCTION__); (enum label_kind) ((_label->jump <<
1) | _label->call); }) != LABEL_NORMAL)
)
2455 output_alternate_entry_point (file, insn);
2456 else
2457 targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (insn)(((insn)->u.fld[5]).rt_int));
2458 break;
2459
2460 default:
2461 {
2462 rtx body = PATTERN (insn);
2463 int insn_code_number;
2464 const char *templ;
2465 bool is_stmt, *is_stmt_p;
2466
2467 if (MAY_HAVE_DEBUG_MARKER_INSNSglobal_options.x_debug_nonbind_markers_p && cfun(cfun + 0)->debug_nonbind_markers)
2468 {
2469 is_stmt = false;
2470 is_stmt_p = NULL__null;
2471 }
2472 else
2473 is_stmt_p = &is_stmt;
2474
2475 /* Reset this early so it is correct for ASM statements. */
2476 current_insn_predicate = NULL_RTX(rtx) 0;
2477
2478 /* An INSN, JUMP_INSN or CALL_INSN.
2479 First check for special kinds that recog doesn't recognize. */
2480
2481 if (GET_CODE (body)((enum rtx_code) (body)->code) == USE /* These are just declarations. */
2482 || GET_CODE (body)((enum rtx_code) (body)->code) == CLOBBER)
2483 break;
2484
2485 /* Detect insns that are really jump-tables
2486 and output them as such. */
2487
2488 if (JUMP_TABLE_DATA_P (insn)(((enum rtx_code) (insn)->code) == JUMP_TABLE_DATA))
2489 {
2490#if !(defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC))
2491 int vlen, idx;
2492#endif
2493
2494 if (! JUMP_TABLES_IN_TEXT_SECTION(global_options.x_flag_pic && !(((global_options.x_ix86_isa_flags
& (1UL << 1)) != 0) || 1))
)
2495 switch_to_section (targetm.asm_out.function_rodata_section
2496 (current_function_decl,
2497 jumptable_relocatable ()));
2498 else
2499 switch_to_section (current_function_section ());
2500
2501 app_disable ();
2502
2503#if defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC)
2504 if (GET_CODE (body)((enum rtx_code) (body)->code) == ADDR_VEC)
2505 {
2506#ifdef ASM_OUTPUT_ADDR_VEC
2507 ASM_OUTPUT_ADDR_VEC (PREV_INSN (insn), body);
2508#else
2509 gcc_unreachable ()(fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/final.cc"
, 2509, __FUNCTION__))
;
2510#endif
2511 }
2512 else
2513 {
2514#ifdef ASM_OUTPUT_ADDR_DIFF_VEC
2515 ASM_OUTPUT_ADDR_DIFF_VEC (PREV_INSN (insn), body);
2516#else
2517 gcc_unreachable ()(fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/final.cc"
, 2517, __FUNCTION__))
;
2518#endif
2519 }
2520#else
2521 vlen = XVECLEN (body, GET_CODE (body) == ADDR_DIFF_VEC)(((((body)->u.fld[((enum rtx_code) (body)->code) == ADDR_DIFF_VEC
]).rt_rtvec))->num_elem)
;
2522 for (idx = 0; idx < vlen; idx++)
2523 {
2524 if (GET_CODE (body)((enum rtx_code) (body)->code) == ADDR_VEC)
2525 {
2526#ifdef ASM_OUTPUT_ADDR_VEC_ELT
2527 ASM_OUTPUT_ADDR_VEC_ELTix86_output_addr_vec_elt ((file), ((((((((((((body)->u.fld
[0]).rt_rtvec))->elem[idx]))->u.fld[0]).rt_rtx))->u.
fld[5]).rt_int)))
2528 (file, CODE_LABEL_NUMBER (XEXP (XVECEXP (body, 0, idx), 0)))ix86_output_addr_vec_elt ((file), ((((((((((((body)->u.fld
[0]).rt_rtvec))->elem[idx]))->u.fld[0]).rt_rtx))->u.
fld[5]).rt_int)))
;
2529#else
2530 gcc_unreachable ()(fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/final.cc"
, 2530, __FUNCTION__))
;
2531#endif
2532 }
2533 else
2534 {
2535#ifdef ASM_OUTPUT_ADDR_DIFF_ELT
2536 ASM_OUTPUT_ADDR_DIFF_ELTix86_output_addr_diff_elt ((file), ((((((((((((body)->u.fld
[1]).rt_rtvec))->elem[idx]))->u.fld[0]).rt_rtx))->u.
fld[5]).rt_int)), ((((((((((body)->u.fld[0]).rt_rtx))->
u.fld[0]).rt_rtx))->u.fld[5]).rt_int)))
2537 (file,ix86_output_addr_diff_elt ((file), ((((((((((((body)->u.fld
[1]).rt_rtvec))->elem[idx]))->u.fld[0]).rt_rtx))->u.
fld[5]).rt_int)), ((((((((((body)->u.fld[0]).rt_rtx))->
u.fld[0]).rt_rtx))->u.fld[5]).rt_int)))
2538 body,ix86_output_addr_diff_elt ((file), ((((((((((((body)->u.fld
[1]).rt_rtvec))->elem[idx]))->u.fld[0]).rt_rtx))->u.
fld[5]).rt_int)), ((((((((((body)->u.fld[0]).rt_rtx))->
u.fld[0]).rt_rtx))->u.fld[5]).rt_int)))
2539 CODE_LABEL_NUMBER (XEXP (XVECEXP (body, 1, idx), 0)),ix86_output_addr_diff_elt ((file), ((((((((((((body)->u.fld
[1]).rt_rtvec))->elem[idx]))->u.fld[0]).rt_rtx))->u.
fld[5]).rt_int)), ((((((((((body)->u.fld[0]).rt_rtx))->
u.fld[0]).rt_rtx))->u.fld[5]).rt_int)))
2540 CODE_LABEL_NUMBER (XEXP (XEXP (body, 0), 0)))ix86_output_addr_diff_elt ((file), ((((((((((((body)->u.fld
[1]).rt_rtvec))->elem[idx]))->u.fld[0]).rt_rtx))->u.
fld[5]).rt_int)), ((((((((((body)->u.fld[0]).rt_rtx))->
u.fld[0]).rt_rtx))->u.fld[5]).rt_int)))
;
2541#else
2542 gcc_unreachable ()(fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/final.cc"
, 2542, __FUNCTION__))
;
2543#endif
2544 }
2545 }
2546#ifdef ASM_OUTPUT_CASE_END
2547 ASM_OUTPUT_CASE_END (file,
2548 CODE_LABEL_NUMBER (PREV_INSN (insn))(((PREV_INSN (insn))->u.fld[5]).rt_int),
2549 insn);
2550#endif
2551#endif
2552
2553 switch_to_section (current_function_section ());
2554
2555 if (debug_variable_location_viewsglobal_options.x_debug_variable_location_views
2556 && !DECL_IGNORED_P (current_function_decl)((contains_struct_check ((current_function_decl), (TS_DECL_COMMON
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/final.cc"
, 2556, __FUNCTION__))->decl_common.ignored_flag)
)
2557 debug_hooks->var_location (insn);
2558
2559 break;
2560 }
2561 /* Output this line note if it is the first or the last line
2562 note in a row. */
2563 if (!DECL_IGNORED_P (current_function_decl)((contains_struct_check ((current_function_decl), (TS_DECL_COMMON
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/final.cc"
, 2563, __FUNCTION__))->decl_common.ignored_flag)
2564 && notice_source_line (insn, is_stmt_p))
2565 {
2566 if (flag_verbose_asmglobal_options.x_flag_verbose_asm)
2567 asm_show_source (last_filename, last_linenum);
2568 (*debug_hooks->source_line) (last_linenum, last_columnnum,
2569 last_filename, last_discriminator,
2570 is_stmt);
2571 clear_next_view_needed (seen);
2572 }
2573 else
2574 maybe_output_next_view (seen);
2575
2576 gcc_checking_assert (!DEBUG_INSN_P (insn))((void)(!(!(((enum rtx_code) (insn)->code) == DEBUG_INSN))
? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/final.cc"
, 2576, __FUNCTION__), 0 : 0))
;
2577
2578 if (GET_CODE (body)((enum rtx_code) (body)->code) == PARALLEL
2579 && GET_CODE (XVECEXP (body, 0, 0))((enum rtx_code) ((((((body)->u.fld[0]).rt_rtvec))->elem
[0]))->code)
== ASM_INPUT)
2580 body = XVECEXP (body, 0, 0)(((((body)->u.fld[0]).rt_rtvec))->elem[0]);
2581
2582 if (GET_CODE (body)((enum rtx_code) (body)->code) == ASM_INPUT)
2583 {
2584 const char *string = XSTR (body, 0)(((body)->u.fld[0]).rt_str);
2585
2586 /* There's no telling what that did to the condition codes. */
2587 CC_STATUS_INIT;
2588
2589 if (string[0])
2590 {
2591 expanded_location loc;
2592
2593 app_enable ();
2594 loc = expand_location (ASM_INPUT_SOURCE_LOCATION (body)(((body)->u.fld[1]).rt_uint));
2595 if (*loc.file && loc.line)
2596 fprintf (asm_out_file, "%s %i \"%s\" 1\n",
2597 ASM_COMMENT_START"#", loc.line, loc.file);
2598 fprintf (asm_out_file, "\t%s\n", string);
2599#if HAVE_AS_LINE_ZERO1
2600 if (*loc.file && loc.line)
2601 fprintf (asm_out_file, "%s 0 \"\" 2\n", ASM_COMMENT_START"#");
2602#endif
2603 }
2604 break;
2605 }
2606
2607 /* Detect `asm' construct with operands. */
2608 if (asm_noperands (body) >= 0)
2609 {
2610 unsigned int noperands = asm_noperands (body);
2611 rtx *ops = XALLOCAVEC (rtx, noperands)((rtx *) __builtin_alloca(sizeof (rtx) * (noperands)));
2612 const char *string;
2613 location_t loc;
2614 expanded_location expanded;
2615
2616 /* There's no telling what that did to the condition codes. */
2617 CC_STATUS_INIT;
2618
2619 /* Get out the operand values. */
2620 string = decode_asm_operands (body, ops, NULL__null, NULL__null, NULL__null, &loc);
2621 /* Inhibit dying on what would otherwise be compiler bugs. */
2622 insn_noperands = noperands;
2623 this_is_asm_operands = insn;
2624 expanded = expand_location (loc);
2625
2626#ifdef FINAL_PRESCAN_INSN
2627 FINAL_PRESCAN_INSN (insn, ops, insn_noperands);
2628#endif
2629
2630 /* Output the insn using them. */
2631 if (string[0])
2632 {
2633 app_enable ();
2634 if (expanded.file && expanded.line)
2635 fprintf (asm_out_file, "%s %i \"%s\" 1\n",
2636 ASM_COMMENT_START"#", expanded.line, expanded.file);
2637 output_asm_insn (string, ops);
2638#if HAVE_AS_LINE_ZERO1
2639 if (expanded.file && expanded.line)
2640 fprintf (asm_out_file, "%s 0 \"\" 2\n", ASM_COMMENT_START"#");
2641#endif
2642 }
2643
2644 if (targetm.asm_out.final_postscan_insn)
2645 targetm.asm_out.final_postscan_insn (file, insn, ops,
2646 insn_noperands);
2647
2648 this_is_asm_operands = 0;
2649 break;
2650 }
2651
2652 app_disable ();
2653
2654 if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (body))
2655 {
2656 /* A delayed-branch sequence */
2657 int i;
2658
2659 final_sequence = seq;
2660
2661 /* The first insn in this SEQUENCE might be a JUMP_INSN that will
2662 force the restoration of a comparison that was previously
2663 thought unnecessary. If that happens, cancel this sequence
2664 and cause that insn to be restored. */
2665
2666 next = final_scan_insn (seq->insn (0), file, 0, 1, seen);
2667 if (next != seq->insn (1))
2668 {
2669 final_sequence = 0;
2670 return next;
2671 }
2672
2673 for (i = 1; i < seq->len (); i++)
2674 {
2675 rtx_insn *insn = seq->insn (i);
2676 rtx_insn *next = NEXT_INSN (insn);
2677 /* We loop in case any instruction in a delay slot gets
2678 split. */
2679 do
2680 insn = final_scan_insn (insn, file, 0, 1, seen);
2681 while (insn != next);
2682 }
2683#ifdef DBR_OUTPUT_SEQEND
2684 DBR_OUTPUT_SEQEND (file);
2685#endif
2686 final_sequence = 0;
2687
2688 /* If the insn requiring the delay slot was a CALL_INSN, the
2689 insns in the delay slot are actually executed before the
2690 called function. Hence we don't preserve any CC-setting
2691 actions in these insns and the CC must be marked as being
2692 clobbered by the function. */
2693 if (CALL_P (seq->insn (0))(((enum rtx_code) (seq->insn (0))->code) == CALL_INSN))
2694 {
2695 CC_STATUS_INIT;
2696 }
2697 break;
2698 }
2699
2700 /* We have a real machine instruction as rtl. */
2701
2702 body = PATTERN (insn);
2703
2704 /* Do machine-specific peephole optimizations if desired. */
2705
2706 if (HAVE_peephole0 && optimize_p && !flag_no_peepholeglobal_options.x_flag_no_peephole && !nopeepholes)
2707 {
2708 rtx_insn *next = peephole (insn);
2709 /* When peepholing, if there were notes within the peephole,
2710 emit them before the peephole. */
2711 if (next != 0 && next != NEXT_INSN (insn))
2712 {
2713 rtx_insn *note, *prev = PREV_INSN (insn);
2714
2715 for (note = NEXT_INSN (insn); note != next;
2716 note = NEXT_INSN (note))
2717 final_scan_insn (note, file, optimize_p, nopeepholes, seen);
2718
2719 /* Put the notes in the proper position for a later
2720 rescan. For example, the SH target can do this
2721 when generating a far jump in a delayed branch
2722 sequence. */
2723 note = NEXT_INSN (insn);
2724 SET_PREV_INSN (note) = prev;
2725 SET_NEXT_INSN (prev) = note;
2726 SET_NEXT_INSN (PREV_INSN (next)) = insn;
2727 SET_PREV_INSN (insn) = PREV_INSN (next);
2728 SET_NEXT_INSN (insn) = next;
2729 SET_PREV_INSN (next) = insn;
2730 }
2731
2732 /* PEEPHOLE might have changed this. */
2733 body = PATTERN (insn);
2734 }
2735
2736 /* Try to recognize the instruction.
2737 If successful, verify that the operands satisfy the
2738 constraints for the instruction. Crash if they don't,
2739 since `reload' should have changed them so that they do. */
2740
2741 insn_code_number = recog_memoized (insn);
2742 cleanup_subreg_operands (insn);
2743
2744 /* Dump the insn in the assembly for debugging (-dAP).
2745 If the final dump is requested as slim RTL, dump slim
2746 RTL to the assembly file also. */
2747 if (flag_dump_rtl_in_asmglobal_options.x_flag_dump_rtl_in_asm)
2748 {
2749 print_rtx_head = ASM_COMMENT_START"#";
2750 if (! (dump_flags & TDF_SLIM))
2751 print_rtl_single (asm_out_file, insn);
2752 else
2753 dump_insn_slim (asm_out_file, insn);
2754 print_rtx_head = "";
2755 }
2756
2757 if (! constrain_operands_cached (insn, 1))
2758 fatal_insn_not_found (insn)_fatal_insn_not_found (insn, "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/final.cc"
, 2758, __FUNCTION__)
;
2759
2760 /* Some target machines need to prescan each insn before
2761 it is output. */
2762
2763#ifdef FINAL_PRESCAN_INSN
2764 FINAL_PRESCAN_INSN (insn, recog_data.operand, recog_data.n_operands);
2765#endif
2766
2767 if (targetm.have_conditional_execution ()
2768 && GET_CODE (PATTERN (insn))((enum rtx_code) (PATTERN (insn))->code) == COND_EXEC)
2769 current_insn_predicate = COND_EXEC_TEST (PATTERN (insn))(((PATTERN (insn))->u.fld[0]).rt_rtx);
2770
2771 current_output_insn = debug_insn = insn;
2772
2773 /* Find the proper template for this insn. */
2774 templ = get_insn_template (insn_code_number, insn);
2775
2776 /* If the C code returns 0, it means that it is a jump insn
2777 which follows a deleted test insn, and that test insn
2778 needs to be reinserted. */
2779 if (templ == 0)
2780 {
2781 rtx_insn *prev;
2782
2783 gcc_assert (prev_nonnote_insn (insn) == last_ignored_compare)((void)(!(prev_nonnote_insn (insn) == last_ignored_compare) ?
fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/final.cc"
, 2783, __FUNCTION__), 0 : 0))
;
2784
2785 /* We have already processed the notes between the setter and
2786 the user. Make sure we don't process them again; this is
2787 particularly important if one of the notes is a block
2788 scope note or an EH note. */
2789 for (prev = insn;
2790 prev != last_ignored_compare;
2791 prev = PREV_INSN (prev))
2792 {
2793 if (NOTE_P (prev)(((enum rtx_code) (prev)->code) == NOTE))
2794 delete_insn (prev); /* Use delete_note. */
2795 }
2796
2797 return prev;
2798 }
2799
2800 /* If the template is the string "#", it means that this insn must
2801 be split. */
2802 if (templ[0] == '#' && templ[1] == '\0')
2803 {
2804 rtx_insn *new_rtx = try_split (body, insn, 0);
2805
2806 /* If we didn't split the insn, go away. */
2807 if (new_rtx == insn && PATTERN (new_rtx) == body)
2808 fatal_insn ("could not split insn", insn)_fatal_insn ("could not split insn", insn, "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/final.cc"
, 2808, __FUNCTION__)
;
2809
2810 /* If we have a length attribute, this instruction should have
2811 been split in shorten_branches, to ensure that we would have
2812 valid length info for the splitees. */
2813 gcc_assert (!HAVE_ATTR_length)((void)(!(!1) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/final.cc"
, 2813, __FUNCTION__), 0 : 0))
;
2814
2815 return new_rtx;
2816 }
2817
2818 /* ??? This will put the directives in the wrong place if
2819 get_insn_template outputs assembly directly. However, calling it
2820 before get_insn_template breaks if the insn is split. */
2821 if (targetm.asm_out.unwind_emit_before_insn
2822 && targetm.asm_out.unwind_emit)
2823 targetm.asm_out.unwind_emit (asm_out_file, insn);
2824
2825 rtx_call_insn *call_insn = dyn_cast <rtx_call_insn *> (insn);
2826 if (call_insn != NULL__null)
2827 {
2828 rtx x = call_from_call_insn (call_insn);
2829 x = XEXP (x, 0)(((x)->u.fld[0]).rt_rtx);
2830 if (x && MEM_P (x)(((enum rtx_code) (x)->code) == MEM) && GET_CODE (XEXP (x, 0))((enum rtx_code) ((((x)->u.fld[0]).rt_rtx))->code) == SYMBOL_REF)
2831 {
2832 tree t;
2833 x = XEXP (x, 0)(((x)->u.fld[0]).rt_rtx);
2834 t = SYMBOL_REF_DECL (x)((__extension__ ({ __typeof ((x)) const _rtx = ((x)); if (((enum
rtx_code) (_rtx)->code) != SYMBOL_REF) rtl_check_failed_flag
("CONSTANT_POOL_ADDRESS_P", _rtx, "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/final.cc"
, 2834, __FUNCTION__); _rtx; })->unchanging) ? __null : ((
((x))->u.fld[1]).rt_tree))
;
2835 if (t)
2836 assemble_external (t);
2837 }
2838 }
2839
2840 /* Output assembler code from the template. */
2841 output_asm_insn (templ, recog_data.operand);
2842
2843 /* Some target machines need to postscan each insn after
2844 it is output. */
2845 if (targetm.asm_out.final_postscan_insn)
2846 targetm.asm_out.final_postscan_insn (file, insn, recog_data.operand,
2847 recog_data.n_operands);
2848
2849 if (!targetm.asm_out.unwind_emit_before_insn
2850 && targetm.asm_out.unwind_emit)
2851 targetm.asm_out.unwind_emit (asm_out_file, insn);
2852
2853 /* Let the debug info back-end know about this call. We do this only
2854 after the instruction has been emitted because labels that may be
2855 created to reference the call instruction must appear after it. */
2856 if ((debug_variable_location_viewsglobal_options.x_debug_variable_location_views || call_insn != NULL__null)
2857 && !DECL_IGNORED_P (current_function_decl)((contains_struct_check ((current_function_decl), (TS_DECL_COMMON
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/final.cc"
, 2857, __FUNCTION__))->decl_common.ignored_flag)
)
2858 debug_hooks->var_location (insn);
2859
2860 current_output_insn = debug_insn = 0;
2861 }
2862 }
2863 return NEXT_INSN (insn);
2864}
2865
2866/* This is a wrapper around final_scan_insn_1 that allows ports to
2867 call it recursively without a known value for SEEN. The value is
2868 saved at the outermost call, and recovered for recursive calls.
2869 Recursive calls MUST pass NULL, or the same pointer if they can
2870 otherwise get to it. */
2871
2872rtx_insn *
2873final_scan_insn (rtx_insn *insn, FILE *file, int optimize_p,
2874 int nopeepholes, int *seen)
2875{
2876 static int *enclosing_seen;
2877 static int recursion_counter;
2878
2879 gcc_assert (seen || recursion_counter)((void)(!(seen || recursion_counter) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/final.cc"
, 2879, __FUNCTION__), 0 : 0))
;
2880 gcc_assert (!recursion_counter || !seen || seen == enclosing_seen)((void)(!(!recursion_counter || !seen || seen == enclosing_seen
) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/final.cc"
, 2880, __FUNCTION__), 0 : 0))
;
2881
2882 if (!recursion_counter++)
2883 enclosing_seen = seen;
2884 else if (!seen)
2885 seen = enclosing_seen;
2886
2887 rtx_insn *ret = final_scan_insn_1 (insn, file, optimize_p, nopeepholes, seen);
2888
2889 if (!--recursion_counter)
2890 enclosing_seen = NULL__null;
2891
2892 return ret;
2893}
2894
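/* Illustrative, standalone sketch (not part of final.cc): the pattern
   used by final_scan_insn above -- capture the caller's context at the
   outermost call and let recursive calls that pass NULL inherit it --
   reduced to a toy recursive walker.  All names below are hypothetical.  */

#include <cassert>
#include <cstddef>

static int *toy_enclosing_ctx;   /* context captured at recursion depth 0 */
static int toy_recursion_depth;

/* Accumulate VALUE, VALUE-1, ..., 0 into *CTX.  Recursive calls may
   pass NULL and reuse the context saved at the outermost call.  */
static void
toy_walk (int value, int *ctx)
{
  assert (ctx || toy_recursion_depth);

  if (!toy_recursion_depth++)
    toy_enclosing_ctx = ctx;
  else if (!ctx)
    ctx = toy_enclosing_ctx;

  *ctx += value;
  if (value > 0)
    toy_walk (value - 1, NULL);          /* NULL: reuse the saved context */

  if (!--toy_recursion_depth)
    toy_enclosing_ctx = NULL;
}

/* Usage sketch:
     int sum = 0;
     toy_walk (4, &sum);    -- afterwards sum == 4 + 3 + 2 + 1 + 0 == 10  */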
2895
2896
2897/* Map DECLs to instance discriminators. This is allocated and
2898 defined in ada/gcc-interface/trans.cc, when compiling with -gnateS.
2899 Mappings from this table are saved and restored for LTO, so
2900 link-time compilation will have this map set, at least in
2901 partitions containing at least one DECL with an associated instance
2902 discriminator. */
2903
2904decl_to_instance_map_t *decl_to_instance_map;
2905
2906/* Return the instance number assigned to DECL. */
2907
2908static inline int
2909map_decl_to_instance (const_tree decl)
2910{
2911 int *inst;
2912
2913 if (!decl_to_instance_map || !decl || !DECL_P (decl)(tree_code_type_tmpl <0>::tree_code_type[(int) (((enum tree_code
) (decl)->base.code))] == tcc_declaration)
)
2914 return 0;
2915
2916 inst = decl_to_instance_map->get (decl);
2917
2918 if (!inst)
2919 return 0;
2920
2921 return *inst;
2922}
2923
2924/* Return the discriminator to use, possibly derived from LOC. */
2925
2926static inline int
2927compute_discriminator (location_t loc)
2928{
2929 int discriminator;
2930
2931 if (!decl_to_instance_map)
2932 discriminator = get_discriminator_from_loc (loc);
2933 else
2934 {
2935 tree block = LOCATION_BLOCK (loc)((tree) ((IS_ADHOC_LOC (loc)) ? get_data_from_adhoc_loc (line_table
, (loc)) : __null))
;
2936
2937 while (block && TREE_CODE (block)((enum tree_code) (block)->base.code) == BLOCK
2938 && !inlined_function_outer_scope_p (block))
2939 block = BLOCK_SUPERCONTEXT (block)((tree_check ((block), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/final.cc"
, 2939, __FUNCTION__, (BLOCK)))->block.supercontext)
;
2940
2941 tree decl;
2942
2943 if (!block)
2944 decl = current_function_decl;
2945 else if (DECL_P (block)(tree_code_type_tmpl <0>::tree_code_type[(int) (((enum tree_code
) (block)->base.code))] == tcc_declaration)
)
2946 decl = block;
2947 else
2948 decl = block_ultimate_origin (block);
2949
2950 discriminator = map_decl_to_instance (decl);
2951 }
2952
2953 return discriminator;
2954}
2955
2956/* Return discriminator of the statement that produced this insn. */
2957int
2958insn_discriminator (const rtx_insn *insn)
2959{
2960 return compute_discriminator (INSN_LOCATION (insn));
2961}
2962
2963/* Return whether a source line note needs to be emitted before INSN.
2964 Sets IS_STMT to TRUE if the line should be marked as a possible
2965 breakpoint location. */
2966
2967static bool
2968notice_source_line (rtx_insn *insn, bool *is_stmt)
2969{
2970 const char *filename;
2971 int linenum, columnnum;
2972 int discriminator;
2973
2974 if (NOTE_MARKER_P (insn)((((enum rtx_code) (insn)->code) == NOTE) && ((((insn
)->u.fld[4]).rt_int) == NOTE_INSN_BEGIN_STMT || (((insn)->
u.fld[4]).rt_int) == NOTE_INSN_INLINE_ENTRY))
)
2975 {
2976 location_t loc = NOTE_MARKER_LOCATION (insn)(((insn)->u.fld[3]).rt_uint);
2977 expanded_location xloc = expand_location (loc);
2978 if (xloc.line == 0
2979 && (LOCATION_LOCUS (loc)((IS_ADHOC_LOC (loc)) ? get_location_from_adhoc_loc (line_table
, loc) : (loc))
== UNKNOWN_LOCATION((location_t) 0)
2980 || LOCATION_LOCUS (loc)((IS_ADHOC_LOC (loc)) ? get_location_from_adhoc_loc (line_table
, loc) : (loc))
== BUILTINS_LOCATION((location_t) 1)))
2981 return false;
2982
2983 filename = xloc.file;
2984 linenum = xloc.line;
2985 columnnum = xloc.column;
2986 discriminator = compute_discriminator (loc);
2987 force_source_line = true;
2988 }
2989 else if (override_filename)
2990 {
2991 filename = override_filename;
2992 linenum = override_linenum;
2993 columnnum = override_columnnum;
2994 discriminator = override_discriminator;
2995 }
2996 else if (INSN_HAS_LOCATION (insn))
2997 {
2998 expanded_location xloc = insn_location (insn);
2999 filename = xloc.file;
3000 linenum = xloc.line;
3001 columnnum = xloc.column;
3002 discriminator = insn_discriminator (insn);
3003 }
3004 else
3005 {
3006 filename = NULL__null;
3007 linenum = 0;
3008 columnnum = 0;
3009 discriminator = 0;
3010 }
3011
3012 if (filename == NULL__null)
3013 return false;
3014
3015 if (force_source_line
3016 || filename != last_filename
3017 || last_linenum != linenum
3018 || (debug_column_infoglobal_options.x_debug_column_info && last_columnnum != columnnum))
3019 {
3020 force_source_line = false;
3021 last_filename = filename;
3022 last_linenum = linenum;
3023 last_columnnum = columnnum;
3024 last_discriminator = discriminator;
3025 if (is_stmt)
3026 *is_stmt = true;
3027 high_block_linenum = MAX (last_linenum, high_block_linenum)((last_linenum) > (high_block_linenum) ? (last_linenum) : (
high_block_linenum))
;
3028 high_function_linenum = MAX (last_linenum, high_function_linenum)((last_linenum) > (high_function_linenum) ? (last_linenum)
: (high_function_linenum))
;
3029 return true;
3030 }
3031
3032 if (SUPPORTS_DISCRIMINATOR1 && last_discriminator != discriminator)
3033 {
3034 /* If the discriminator changed, but the line number did not,
3035 output the line table entry with is_stmt false so the
3036 debugger does not treat this as a breakpoint location. */
3037 last_discriminator = discriminator;
3038 if (is_stmt)
3039 *is_stmt = false;
3040 return true;
3041 }
3042
3043 return false;
3044}
3045
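/* Illustrative, standalone sketch (not part of final.cc): the decision
   notice_source_line makes above, reduced to plain values -- emit a
   line-table row when the location changes, and mark it as a statement
   boundary (IS_STMT) only when the line itself changed, not just the
   discriminator.  The column and override handling of the real function
   is omitted; all names are hypothetical.  */

#include <string>

struct toy_line_state
{
  std::string file;
  int line = 0;
  int discriminator = 0;
};

/* Return true if a row should be emitted for (FILE, LINE, DISCRIMINATOR),
   updating LAST and setting *IS_STMT the way notice_source_line does.  */
static bool
toy_notice_source_line (toy_line_state &last, const std::string &file,
                        int line, int discriminator, bool *is_stmt)
{
  if (file != last.file || line != last.line)
    {
      last.file = file;
      last.line = line;
      last.discriminator = discriminator;
      *is_stmt = true;               /* new line: a breakpoint location */
      return true;
    }
  if (discriminator != last.discriminator)
    {
      last.discriminator = discriminator;
      *is_stmt = false;              /* same line, different discriminator */
      return true;
    }
  return false;                      /* nothing changed: no row needed */
}

/* Usage sketch:
     toy_line_state last;
     bool is_stmt;
     toy_notice_source_line (last, "a.c", 3, 0, &is_stmt);   -- true, is_stmt
     toy_notice_source_line (last, "a.c", 3, 1, &is_stmt);   -- true, !is_stmt
     toy_notice_source_line (last, "a.c", 3, 1, &is_stmt);   -- false  */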
3046/* For each operand in INSN, simplify (subreg (reg)) so that it refers
3047 directly to the desired hard register. */
3048
3049void
3050cleanup_subreg_operands (rtx_insn *insn)
3051{
3052 int i;
3053 bool changed = false;
3054 extract_insn_cached (insn);
3055 for (i = 0; i < recog_data.n_operands; i++)
3056 {
3057 /* The following test cannot use recog_data.operand when testing
3058 for a SUBREG: the underlying object might have been changed
3059 already if we are inside a match_operator expression that
3060 matches the else clause. Instead we test the underlying
3061 expression directly. */
3062 if (GET_CODE (*recog_data.operand_loc[i])((enum rtx_code) (*recog_data.operand_loc[i])->code) == SUBREG)
3063 {
3064 recog_data.operand[i] = alter_subreg (recog_data.operand_loc[i], true);
3065 changed = true;
3066 }
3067 else if (GET_CODE (recog_data.operand[i])((enum rtx_code) (recog_data.operand[i])->code) == PLUS
3068 || GET_CODE (recog_data.operand[i])((enum rtx_code) (recog_data.operand[i])->code) == MULT
3069 || MEM_P (recog_data.operand[i])(((enum rtx_code) (recog_data.operand[i])->code) == MEM))
3070 recog_data.operand[i] = walk_alter_subreg (recog_data.operand_loc[i], &changed);
3071 }
3072
3073 for (i = 0; i < recog_data.n_dups; i++)
3074 {
3075 if (GET_CODE (*recog_data.dup_loc[i])((enum rtx_code) (*recog_data.dup_loc[i])->code) == SUBREG)
3076 {
3077 *recog_data.dup_loc[i] = alter_subreg (recog_data.dup_loc[i], true);
3078 changed = true;
3079 }
3080 else if (GET_CODE (*recog_data.dup_loc[i])((enum rtx_code) (*recog_data.dup_loc[i])->code) == PLUS
3081 || GET_CODE (*recog_data.dup_loc[i])((enum rtx_code) (*recog_data.dup_loc[i])->code) == MULT
3082 || MEM_P (*recog_data.dup_loc[i])(((enum rtx_code) (*recog_data.dup_loc[i])->code) == MEM))
3083 *recog_data.dup_loc[i] = walk_alter_subreg (recog_data.dup_loc[i], &changed);
3084 }
3085 if (changed)
3086 df_insn_rescan (insn);
3087}
3088
3089/* If X is a SUBREG, try to replace it with a REG or a MEM, based on
3090 the thing it is a subreg of. Do it anyway if FINAL_P. */
3091
3092rtx
3093alter_subreg (rtx *xp, bool final_p)
3094{
3095 rtx x = *xp;
3096 rtx y = SUBREG_REG (x)(((x)->u.fld[0]).rt_rtx);
3097
3098 /* simplify_subreg does not remove subreg from volatile references.
3099 We are required to. */
3100 if (MEM_P (y)(((enum rtx_code) (y)->code) == MEM))
3101 {
3102 poly_int64 offset = SUBREG_BYTE (x)(((x)->u.fld[1]).rt_subreg);
3103
3104 /* For paradoxical subregs on big-endian machines, SUBREG_BYTE
3105 contains 0 instead of the proper offset. See simplify_subreg. */
3106 if (paradoxical_subreg_p (x))
3107 offset = byte_lowpart_offset (GET_MODE (x)((machine_mode) (x)->mode), GET_MODE (y)((machine_mode) (y)->mode));
3108
3109 if (final_p)
3110 *xp = adjust_address (y, GET_MODE (x), offset)adjust_address_1 (y, ((machine_mode) (x)->mode), offset, 1
, 1, 0, 0)
;
3111 else
3112 *xp = adjust_address_nv (y, GET_MODE (x), offset)adjust_address_1 (y, ((machine_mode) (x)->mode), offset, 0
, 1, 0, 0)
;
3113 }
3114 else if (REG_P (y)(((enum rtx_code) (y)->code) == REG) && HARD_REGISTER_P (y)((((rhs_regno(y))) < 76)))
3115 {
3116 rtx new_rtx = simplify_subreg (GET_MODE (x)((machine_mode) (x)->mode), y, GET_MODE (y)((machine_mode) (y)->mode),
3117 SUBREG_BYTE (x)(((x)->u.fld[1]).rt_subreg));
3118
3119 if (new_rtx != 0)
3120 *xp = new_rtx;
3121 else if (final_p && REG_P (y)(((enum rtx_code) (y)->code) == REG))
3122 {
3123 /* Simplify_subreg can't handle some REG cases, but we have to. */
3124 unsigned int regno;
3125 poly_int64 offset;
3126
3127 regno = subreg_regno (x);
3128 if (subreg_lowpart_p (x))
3129 offset = byte_lowpart_offset (GET_MODE (x)((machine_mode) (x)->mode), GET_MODE (y)((machine_mode) (y)->mode));
3130 else
3131 offset = SUBREG_BYTE (x)(((x)->u.fld[1]).rt_subreg);
3132 *xp = gen_rtx_REG_offset (y, GET_MODE (x)((machine_mode) (x)->mode), regno, offset);
3133 }
3134 }
3135
3136 return *xp;
3137}
3138
3139/* Do alter_subreg on all the SUBREGs contained in X. */
3140
3141static rtx
3142walk_alter_subreg (rtx *xp, bool *changed)
3143{
3144 rtx x = *xp;
3145 switch (GET_CODE (x)((enum rtx_code) (x)->code))
3146 {
3147 case PLUS:
3148 case MULT:
3149 case AND:
3150 XEXP (x, 0)(((x)->u.fld[0]).rt_rtx) = walk_alter_subreg (&XEXP (x, 0)(((x)->u.fld[0]).rt_rtx), changed);
3151 XEXP (x, 1)(((x)->u.fld[1]).rt_rtx) = walk_alter_subreg (&XEXP (x, 1)(((x)->u.fld[1]).rt_rtx), changed);
3152 break;
3153
3154 case MEM:
3155 case ZERO_EXTEND:
3156 XEXP (x, 0)(((x)->u.fld[0]).rt_rtx) = walk_alter_subreg (&XEXP (x, 0)(((x)->u.fld[0]).rt_rtx), changed);
3157 break;
3158
3159 case SUBREG:
3160 *changed = true;
3161 return alter_subreg (xp, true);
3162
3163 default:
3164 break;
3165 }
3166
3167 return *xp;
3168}
3169
3170/* Report inconsistency between the assembler template and the operands.
3171 In an `asm', it's the user's fault; otherwise, the compiler's fault. */
3172
3173void
3174output_operand_lossage (const char *cmsgid, ...)
3175{
3176 char *fmt_string;
3177 char *new_message;
3178 const char *pfx_str;
3179 va_list ap;
3180
3181 va_start (ap, cmsgid)__builtin_va_start(ap, cmsgid);
3182
3183 pfx_str = this_is_asm_operands ? _("invalid 'asm': ")gettext ("invalid 'asm': ") : "output_operand: ";
3184 fmt_string = xasprintf ("%s%s", pfx_str, _(cmsgid)gettext (cmsgid));
3185 new_message = xvasprintf (fmt_string, ap);
3186
3187 if (this_is_asm_operands)
3188 error_for_asm (this_is_asm_operands, "%s", new_message);
3189 else
3190 internal_error ("%s", new_message);
3191
3192 free (fmt_string);
3193 free (new_message);
3194 va_end (ap)__builtin_va_end(ap);
3195}
3196
3197/* Output of assembler code from a template, and its subroutines. */
3198
3199/* Annotate the assembly with a comment describing the pattern and
3200 alternative used. */
3201
3202static void
3203output_asm_name (void)
3204{
3205 if (debug_insn)
3206 {
3207 fprintf (asm_out_file, "\t%s %d\t",
3208 ASM_COMMENT_START"#", INSN_UID (debug_insn));
3209
3210 fprintf (asm_out_file, "[c=%d",
3211 insn_cost (debug_insn, optimize_insn_for_speed_p ()));
3212 if (HAVE_ATTR_length1)
3213 fprintf (asm_out_file, " l=%d",
3214 get_attr_length (debug_insn));
3215 fprintf (asm_out_file, "] ");
3216
3217 int num = INSN_CODE (debug_insn)(((debug_insn)->u.fld[5]).rt_int);
3218 fprintf (asm_out_file, "%s", insn_data[num].name);
3219 if (insn_data[num].n_alternatives > 1)
3220 fprintf (asm_out_file, "/%d", which_alternative);
3221
3222 /* Clear this so only the first assembler insn
3223 of any rtl insn will get the special comment for -dp. */
3224 debug_insn = 0;
3225 }
3226}
3227
3228/* If OP is a REG or MEM and we can find a MEM_EXPR corresponding to it
3229 or its address, return that expr . Set *PADDRESSP to 1 if the expr
3230 corresponds to the address of the object and 0 if to the object. */
3231
3232static tree
3233get_mem_expr_from_op (rtx op, int *paddressp)
3234{
3235 tree expr;
3236 int inner_addressp;
3237
3238 *paddressp = 0;
3239
3240 if (REG_P (op)(((enum rtx_code) (op)->code) == REG))
3241 return REG_EXPR (op)(((&(op)->u.reg)->attrs) == 0 ? 0 : ((&(op)->
u.reg)->attrs)->decl)
;
3242 else if (!MEM_P (op)(((enum rtx_code) (op)->code) == MEM))
3243 return 0;
3244
3245 if (MEM_EXPR (op)(get_mem_attrs (op)->expr) != 0)
3246 return MEM_EXPR (op)(get_mem_attrs (op)->expr);
3247
3248 /* Otherwise we have an address, so indicate it and look at the address. */
3249 *paddressp = 1;
3250 op = XEXP (op, 0)(((op)->u.fld[0]).rt_rtx);
3251
3252 /* First check if we have a decl for the address, then look at the right side
3253 if it is a PLUS. Otherwise, strip off arithmetic and keep looking.
3254 But don't allow the address itself to be indirect. */
3255 if ((expr = get_mem_expr_from_op (op, &inner_addressp)) && ! inner_addressp)
3256 return expr;
3257 else if (GET_CODE (op)((enum rtx_code) (op)->code) == PLUS
3258 && (expr = get_mem_expr_from_op (XEXP (op, 1)(((op)->u.fld[1]).rt_rtx), &inner_addressp)))
3259 return expr;
3260
3261 while (UNARY_P (op)((rtx_class[(int) (((enum rtx_code) (op)->code))]) == RTX_UNARY
)
3262 || GET_RTX_CLASS (GET_CODE (op))(rtx_class[(int) (((enum rtx_code) (op)->code))]) == RTX_BIN_ARITH)
3263 op = XEXP (op, 0)(((op)->u.fld[0]).rt_rtx);
3264
3265 expr = get_mem_expr_from_op (op, &inner_addressp);
3266 return inner_addressp ? 0 : expr;
3267}
3268
3269/* Output operand names for assembler instructions. OPERANDS is the
3270 operand vector, OPORDER is the order to write the operands, and NOPS
3271 is the number of operands to write. */
3272
3273static void
3274output_asm_operand_names (rtx *operands, int *oporder, int nops)
3275{
3276 int wrote = 0;
3277 int i;
3278
3279 for (i = 0; i < nops; i++)
3280 {
3281 int addressp;
3282 rtx op = operands[oporder[i]];
3283 tree expr = get_mem_expr_from_op (op, &addressp);
3284
3285 fprintf (asm_out_file, "%c%s",
3286 wrote ? ',' : '\t', wrote ? "" : ASM_COMMENT_START"#");
3287 wrote = 1;
3288 if (expr)
3289 {
3290 fprintf (asm_out_file, "%s",
3291 addressp ? "*" : "");
3292 print_mem_expr (asm_out_file, expr);
3293 wrote = 1;
3294 }
3295 else if (REG_P (op)(((enum rtx_code) (op)->code) == REG) && ORIGINAL_REGNO (op)(__extension__ ({ __typeof ((op)) const _rtx = ((op)); if (((
enum rtx_code) (_rtx)->code) != REG) rtl_check_failed_flag
("ORIGINAL_REGNO", _rtx, "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/final.cc"
, 3295, __FUNCTION__); _rtx; })->u2.original_regno)
3296 && ORIGINAL_REGNO (op)(__extension__ ({ __typeof ((op)) const _rtx = ((op)); if (((
enum rtx_code) (_rtx)->code) != REG) rtl_check_failed_flag
("ORIGINAL_REGNO", _rtx, "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/final.cc"
, 3296, __FUNCTION__); _rtx; })->u2.original_regno)
!= REGNO (op)(rhs_regno(op)))
3297 fprintf (asm_out_file, " tmp%i", ORIGINAL_REGNO (op)(__extension__ ({ __typeof ((op)) const _rtx = ((op)); if (((
enum rtx_code) (_rtx)->code) != REG) rtl_check_failed_flag
("ORIGINAL_REGNO", _rtx, "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/final.cc"
, 3297, __FUNCTION__); _rtx; })->u2.original_regno)
);
3298 }
3299}
3300
3301#ifdef ASSEMBLER_DIALECT(global_options.x_ix86_asm_dialect)
3302/* Helper function to parse assembler dialects in the asm string.
3303 This is called from output_asm_insn and asm_fprintf. */
3304static const char *
3305do_assembler_dialects (const char *p, int *dialect)
3306{
3307 char c = *(p - 1);
3308
3309 switch (c)
3310 {
3311 case '{':
3312 {
3313 int i;
3314
3315 if (*dialect)
3316 output_operand_lossage ("nested assembly dialect alternatives");
3317 else
3318 *dialect = 1;
3319
3320 /* If we want the first dialect, do nothing. Otherwise, skip
3321 DIALECT_NUMBER of strings ending with '|'. */
3322 for (i = 0; i < dialect_number; i++)
3323 {
3324 while (*p && *p != '}')
3325 {
3326 if (*p == '|')
3327 {
3328 p++;
3329 break;
3330 }
3331
3332 /* Skip over any character after a percent sign. */
3333 if (*p == '%')
3334 p++;
3335 if (*p)
3336 p++;
3337 }
3338
3339 if (*p == '}')
3340 break;
3341 }
3342
3343 if (*p == '\0')
3344 output_operand_lossage ("unterminated assembly dialect alternative");
3345 }
3346 break;
3347
3348 case '|':
3349 if (*dialect)
3350 {
3351 /* Skip to close brace. */
3352 do
3353 {
3354 if (*p == '\0')
3355 {
3356 output_operand_lossage ("unterminated assembly dialect alternative");
3357 break;
3358 }
3359
3360 /* Skip over any character after a percent sign. */
3361 if (*p == '%' && p[1])
3362 {
3363 p += 2;
3364 continue;
3365 }
3366
3367 if (*p++ == '}')
3368 break;
3369 }
3370 while (1);
3371
3372 *dialect = 0;
3373 }
3374 else
3375 putc (c, asm_out_file);
3376 break;
3377
3378 case '}':
3379 if (! *dialect)
3380 putc (c, asm_out_file);
3381 *dialect = 0;
3382 break;
3383 default:
3384 gcc_unreachable ()(fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/final.cc"
, 3384, __FUNCTION__))
;
3385 }
3386
3387 return p;
3388}
3389#endif
3390
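/* Illustrative, standalone sketch (not part of final.cc): the
   "{alt0|alt1|...}" selection performed by do_assembler_dialects
   above, reduced to plain strings.  The real code must also step over
   %-escapes and diagnose unterminated constructs; this toy only picks
   the DIALECT-th '|'-separated alternative inside each brace group.
   All names are hypothetical.  */

#include <cstdio>

/* Copy template P to F, expanding each {a|b|...} group to its
   DIALECT-th alternative (0-based).  */
static void
toy_expand_dialect (FILE *f, const char *p, int dialect)
{
  while (*p)
    {
      if (*p != '{')
        {
          fputc (*p++, f);
          continue;
        }
      p++;                                /* skip '{' */
      for (int i = 0; i < dialect; i++)   /* skip unwanted alternatives */
        while (*p && *p != '}' && *p++ != '|')
          ;
      while (*p && *p != '}' && *p != '|')
        fputc (*p++, f);                  /* emit the chosen alternative */
      while (*p && *p != '}')
        p++;                              /* skip the rest of the group */
      if (*p == '}')
        p++;
    }
}

/* Usage sketch:
     toy_expand_dialect (stdout, "mov{l}\t{%1, %0|%0, %1}", 0);
   prints "movl\t%1, %0", while dialect 1 prints "mov\t%0, %1".  */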
3391/* Output text from TEMPLATE to the assembler output file,
3392 obeying %-directions to substitute operands taken from
3393 the vector OPERANDS.
3394
3395 %N (for N a digit) means print operand N in usual manner.
3396 %lN means require operand N to be a CODE_LABEL or LABEL_REF
3397 and print the label name with no punctuation.
3398 %cN means require operand N to be a constant
3399 and print the constant expression with no punctuation.
3400 %aN means expect operand N to be a memory address
3401 (not a memory reference!) and print a reference
3402 to that address.
3403 %nN means expect operand N to be a constant
3404 and print a constant expression for minus the value
3405 of the operand, with no other punctuation. */
3406
3407void
3408output_asm_insn (const char *templ, rtx *operands)
3409{
3410 const char *p;
3411 int c;
3412#ifdef ASSEMBLER_DIALECT(global_options.x_ix86_asm_dialect)
3413 int dialect = 0;
3414#endif
3415 int oporder[MAX_RECOG_OPERANDS30];
3416 char opoutput[MAX_RECOG_OPERANDS30];
3417 int ops = 0;
3418
3419 /* An insn may return a null string template
3420 in a case where no assembler code is needed. */
3421 if (*templ == 0)
3422 return;
3423
3424 memset (opoutput, 0, sizeof opoutput);
3425 p = templ;
3426 putc ('\t', asm_out_file);
3427
3428#ifdef ASM_OUTPUT_OPCODE
3429 ASM_OUTPUT_OPCODE (asm_out_file, p){ if (((p))[0] == '%' && ((p))[1] == 'v') ((p)) += ((
global_options.x_ix86_isa_flags & (1UL << 8)) != 0)
? 1 : 2; }
;
3430#endif
3431
3432 while ((c = *p++))
3433 switch (c)
3434 {
3435 case '\n':
3436 if (flag_verbose_asmglobal_options.x_flag_verbose_asm)
3437 output_asm_operand_names (operands, oporder, ops);
3438 if (flag_print_asm_nameglobal_options.x_flag_print_asm_name)
3439 output_asm_name ();
3440
3441 ops = 0;
3442 memset (opoutput, 0, sizeof opoutput);
3443
3444 putc (c, asm_out_file);
3445#ifdef ASM_OUTPUT_OPCODE
3446 while ((c = *p) == '\t')
3447 {
3448 putc (c, asm_out_file);
3449 p++;
3450 }
3451 ASM_OUTPUT_OPCODE (asm_out_file, p){ if (((p))[0] == '%' && ((p))[1] == 'v') ((p)) += ((
global_options.x_ix86_isa_flags & (1UL << 8)) != 0)
? 1 : 2; }
;
3452#endif
3453 break;
3454
3455#ifdef ASSEMBLER_DIALECT(global_options.x_ix86_asm_dialect)
3456 case '{':
3457 case '}':
3458 case '|':
3459 p = do_assembler_dialects (p, &dialect);
3460 break;
3461#endif
3462
3463 case '%':
3464 /* %% outputs a single %. %{, %} and %| print {, } and | respectively
3465 if ASSEMBLER_DIALECT defined and these characters have a special
3466 meaning as dialect delimiters. */
3467 if (*p == '%'
3468#ifdef ASSEMBLER_DIALECT(global_options.x_ix86_asm_dialect)
3469 || *p == '{' || *p == '}' || *p == '|'
3470#endif
3471 )
3472 {
3473 putc (*p, asm_out_file);
3474 p++;
3475 }
3476 /* %= outputs a number which is unique to each insn in the entire
3477 compilation. This is useful for making local labels that are
3478 referred to more than once in a given insn. */
3479 else if (*p == '=')
3480 {
3481 p++;
3482 fprintf (asm_out_file, "%d", insn_counter);
3483 }
3484 /* % followed by a letter and some digits
3485 outputs an operand in a special way depending on the letter.
3486 Letters `acln' are implemented directly.
3487 Other letters are passed to `output_operand' so that
3488 the TARGET_PRINT_OPERAND hook can define them. */
3489 else if (ISALPHA (*p)(_sch_istable[(*p) & 0xff] & (unsigned short)(_sch_isalpha
))
)
3490 {
3491 int letter = *p++;
3492 unsigned long opnum;
3493 char *endptr;
3494
3495 opnum = strtoul (p, &endptr, 10);
3496
3497 if (endptr == p)
3498 output_operand_lossage ("operand number missing "
3499 "after %%-letter");
3500 else if (this_is_asm_operands && opnum >= insn_noperands)
3501 output_operand_lossage ("operand number out of range");
3502 else if (letter == 'l')
3503 output_asm_label (operands[opnum]);
3504 else if (letter == 'a')
3505 output_address (VOIDmode((void) 0, E_VOIDmode), operands[opnum]);
3506 else if (letter == 'c')
3507 {
3508 if (CONSTANT_ADDRESS_P (operands[opnum])constant_address_p (operands[opnum]))
3509 output_addr_const (asm_out_file, operands[opnum]);
3510 else
3511 output_operand (operands[opnum], 'c');
3512 }
3513 else if (letter == 'n')
3514 {
3515 if (CONST_INT_P (operands[opnum])(((enum rtx_code) (operands[opnum])->code) == CONST_INT))
3516 fprintf (asm_out_file, HOST_WIDE_INT_PRINT_DEC"%" "l" "d",
3517 - INTVAL (operands[opnum])((operands[opnum])->u.hwint[0]));
3518 else
3519 {
3520 putc ('-', asm_out_file);
3521 output_addr_const (asm_out_file, operands[opnum]);
3522 }
3523 }
3524 else
3525 output_operand (operands[opnum], letter);
3526
3527 if (!opoutput[opnum])
3528 oporder[ops++] = opnum;
3529 opoutput[opnum] = 1;
3530
3531 p = endptr;
3532 c = *p;
3533 }
3534 /* % followed by a digit outputs an operand the default way. */
3535 else if (ISDIGIT (*p)(_sch_istable[(*p) & 0xff] & (unsigned short)(_sch_isdigit
))
)
3536 {
3537 unsigned long opnum;
3538 char *endptr;
3539
3540 opnum = strtoul (p, &endptr, 10);
3541 if (this_is_asm_operands && opnum >= insn_noperands)
3542 output_operand_lossage ("operand number out of range");
3543 else
3544 output_operand (operands[opnum], 0);
3545
3546 if (!opoutput[opnum])
3547 oporder[ops++] = opnum;
3548 opoutput[opnum] = 1;
3549
3550 p = endptr;
3551 c = *p;
3552 }
3553 /* % followed by punctuation: output something for that
3554 punctuation character alone, with no operand. The
3555 TARGET_PRINT_OPERAND hook decides what is actually done. */
3556 else if (targetm.asm_out.print_operand_punct_valid_p ((unsigned char) *p))
3557 output_operand (NULL_RTX(rtx) 0, *p++);
3558 else
3559 output_operand_lossage ("invalid %%-code");
3560 break;
3561
3562 default:
3563 putc (c, asm_out_file);
3564 }
3565
3566 /* Try to keep the asm a bit more readable. */
3567 if ((flag_verbose_asmglobal_options.x_flag_verbose_asm || flag_print_asm_nameglobal_options.x_flag_print_asm_name) && strlen (templ) < 9)
3568 putc ('\t', asm_out_file);
3569
3570 /* Write out the variable names for operands, if we know them. */
3571 if (flag_verbose_asmglobal_options.x_flag_verbose_asm)
3572 output_asm_operand_names (operands, oporder, ops);
3573 if (flag_print_asm_nameglobal_options.x_flag_print_asm_name)
3574 output_asm_name ();
3575
3576 putc ('\n', asm_out_file);
3577}
3578
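/* Illustrative, standalone sketch (not part of final.cc): the "%N
   substitutes operand N" convention documented before output_asm_insn
   above, reduced to plain strings.  The real routine works on rtx
   operands and supports the %l/%c/%a/%n letters, %=, dialects and
   target hooks; this toy handles only %N and %%.  All names are
   hypothetical.  */

#include <cctype>
#include <cstdio>
#include <cstdlib>

/* Write TEMPL to F, replacing %N with OPERANDS[N] and %% with '%'.  */
static void
toy_output_asm (FILE *f, const char *templ, const char *const *operands,
                long noperands)
{
  for (const char *p = templ; *p; p++)
    {
      if (*p != '%')
        {
          fputc (*p, f);
          continue;
        }
      if (p[1] == '%')
        {
          fputc ('%', f);
          p++;
        }
      else if (isdigit ((unsigned char) p[1]))
        {
          char *end;
          long n = strtol (p + 1, &end, 10);
          if (n < noperands)
            fputs (operands[n], f);      /* substitute operand N */
          p = end - 1;                   /* resume after the digits */
        }
      /* A stray '%' followed by anything else is dropped in this toy;
         output_asm_insn would report "invalid %%-code" instead.  */
    }
  fputc ('\n', f);
}

/* Usage sketch:
     const char *ops[] = { "%eax", "%ebx" };
     toy_output_asm (stdout, "\tmovl\t%1, %0", ops, 2);
   prints "\tmovl\t%ebx, %eax" plus a newline.  */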
3579/* Output a LABEL_REF, or a bare CODE_LABEL, as an assembler symbol. */
3580
3581void
3582output_asm_label (rtx x)
3583{
3584 char buf[256];
3585
3586 if (GET_CODE (x)((enum rtx_code) (x)->code) == LABEL_REF)
3587 x = label_ref_label (x);
3588 if (LABEL_P (x)(((enum rtx_code) (x)->code) == CODE_LABEL)
3589 || (NOTE_P (x)(((enum rtx_code) (x)->code) == NOTE)
3590 && NOTE_KIND (x)(((x)->u.fld[4]).rt_int) == NOTE_INSN_DELETED_LABEL))
3591 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (x))do { char *__p; (buf)[0] = '*'; (buf)[1] = '.'; __p = stpcpy (
&(buf)[2], "L"); sprint_ul (__p, (unsigned long) ((((x)->
u.fld[5]).rt_int))); } while (0)
;
3592 else
3593 output_operand_lossage ("'%%l' operand isn't a label");
3594
3595 assemble_name (asm_out_file, buf);
3596}
3597
3598/* Marks SYMBOL_REFs in x as referenced through use of assemble_external. */
3599
3600void
3601mark_symbol_refs_as_used (rtx x)
3602{
3603 subrtx_iterator::array_type array;
3604 FOR_EACH_SUBRTX (iter, array, x, ALL)for (subrtx_iterator iter (array, x, rtx_all_subrtx_bounds); !
iter.at_end (); iter.next ())
3605 {
3606 const_rtx x = *iter;
3607 if (GET_CODE (x)((enum rtx_code) (x)->code) == SYMBOL_REF)
3608 if (tree t = SYMBOL_REF_DECL (x)((__extension__ ({ __typeof ((x)) const _rtx = ((x)); if (((enum
rtx_code) (_rtx)->code) != SYMBOL_REF) rtl_check_failed_flag
("CONSTANT_POOL_ADDRESS_P", _rtx, "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/final.cc"
, 3608, __FUNCTION__); _rtx; })->unchanging) ? __null : ((
((x))->u.fld[1]).rt_tree))
)
3609 assemble_external (t);
3610 }
3611}
3612
3613/* Print operand X using machine-dependent assembler syntax.
3614 CODE is a non-digit that preceded the operand-number in the % spec,
3615 such as 'z' if the spec was `%z3'. CODE is 0 if there was no char
3616 between the % and the digits.
3617 When CODE is a non-letter, X is 0.
3618
3619 The meanings of the letters are machine-dependent and controlled
3620 by TARGET_PRINT_OPERAND. */
3621
3622void
3623output_operand (rtx x, int code ATTRIBUTE_UNUSED__attribute__ ((__unused__)))
3624{
3625 if (x && GET_CODE (x)((enum rtx_code) (x)->code) == SUBREG)
3626 x = alter_subreg (&x, true);
3627
3628 /* X must not be a pseudo reg. */
3629 if (!targetm.no_register_allocation)
3630 gcc_assert (!x || !REG_P (x) || REGNO (x) < FIRST_PSEUDO_REGISTER)((void)(!(!x || !(((enum rtx_code) (x)->code) == REG) || (
rhs_regno(x)) < 76) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/final.cc"
, 3630, __FUNCTION__), 0 : 0))
;
3631
3632 targetm.asm_out.print_operand (asm_out_file, x, code);
3633
3634 if (x == NULL_RTX(rtx) 0)
3635 return;
3636
3637 mark_symbol_refs_as_used (x);
3638}
3639
3640/* Print a memory reference operand for address X using
3641 machine-dependent assembler syntax. */
3642
3643void
3644output_address (machine_mode mode, rtx x)
3645{
3646 bool changed = false;
3647 walk_alter_subreg (&x, &changed);
3648 targetm.asm_out.print_operand_address (asm_out_file, mode, x);
3649}
3650
3651/* Print an integer constant expression in assembler syntax.
3652 Addition and subtraction are the only arithmetic
3653 that may appear in these expressions. */
3654
3655void
3656output_addr_const (FILE *file, rtx x)
3657{
3658 char buf[256];
3659
3660 restart:
3661 switch (GET_CODE (x)((enum rtx_code) (x)->code))
3662 {
3663 case PC:
3664 putc ('.', file);
3665 break;
3666
3667 case SYMBOL_REF:
3668 if (SYMBOL_REF_DECL (x)((__extension__ ({ __typeof ((x)) const _rtx = ((x)); if (((enum
rtx_code) (_rtx)->code) != SYMBOL_REF) rtl_check_failed_flag
("CONSTANT_POOL_ADDRESS_P", _rtx, "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/final.cc"
, 3668, __FUNCTION__); _rtx; })->unchanging) ? __null : ((
((x))->u.fld[1]).rt_tree))
)
3669 assemble_external (SYMBOL_REF_DECL (x)((__extension__ ({ __typeof ((x)) const _rtx = ((x)); if (((enum
rtx_code) (_rtx)->code) != SYMBOL_REF) rtl_check_failed_flag
("CONSTANT_POOL_ADDRESS_P", _rtx, "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/final.cc"
, 3669, __FUNCTION__); _rtx; })->unchanging) ? __null : ((
((x))->u.fld[1]).rt_tree))
);
3670#ifdef ASM_OUTPUT_SYMBOL_REF
3671 ASM_OUTPUT_SYMBOL_REF (file, x)do { const char *name = assemble_name_resolve ((((x)->u.fld
[0]).rt_str)); if ((global_options.x_ix86_asm_dialect) == ASM_ATT
&& name[0] == '$' && user_label_prefix[0] ==
'\0') { fputc ('(', (file)); assemble_name_raw ((file), name
); fputc (')', (file)); } else assemble_name_raw ((file), name
); } while (0)
;
3672#else
3673 assemble_name (file, XSTR (x, 0)(((x)->u.fld[0]).rt_str));
3674#endif
3675 break;
3676
3677 case LABEL_REF:
3678 x = label_ref_label (x);
3679 /* Fall through. */
3680 case CODE_LABEL:
3681 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (x))do { char *__p; (buf)[0] = '*'; (buf)[1] = '.'; __p = stpcpy (
&(buf)[2], "L"); sprint_ul (__p, (unsigned long) ((((x)->
u.fld[5]).rt_int))); } while (0)
;
3682#ifdef ASM_OUTPUT_LABEL_REF
3683 ASM_OUTPUT_LABEL_REF (file, buf);
3684#else
3685 assemble_name (file, buf);
3686#endif
3687 break;
3688
3689 case CONST_INT:
3690 fprintf (file, HOST_WIDE_INT_PRINT_DEC"%" "l" "d", INTVAL (x)((x)->u.hwint[0]));
3691 break;
3692
3693 case CONST:
3694 /* This used to output parentheses around the expression,
3695 but that does not work on the 386 (either ATT or BSD assembler). */
3696 output_addr_const (file, XEXP (x, 0)(((x)->u.fld[0]).rt_rtx));
3697 break;
3698
3699 case CONST_WIDE_INT:
3700 /* We do not know the mode here so we have to use a roundabout
3701 way to build a wide-int to get it printed properly. */
3702 {
3703 wide_int w = wide_int::from_array (&CONST_WIDE_INT_ELT (x, 0)((x)->u.hwiv.elem[0]),
3704 CONST_WIDE_INT_NUNITS (x)((int)__extension__ ({ __typeof ((x)) const _rtx = ((x)); if (
((enum rtx_code) (_rtx)->code) != CONST_WIDE_INT) rtl_check_failed_flag
("CWI_GET_NUM_ELEM", _rtx, "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/final.cc"
, 3704, __FUNCTION__); _rtx; })->u2.num_elem)
,
3705 CONST_WIDE_INT_NUNITS (x)((int)__extension__ ({ __typeof ((x)) const _rtx = ((x)); if (
((enum rtx_code) (_rtx)->code) != CONST_WIDE_INT) rtl_check_failed_flag
("CWI_GET_NUM_ELEM", _rtx, "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/final.cc"
, 3705, __FUNCTION__); _rtx; })->u2.num_elem)
3706 * HOST_BITS_PER_WIDE_INT64,
3707 false);
3708 print_decs (w, file);
3709 }
3710 break;
3711
3712 case CONST_DOUBLE:
3713 if (CONST_DOUBLE_AS_INT_P (x)(((enum rtx_code) (x)->code) == CONST_DOUBLE && ((
machine_mode) (x)->mode) == ((void) 0, E_VOIDmode))
)
3714 {
3715 /* We can use %d if the number is one word and positive. */
3716 if (CONST_DOUBLE_HIGH (x)((x)->u.hwint[1]))
3717 fprintf (file, HOST_WIDE_INT_PRINT_DOUBLE_HEX"0x%" "l" "x" "%016" "l" "x",
3718 (unsigned HOST_WIDE_INTlong) CONST_DOUBLE_HIGH (x)((x)->u.hwint[1]),
3719 (unsigned HOST_WIDE_INTlong) CONST_DOUBLE_LOW (x)((x)->u.hwint[0]));
3720 else if (CONST_DOUBLE_LOW (x)((x)->u.hwint[0]) < 0)
3721 fprintf (file, HOST_WIDE_INT_PRINT_HEX"%#" "l" "x",
3722 (unsigned HOST_WIDE_INTlong) CONST_DOUBLE_LOW (x)((x)->u.hwint[0]));
3723 else
3724 fprintf (file, HOST_WIDE_INT_PRINT_DEC"%" "l" "d", CONST_DOUBLE_LOW (x)((x)->u.hwint[0]));
3725 }
3726 else
3727 /* We can't handle floating point constants;
3728 PRINT_OPERAND must handle them. */
3729 output_operand_lossage ("floating constant misused");
3730 break;
3731
3732 case CONST_FIXED:
3733 fprintf (file, HOST_WIDE_INT_PRINT_DEC"%" "l" "d", CONST_FIXED_VALUE_LOW (x)((long) (((const struct fixed_value *) (&(x)->u.fv))->
data.low))
);
3734 break;
3735
3736 case PLUS:
3737 /* Some assemblers need integer constants to appear last (e.g. masm). */
3738 if (CONST_INT_P (XEXP (x, 0))(((enum rtx_code) ((((x)->u.fld[0]).rt_rtx))->code) == CONST_INT
)
)
3739 {
3740 output_addr_const (file, XEXP (x, 1)(((x)->u.fld[1]).rt_rtx));
3741 if (INTVAL (XEXP (x, 0))(((((x)->u.fld[0]).rt_rtx))->u.hwint[0]) >= 0)
3742 fprintf (file, "+");
3743 output_addr_const (file, XEXP (x, 0)(((x)->u.fld[0]).rt_rtx));
3744 }
3745 else
3746 {
3747 output_addr_const (file, XEXP (x, 0)(((x)->u.fld[0]).rt_rtx));
3748 if (!CONST_INT_P (XEXP (x, 1))(((enum rtx_code) ((((x)->u.fld[1]).rt_rtx))->code) == CONST_INT
)
3749 || INTVAL (XEXP (x, 1))(((((x)->u.fld[1]).rt_rtx))->u.hwint[0]) >= 0)
3750 fprintf (file, "+");
3751 output_addr_const (file, XEXP (x, 1)(((x)->u.fld[1]).rt_rtx));
3752 }
3753 break;
3754
3755 case MINUS:
3756 /* Avoid outputting things like x-x or x+5-x,
3757 since some assemblers can't handle that. */
3758 x = simplify_subtraction (x);
3759 if (GET_CODE (x)((enum rtx_code) (x)->code) != MINUS)
3760 goto restart;
3761
3762 output_addr_const (file, XEXP (x, 0)(((x)->u.fld[0]).rt_rtx));
3763 fprintf (file, "-");
3764 if ((CONST_INT_P (XEXP (x, 1))(((enum rtx_code) ((((x)->u.fld[1]).rt_rtx))->code) == CONST_INT
)
&& INTVAL (XEXP (x, 1))(((((x)->u.fld[1]).rt_rtx))->u.hwint[0]) >= 0)
3765 || GET_CODE (XEXP (x, 1))((enum rtx_code) ((((x)->u.fld[1]).rt_rtx))->code) == PC
3766 || GET_CODE (XEXP (x, 1))((enum rtx_code) ((((x)->u.fld[1]).rt_rtx))->code) == SYMBOL_REF)
3767 output_addr_const (file, XEXP (x, 1)(((x)->u.fld[1]).rt_rtx));
3768 else
3769 {
3770 fputs (targetm.asm_out.open_paren, file);
3771 output_addr_const (file, XEXP (x, 1)(((x)->u.fld[1]).rt_rtx));
3772 fputs (targetm.asm_out.close_paren, file);
3773 }
3774 break;
3775
3776 case ZERO_EXTEND:
3777 case SIGN_EXTEND:
3778 case SUBREG:
3779 case TRUNCATE:
3780 output_addr_const (file, XEXP (x, 0)(((x)->u.fld[0]).rt_rtx));
3781 break;
3782
3783 default:
3784 if (targetm.asm_out.output_addr_const_extra (file, x))
3785 break;
3786
3787 output_operand_lossage ("invalid expression as operand");
3788 }
3789}
3790
3791/* Output a quoted string. */
3792
3793void
3794output_quoted_string (FILE *asm_file, const char *string)
3795{
3796#ifdef OUTPUT_QUOTED_STRING
3797 OUTPUT_QUOTED_STRING (asm_file, string);
3798#else
3799 char c;
3800
3801 putc ('\"', asm_file);
3802 while ((c = *string++) != 0)
3803 {
3804 if (ISPRINT (c)(_sch_istable[(c) & 0xff] & (unsigned short)(_sch_isprint
))
)
3805 {
3806 if (c == '\"' || c == '\\')
3807 putc ('\\', asm_file);
3808 putc (c, asm_file);
3809 }
3810 else
3811 fprintf (asm_file, "\\%03o", (unsigned char) c);
3812 }
3813 putc ('\"', asm_file);
3814#endif
3815}
3816
3817/* Write a HOST_WIDE_INT number in hex form 0x1234, fast. */
3818
3819void
3820fprint_whex (FILE *f, unsigned HOST_WIDE_INTlong value)
3821{
3822 char buf[2 + CHAR_BIT8 * sizeof (value) / 4];
3823 if (value == 0)
3824 putc ('0', f);
3825 else
3826 {
3827 char *p = buf + sizeof (buf);
3828 do
3829 *--p = "0123456789abcdef"[value % 16];
3830 while ((value /= 16) != 0);
3831 *--p = 'x';
3832 *--p = '0';
3833 fwrite (p, 1, buf + sizeof (buf) - p, f);
3834 }
3835}
3836
3837/* Internal function that prints an unsigned long in decimal in reverse.
3838 The output string IS NOT null-terminated. */
3839
3840static int
3841sprint_ul_rev (char *s, unsigned long value)
3842{
3843 int i = 0;
3844 do
3845 {
3846 s[i] = "0123456789"[value % 10];
3847 value /= 10;
3848 i++;
3849 /* alternate version, without modulo */
3850 /* oldval = value; */
3851 /* value /= 10; */
3852 /* s[i] = "0123456789" [oldval - 10*value]; */
3853 /* i++ */
3854 }
3855 while (value != 0);
3856 return i;
3857}
3858
3859/* Write an unsigned long as decimal to a file, fast. */
3860
3861void
3862fprint_ul (FILE *f, unsigned long value)
3863{
3864 /* python says: len(str(2**64)) == 20 */
3865 char s[20];
3866 int i;
3867
3868 i = sprint_ul_rev (s, value);
3869
3870 /* It's probably too small to bother with string reversal and fputs. */
3871 do
3872 {
3873 i--;
3874 putc (s[i], f);
3875 }
3876 while (i != 0);
3877}
3878
3879/* Write an unsigned long as decimal to a string, fast.
3880 s must be wide enough to not overflow, at least 21 chars.
3881 Returns the length of the string (without terminating '\0'). */
3882
3883int
3884sprint_ul (char *s, unsigned long value)
3885{
3886 int len = sprint_ul_rev (s, value);
3887 s[len] = '\0';
3888
3889 std::reverse (s, s + len);
3890 return len;
3891}
3892
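/* Illustrative, standalone check (not part of final.cc) of the buffer
   sizes used above: the largest unsigned 64-bit value,
   18446744073709551615, has 20 digits, so fprint_ul can use a bare
   20-byte buffer and sprint_ul needs one more byte for the terminating
   '\0' -- hence "at least 21 chars".  The reverse-then-reverse
   formatting is re-sketched so the check is self-contained; names are
   hypothetical and a 64-bit unsigned long is assumed, as on the x86_64
   host this report was built on.  */

#include <algorithm>
#include <cstdio>
#include <cstring>

static int
toy_sprint_ul (char *s, unsigned long value)
{
  int len = 0;
  do
    s[len++] = "0123456789"[value % 10];   /* emit digits in reverse */
  while ((value /= 10) != 0);
  s[len] = '\0';
  std::reverse (s, s + len);               /* then put them in order */
  return len;
}

/* Usage sketch:
     char a[21], b[21];
     toy_sprint_ul (a, 18446744073709551615ul);
     snprintf (b, sizeof b, "%lu", 18446744073709551615ul);
     -- strcmp (a, b) == 0 and strlen (a) == 20  */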
3893/* A poor man's fprintf, with the added features of %I, %R, %L, and %U.
3894 %R prints the value of REGISTER_PREFIX.
3895 %L prints the value of LOCAL_LABEL_PREFIX.
3896 %U prints the value of USER_LABEL_PREFIX.
3897 %I prints the value of IMMEDIATE_PREFIX.
3898 %O runs ASM_OUTPUT_OPCODE to transform what follows in the string.
3899 Also supported are %d, %i, %u, %x, %X, %o, %c, %s and %%.
3900
3901 We handle alternate assembler dialects here, just like output_asm_insn. */
3902
3903void
3904asm_fprintf (FILE *file, const char *p, ...)
3905{
3906 char buf[10];
3907 char *q, c;
3908#ifdef ASSEMBLER_DIALECT(global_options.x_ix86_asm_dialect)
3909 int dialect = 0;
3910#endif
3911 va_list argptr;
3912
3913 va_start (argptr, p)__builtin_va_start(argptr, p);
3914
3915 buf[0] = '%';
3916
3917 while ((c = *p++))
3918 switch (c)
3919 {
3920#ifdef ASSEMBLER_DIALECT(global_options.x_ix86_asm_dialect)
3921 case '{':
3922 case '}':
3923 case '|':
3924 p = do_assembler_dialects (p, &dialect);
3925 break;
3926#endif
3927
3928 case '%':
3929 c = *p++;
3930 q = &buf[1];
3931 while (strchr ("-+ #0", c))
3932 {
3933 *q++ = c;
3934 c = *p++;
3935 }
3936 while (ISDIGIT (c)(_sch_istable[(c) & 0xff] & (unsigned short)(_sch_isdigit
))
|| c == '.')
3937 {
3938 *q++ = c;
3939 c = *p++;
3940 }
3941 switch (c)
3942 {
3943 case '%':
3944 putc ('%', file);
3945 break;
3946
3947 case 'd': case 'i': case 'u':
3948 case 'x': case 'X': case 'o':
3949 case 'c':
3950 *q++ = c;
3951 *q = 0;
3952 fprintf (file, buf, va_arg (argptr, int)__builtin_va_arg(argptr, int));
3953 break;
3954
3955 case 'w':
3956 /* This is a prefix to the 'd', 'i', 'u', 'x', 'X', and
3957 'o' cases, but we do not check for those cases. It
3958 means that the value is a HOST_WIDE_INT, which may be
3959 either `long' or `long long'. */
3960 memcpy (q, HOST_WIDE_INT_PRINT"l", strlen (HOST_WIDE_INT_PRINT"l"));
3961 q += strlen (HOST_WIDE_INT_PRINT"l");
3962 *q++ = *p++;
3963 *q = 0;
3964 fprintf (file, buf, va_arg (argptr, HOST_WIDE_INT)__builtin_va_arg(argptr, long));
3965 break;
3966
3967 case 'l':
3968 *q++ = c;
3969#ifdef HAVE_LONG_LONG1
3970 if (*p == 'l')
3971 {
3972 *q++ = *p++;
3973 *q++ = *p++;
3974 *q = 0;
3975 fprintf (file, buf, va_arg (argptr, long long)__builtin_va_arg(argptr, long long));
3976 }
3977 else
3978#endif
3979 {
3980 *q++ = *p++;
3981 *q = 0;
3982 fprintf (file, buf, va_arg (argptr, long)__builtin_va_arg(argptr, long));
3983 }
3984
3985 break;
3986
3987 case 's':
3988 *q++ = c;
3989 *q = 0;
3990 fprintf (file, buf, va_arg (argptr, char *)__builtin_va_arg(argptr, char *));
3991 break;
3992
3993 case 'O':
3994#ifdef ASM_OUTPUT_OPCODE
3995 ASM_OUTPUT_OPCODE (asm_out_file, p){ if (((p))[0] == '%' && ((p))[1] == 'v') ((p)) += ((
global_options.x_ix86_isa_flags & (1UL << 8)) != 0)
? 1 : 2; }
;
3996#endif
3997 break;
3998
3999 case 'R':
4000#ifdef REGISTER_PREFIX
4001 fprintf (file, "%s", REGISTER_PREFIX);
4002#endif
4003 break;
4004
4005 case 'I':
4006#ifdef IMMEDIATE_PREFIX
4007 fprintf (file, "%s", IMMEDIATE_PREFIX);
4008#endif
4009 break;
4010
4011 case 'L':
4012#ifdef LOCAL_LABEL_PREFIX"."
4013 fprintf (file, "%s", LOCAL_LABEL_PREFIX".");
4014#endif
4015 break;
4016
4017 case 'U':
4018 fputs (user_label_prefix, file);
4019 break;
4020
4021#ifdef ASM_FPRINTF_EXTENSIONS
4022 /* Uppercase letters are reserved for general use by asm_fprintf
4023 and so are not available to target specific code. In order to
4024 prevent the ASM_FPRINTF_EXTENSIONS macro from using them then,
4025 they are defined here. As they get turned into real extensions
4026 to asm_fprintf they should be removed from this list. */
4027 case 'A': case 'B': case 'C': case 'D': case 'E':
4028 case 'F': case 'G': case 'H': case 'J': case 'K':
4029 case 'M': case 'N': case 'P': case 'Q': case 'S':
4030 case 'T': case 'V': case 'W': case 'Y': case 'Z':
4031 break;
4032
4033 ASM_FPRINTF_EXTENSIONS (file, argptr, p)case 'z': fputc (((global_options.x_ix86_isa_flags & (1UL
<< 1)) != 0) ? 'q' : 'l', (file)); break; case 'r': { unsigned
int regno = __builtin_va_arg((argptr), int); if ((((unsigned
long) ((regno)) - (unsigned long) (0) <= (unsigned long) (
7) - (unsigned long) (0)))) fputc (((global_options.x_ix86_isa_flags
& (1UL << 1)) != 0) ? 'r' : 'e', (file)); fputs ((
this_target_hard_regs->x_reg_names)[regno], (file)); break
; }
4034#endif
4035 default:
4036 gcc_unreachable ()(fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/final.cc"
, 4036, __FUNCTION__))
;
4037 }
4038 break;
4039
4040 default:
4041 putc (c, file);
4042 }
4043 va_end (argptr)__builtin_va_end(argptr);
4044}
4045
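/* Illustrative, standalone sketch (not part of final.cc): the
   "fprintf with extra directives" technique asm_fprintf implements
   above, reduced to a wrapper that understands %U (user label prefix),
   %% and plain %d / %s, pulling one argument at a time much as the
   real code rebuilds a spec for fprintf.  The prefix value and all
   names are hypothetical.  */

#include <cstdarg>
#include <cstdio>

static const char *const toy_user_label_prefix = "_";   /* assumed value */

static void
toy_asm_fprintf (FILE *file, const char *p, ...)
{
  va_list ap;
  va_start (ap, p);
  while (*p)
    {
      if (*p != '%')
        {
          fputc (*p++, file);
          continue;
        }
      p++;                                  /* skip '%' */
      if (*p == '\0')                       /* stray trailing '%' */
        break;
      switch (*p++)
        {
        case '%': fputc ('%', file); break;
        case 'U': fputs (toy_user_label_prefix, file); break;
        case 'd': fprintf (file, "%d", va_arg (ap, int)); break;
        case 's': fputs (va_arg (ap, const char *), file); break;
        default:  fputc (p[-1], file); break;   /* pass unknown through */
        }
    }
  va_end (ap);
}

/* Usage sketch:
     toy_asm_fprintf (stdout, "\tcall\t%U%s\t%% %d\n", "main", 3);
   prints "\tcall\t_main\t% 3" followed by a newline.  */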
4046/* Return nonzero if this function has no function calls. */
4047
4048int
4049leaf_function_p (void)
4050{
4051 rtx_insn *insn;
4052
4053 /* Ensure we walk the entire function body. */
4054 gcc_assert (!in_sequence_p ())((void)(!(!in_sequence_p ()) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/final.cc"
, 4054, __FUNCTION__), 0 : 0))
;
4055
4056 /* Some back-ends (e.g. s390) want leaf functions to stay leaf
4057 functions even if they call mcount. */
4058 if (crtl(&x_rtl)->profile && !targetm.keep_leaf_when_profiled ())
4059 return 0;
4060
4061 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4062 {
4063 if (CALL_P (insn)(((enum rtx_code) (insn)->code) == CALL_INSN)
4064 && ! SIBLING_CALL_P (insn)(__extension__ ({ __typeof ((insn)) const _rtx = ((insn)); if
(((enum rtx_code) (_rtx)->code) != CALL_INSN) rtl_check_failed_flag
("SIBLING_CALL_P", _rtx, "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/final.cc"
, 4064, __FUNCTION__); _rtx; })->jump)
4065 && ! FAKE_CALL_P (insn)(__extension__ ({ __typeof ((insn)) const _rtx = ((insn)); if
(((enum rtx_code) (_rtx)->code) != CALL_INSN) rtl_check_failed_flag
("FAKE_CALL_P", _rtx, "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/final.cc"
, 4065, __FUNCTION__); _rtx; })->used)
)
4066 return 0;
4067 if (NONJUMP_INSN_P (insn)(((enum rtx_code) (insn)->code) == INSN)
4068 && GET_CODE (PATTERN (insn))((enum rtx_code) (PATTERN (insn))->code) == SEQUENCE
4069 && CALL_P (XVECEXP (PATTERN (insn), 0, 0))(((enum rtx_code) ((((((PATTERN (insn))->u.fld[0]).rt_rtvec
))->elem[0]))->code) == CALL_INSN)
4070 && ! SIBLING_CALL_P (XVECEXP (PATTERN (insn), 0, 0))(__extension__ ({ __typeof (((((((PATTERN (insn))->u.fld[0
]).rt_rtvec))->elem[0]))) const _rtx = (((((((PATTERN (insn
))->u.fld[0]).rt_rtvec))->elem[0]))); if (((enum rtx_code
) (_rtx)->code) != CALL_INSN) rtl_check_failed_flag ("SIBLING_CALL_P"
, _rtx, "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/final.cc"
, 4070, __FUNCTION__); _rtx; })->jump)
)
4071 return 0;
4072 }
4073
4074 return 1;
4075}
4076
4077/* Return 1 if branch is a forward branch.
4078 Uses insn_shuid array, so it works only in the final pass. May be used by
4079 output templates to customarily add branch prediction hints.
4080 */
4081int
4082final_forward_branch_p (rtx_insn *insn)
4083{
4084 int insn_id, label_id;
4085
4086 gcc_assert (uid_shuid)((void)(!(uid_shuid) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/final.cc"
, 4086, __FUNCTION__), 0 : 0))
;
4087 insn_id = INSN_SHUID (insn)(uid_shuid[INSN_UID (insn)]);
4088 label_id = INSN_SHUID (JUMP_LABEL (insn))(uid_shuid[INSN_UID ((((insn)->u.fld[7]).rt_rtx))]);
4089 /* We've hit some insns that do not have id information available. */
4090 gcc_assert (insn_id && label_id)((void)(!(insn_id && label_id) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/final.cc"
, 4090, __FUNCTION__), 0 : 0))
;
4091 return insn_id < label_id;
4092}
4093
4094/* On some machines, a function with no call insns
4095 can run faster if it doesn't create its own register window.
4096 When output, the leaf function should use only the "output"
4097 registers. Ordinarily, the function would be compiled to use
4098 the "input" registers to find its arguments; it is a candidate
4099 for leaf treatment if it uses only the "input" registers.
4100 Leaf function treatment means renumbering so the function
4101 uses the "output" registers instead. */
4102
4103#ifdef LEAF_REGISTERS
4104
4105/* Return 1 if this function uses only the registers that can be
4106 safely renumbered. */
4107
4108int
4109only_leaf_regs_used (void)
4110{
4111 int i;
4112 const char *const permitted_reg_in_leaf_functions = LEAF_REGISTERS;
4113
4114 for (i = 0; i < FIRST_PSEUDO_REGISTER76; i++)
4115 if ((df_regs_ever_live_p (i) || global_regs[i])
4116 && ! permitted_reg_in_leaf_functions[i])
4117 return 0;
4118
4119 if (crtl(&x_rtl)->uses_pic_offset_table
4120 && pic_offset_table_rtx(this_target_rtl->x_pic_offset_table_rtx) != 0
4121 && REG_P (pic_offset_table_rtx)(((enum rtx_code) ((this_target_rtl->x_pic_offset_table_rtx
))->code) == REG)
4122 && ! permitted_reg_in_leaf_functions[REGNO (pic_offset_table_rtx)(rhs_regno((this_target_rtl->x_pic_offset_table_rtx)))])
4123 return 0;
4124
4125 return 1;
4126}
4127
4128/* Scan all instructions and renumber all registers into those
4129 available in leaf functions. */
4130
4131static void
4132leaf_renumber_regs (rtx_insn *first)
4133{
4134 rtx_insn *insn;
4135
4136 /* Renumber only the actual patterns.
4137 The reg-notes can contain frame pointer refs,
4138 and renumbering them could crash, and should not be needed. */
4139 for (insn = first; insn; insn = NEXT_INSN (insn))
4140 if (INSN_P (insn)(((((enum rtx_code) (insn)->code) == INSN) || (((enum rtx_code
) (insn)->code) == JUMP_INSN) || (((enum rtx_code) (insn)->
code) == CALL_INSN)) || (((enum rtx_code) (insn)->code) ==
DEBUG_INSN))
)
4141 leaf_renumber_regs_insn (PATTERN (insn));
4142}
4143
4144/* Scan IN_RTX and its subexpressions, and renumber all regs into those
4145 available in leaf functions. */
4146
4147void
4148leaf_renumber_regs_insn (rtx in_rtx)
4149{
4150 int i, j;
4151 const char *format_ptr;
4152
4153 if (in_rtx == 0)
4154 return;
4155
4156 /* Renumber all input-registers into output-registers.
4157 A register that has already been renumbered has its "used" bit set
4158 and is left alone (see below). */
4159
4160 if (REG_P (in_rtx)(((enum rtx_code) (in_rtx)->code) == REG))
4161 {
4162 int newreg;
4163
4164 /* Don't renumber the same reg twice. */
4165 if (in_rtx->used)
4166 return;
4167
4168 newreg = REGNO (in_rtx)(rhs_regno(in_rtx));
4169 /* Don't try to renumber pseudo regs. It is possible for a pseudo reg
4170 to reach here as part of a REG_NOTE. */
4171 if (newreg >= FIRST_PSEUDO_REGISTER76)
4172 {
4173 in_rtx->used = 1;
4174 return;
4175 }
4176 newreg = LEAF_REG_REMAP (newreg);
4177 gcc_assert (newreg >= 0)((void)(!(newreg >= 0) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/final.cc"
, 4177, __FUNCTION__), 0 : 0))
;
4178 df_set_regs_ever_live (REGNO (in_rtx)(rhs_regno(in_rtx)), false);
4179 df_set_regs_ever_live (newreg, true);
4180 SET_REGNO (in_rtx, newreg)(df_ref_change_reg_with_loc (in_rtx, newreg));
4181 in_rtx->used = 1;
4182 return;
4183 }
4184
4185 if (INSN_P (in_rtx)(((((enum rtx_code) (in_rtx)->code) == INSN) || (((enum rtx_code
) (in_rtx)->code) == JUMP_INSN) || (((enum rtx_code) (in_rtx
)->code) == CALL_INSN)) || (((enum rtx_code) (in_rtx)->
code) == DEBUG_INSN))
)
4186 {
4187 /* Inside a SEQUENCE, we find insns.
4188 Renumber just the patterns of these insns,
4189 just as we do for the top-level insns. */
4190 leaf_renumber_regs_insn (PATTERN (in_rtx));
4191 return;
4192 }
4193
4194 format_ptr = GET_RTX_FORMAT (GET_CODE (in_rtx))(rtx_format[(int) (((enum rtx_code) (in_rtx)->code))]);
4195
4196 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (in_rtx))(rtx_length[(int) (((enum rtx_code) (in_rtx)->code))]); i++)
4197 switch (*format_ptr++)
4198 {
4199 case 'e':
4200 leaf_renumber_regs_insn (XEXP (in_rtx, i)(((in_rtx)->u.fld[i]).rt_rtx));
4201 break;
4202
4203 case 'E':
4204 if (XVEC (in_rtx, i)(((in_rtx)->u.fld[i]).rt_rtvec) != NULL__null)
4205 for (j = 0; j < XVECLEN (in_rtx, i)(((((in_rtx)->u.fld[i]).rt_rtvec))->num_elem); j++)
4206 leaf_renumber_regs_insn (XVECEXP (in_rtx, i, j)(((((in_rtx)->u.fld[i]).rt_rtvec))->elem[j]));
4207 break;
4208
4209 case 'S':
4210 case 's':
4211 case '0':
4212 case 'i':
4213 case 'w':
4214 case 'p':
4215 case 'n':
4216 case 'u':
4217 break;
4218
4219 default:
4220 gcc_unreachable ()(fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/final.cc"
, 4220, __FUNCTION__))
;
4221 }
4222}
4223#endif
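The 'e'/'E' switch above is an instance of GCC's format-driven rtx traversal: each code's format string says which operands are sub-expressions ('e'), which are vectors of sub-expressions ('E'), and which are leaves. A standalone toy model of that walk, with an invented node type and format strings rather than real rtx:

#include <cstddef>
#include <cstdio>
#include <string>
#include <vector>

/* Toy node: FORMAT plays the role of GET_RTX_FORMAT; 'e' = one
   sub-expression operand, 'E' = vector of sub-expressions, anything
   else is a leaf operand the walker skips.  Invented for illustration.  */
struct node
{
  std::string format;
  std::vector<node *> exprs;                 /* one entry per 'e' */
  std::vector<std::vector<node *> > vecs;    /* one entry per 'E' */
  bool visited;
};

static void
walk (node *n)
{
  if (!n)
    return;
  n->visited = true;
  std::size_t e = 0, v = 0;
  for (char c : n->format)
    {
      if (c == 'e')
        walk (n->exprs[e++]);
      else if (c == 'E')
        for (node *sub : n->vecs[v++])
          walk (sub);
      /* other format characters ('i', 'w', 's', ...) are leaves */
    }
}

int
main ()
{
  node leaf = { "i", {}, {}, false };
  node top = { "eE", { &leaf }, { { &leaf } }, false };
  walk (&top);
  std::printf ("leaf visited: %d\n", (int) leaf.visited);
}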
4224
4225/* Turn the RTL into assembly. */
4226static unsigned int
4227rest_of_handle_final (void)
4228{
4229 const char *fnname = get_fnname_from_decl (current_function_decl);
4230
4231 /* Turn debug markers into notes if the var-tracking pass has not
4232 been invoked. */
4233 if (!flag_var_trackingglobal_options.x_flag_var_tracking && MAY_HAVE_DEBUG_MARKER_INSNSglobal_options.x_debug_nonbind_markers_p)
4234 delete_vta_debug_insns (false);
4235
4236 assemble_start_function (current_function_decl, fnname);
4237 rtx_insn *first = get_insns ();
4238 int seen = 0;
4239 final_start_function_1 (&first, asm_out_file, &seen, optimizeglobal_options.x_optimize);
4240 final_1 (first, asm_out_file, seen, optimizeglobal_options.x_optimize);
4241 if (flag_ipa_raglobal_options.x_flag_ipa_ra
4242 && !lookup_attribute ("noipa", DECL_ATTRIBUTES (current_function_decl)((contains_struct_check ((current_function_decl), (TS_DECL_COMMON
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/final.cc"
, 4242, __FUNCTION__))->decl_common.attributes)
)
4243 /* Functions with naked attributes are supported only with basic asm
4244 statements in the body, thus for supported use cases the information
4245 on clobbered registers is not available. */
4246 && !lookup_attribute ("naked", DECL_ATTRIBUTES (current_function_decl)((contains_struct_check ((current_function_decl), (TS_DECL_COMMON
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/final.cc"
, 4246, __FUNCTION__))->decl_common.attributes)
))
4247 collect_fn_hard_reg_usage ();
4248 final_end_function ();
4249
4250 /* The IA-64 ".handlerdata" directive must be issued before the ".endp"
4251 directive that closes the procedure descriptor. Similarly, for x64 SEH.
4252 Otherwise it's not strictly necessary, but it doesn't hurt either. */
4253 output_function_exception_table (crtl(&x_rtl)->has_bb_partition ? 1 : 0);
4254
4255 assemble_end_function (current_function_decl, fnname);
4256
4257 /* Free up reg info memory. */
4258 free_reg_info ();
4259
4260 if (! quiet_flagglobal_options.x_quiet_flag)
4261 fflush (asm_out_file);
4262
4263 /* Note that for those inline functions where we don't initially
4264 know for certain that we will be generating an out-of-line copy,
4265 the first invocation of this routine (rest_of_compilation) will
4266 skip over this code by doing a `goto exit_rest_of_compilation;'.
4267 Later on, wrapup_global_declarations will (indirectly) call
4268 rest_of_compilation again for those inline functions that need
4269 to have out-of-line copies generated. During that call, we
4270 *will* be routed past here. */
4271
4272 timevar_push (TV_SYMOUT);
4273 if (!DECL_IGNORED_P (current_function_decl)((contains_struct_check ((current_function_decl), (TS_DECL_COMMON
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/final.cc"
, 4273, __FUNCTION__))->decl_common.ignored_flag)
)
4274 debug_hooks->function_decl (current_function_decl);
4275 timevar_pop (TV_SYMOUT);
4276
4277 /* Release the blocks that are linked to DECL_INITIAL() to free the memory. */
4278 DECL_INITIAL (current_function_decl)((contains_struct_check ((current_function_decl), (TS_DECL_COMMON
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/final.cc"
, 4278, __FUNCTION__))->decl_common.initial)
= error_mark_nodeglobal_trees[TI_ERROR_MARK];
4279
4280 if (DECL_STATIC_CONSTRUCTOR (current_function_decl)((tree_check ((current_function_decl), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/final.cc"
, 4280, __FUNCTION__, (FUNCTION_DECL)))->function_decl.static_ctor_flag
)
4281 && targetm.have_ctors_dtors)
4282 targetm.asm_out.constructor (XEXP (DECL_RTL (current_function_decl), 0)(((((contains_struct_check ((current_function_decl), (TS_DECL_WRTL
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/final.cc"
, 4282, __FUNCTION__))->decl_with_rtl.rtl ? (current_function_decl
)->decl_with_rtl.rtl : (make_decl_rtl (current_function_decl
), (current_function_decl)->decl_with_rtl.rtl)))->u.fld
[0]).rt_rtx)
,
4283 decl_init_priority_lookup
4284 (current_function_decl));
4285 if (DECL_STATIC_DESTRUCTOR (current_function_decl)((tree_check ((current_function_decl), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/final.cc"
, 4285, __FUNCTION__, (FUNCTION_DECL)))->function_decl.static_dtor_flag
)
4286 && targetm.have_ctors_dtors)
4287 targetm.asm_out.destructor (XEXP (DECL_RTL (current_function_decl), 0)(((((contains_struct_check ((current_function_decl), (TS_DECL_WRTL
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/final.cc"
, 4287, __FUNCTION__))->decl_with_rtl.rtl ? (current_function_decl
)->decl_with_rtl.rtl : (make_decl_rtl (current_function_decl
), (current_function_decl)->decl_with_rtl.rtl)))->u.fld
[0]).rt_rtx)
,
4288 decl_fini_priority_lookup
4289 (current_function_decl));
4290 return 0;
4291}
4292
4293namespace {
4294
4295const pass_data pass_data_final =
4296{
4297 RTL_PASS, /* type */
4298 "final", /* name */
4299 OPTGROUP_NONE, /* optinfo_flags */
4300 TV_FINAL, /* tv_id */
4301 0, /* properties_required */
4302 0, /* properties_provided */
4303 0, /* properties_destroyed */
4304 0, /* todo_flags_start */
4305 0, /* todo_flags_finish */
4306};
4307
4308class pass_final : public rtl_opt_pass
4309{
4310public:
4311 pass_final (gcc::context *ctxt)
4312 : rtl_opt_pass (pass_data_final, ctxt)
4313 {}
4314
4315 /* opt_pass methods: */
4316 unsigned int execute (function *) final override
4317 {
4318 return rest_of_handle_final ();
4319 }
4320
4321}; // class pass_final
4322
4323} // anon namespace
4324
4325rtl_opt_pass *
4326make_pass_final (gcc::context *ctxt)
4327{
4328 return new pass_final (ctxt);
4329}
4330
4331
4332static unsigned int
4333rest_of_handle_shorten_branches (void)
4334{
4335 /* Shorten branches. */
4336 shorten_branches (get_insns ());
4337 return 0;
4338}
4339
4340namespace {
4341
4342const pass_data pass_data_shorten_branches =
4343{
4344 RTL_PASS, /* type */
4345 "shorten", /* name */
4346 OPTGROUP_NONE, /* optinfo_flags */
4347 TV_SHORTEN_BRANCH, /* tv_id */
4348 0, /* properties_required */
4349 0, /* properties_provided */
4350 0, /* properties_destroyed */
4351 0, /* todo_flags_start */
4352 0, /* todo_flags_finish */
4353};
4354
4355class pass_shorten_branches : public rtl_opt_pass
4356{
4357public:
4358 pass_shorten_branches (gcc::context *ctxt)
4359 : rtl_opt_pass (pass_data_shorten_branches, ctxt)
4360 {}
4361
4362 /* opt_pass methods: */
4363 unsigned int execute (function *) final override
4364 {
4365 return rest_of_handle_shorten_branches ();
4366 }
4367
4368}; // class pass_shorten_branches
4369
4370} // anon namespace
4371
4372rtl_opt_pass *
4373make_pass_shorten_branches (gcc::context *ctxt)
4374{
4375 return new pass_shorten_branches (ctxt);
4376}
4377
4378
4379static unsigned int
4380rest_of_clean_state (void)
4381{
4382 rtx_insn *insn, *next;
4383 FILE *final_output = NULL__null;
4384 int save_unnumbered = flag_dump_unnumberedglobal_options.x_flag_dump_unnumbered;
4385 int save_noaddr = flag_dump_noaddrglobal_options.x_flag_dump_noaddr;
4386
4387 if (flag_dump_final_insnsglobal_options.x_flag_dump_final_insns)
4388 {
4389 final_output = fopen (flag_dump_final_insnsglobal_options.x_flag_dump_final_insns, "a");
4390 if (!final_output)
4391 {
4392 error ("could not open final insn dump file %qs: %m",
4393 flag_dump_final_insnsglobal_options.x_flag_dump_final_insns);
4394 flag_dump_final_insnsglobal_options.x_flag_dump_final_insns = NULL__null;
4395 }
4396 else
4397 {
4398 flag_dump_noaddrglobal_options.x_flag_dump_noaddr = flag_dump_unnumberedglobal_options.x_flag_dump_unnumbered = 1;
4399 if (flag_compare_debug_optglobal_options.x_flag_compare_debug_opt || flag_compare_debugglobal_options.x_flag_compare_debug)
4400 dump_flags |= TDF_NOUID | TDF_COMPARE_DEBUG;
4401 dump_function_header (final_output, current_function_decl,
4402 dump_flags);
4403 final_insns_dump_p = true;
4404
4405 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4406 if (LABEL_P (insn)(((enum rtx_code) (insn)->code) == CODE_LABEL))
4407 INSN_UID (insn) = CODE_LABEL_NUMBER (insn)(((insn)->u.fld[5]).rt_int);
4408 else
4409 {
4410 if (NOTE_P (insn)(((enum rtx_code) (insn)->code) == NOTE))
4411 set_block_for_insn (insn, NULL__null);
4412 INSN_UID (insn) = 0;
4413 }
4414 }
4415 }
4416
4417 /* It is very important to decompose the RTL instruction chain here:
4418 debug information keeps pointing into CODE_LABEL insns inside the function
4419 body. If these remain pointing to the other insns, we end up preserving
4420 the whole RTL chain and the attached detailed debug info in memory. */
4421 for (insn = get_insns (); insn; insn = next)
4422 {
4423 next = NEXT_INSN (insn);
4424 SET_NEXT_INSN (insn) = NULL__null;
4425 SET_PREV_INSN (insn) = NULL__null;
4426
4427 rtx_insn *call_insn = insn;
4428 if (NONJUMP_INSN_P (call_insn)(((enum rtx_code) (call_insn)->code) == INSN)
4429 && GET_CODE (PATTERN (call_insn))((enum rtx_code) (PATTERN (call_insn))->code) == SEQUENCE)
4430 {
4431 rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (call_insn));
4432 call_insn = seq->insn (0);
4433 }
4434 if (CALL_P (call_insn)(((enum rtx_code) (call_insn)->code) == CALL_INSN))
4435 {
4436 rtx note
4437 = find_reg_note (call_insn, REG_CALL_ARG_LOCATION, NULL_RTX(rtx) 0);
4438 if (note)
4439 remove_note (call_insn, note);
4440 }
4441
4442 if (final_output
4443 && (!NOTE_P (insn)(((enum rtx_code) (insn)->code) == NOTE)
4444 || (NOTE_KIND (insn)(((insn)->u.fld[4]).rt_int) != NOTE_INSN_VAR_LOCATION
4445 && NOTE_KIND (insn)(((insn)->u.fld[4]).rt_int) != NOTE_INSN_BEGIN_STMT
4446 && NOTE_KIND (insn)(((insn)->u.fld[4]).rt_int) != NOTE_INSN_INLINE_ENTRY
4447 && NOTE_KIND (insn)(((insn)->u.fld[4]).rt_int) != NOTE_INSN_BLOCK_BEG
4448 && NOTE_KIND (insn)(((insn)->u.fld[4]).rt_int) != NOTE_INSN_BLOCK_END
4449 && NOTE_KIND (insn)(((insn)->u.fld[4]).rt_int) != NOTE_INSN_DELETED_DEBUG_LABEL)))
4450 print_rtl_single (final_output, insn);
4451 }
4452
4453 if (final_output)
4454 {
4455 flag_dump_noaddrglobal_options.x_flag_dump_noaddr = save_noaddr;
4456 flag_dump_unnumberedglobal_options.x_flag_dump_unnumbered = save_unnumbered;
4457 final_insns_dump_p = false;
4458
4459 if (fclose (final_output))
4460 {
4461 error ("could not close final insn dump file %qs: %m",
4462 flag_dump_final_insnsglobal_options.x_flag_dump_final_insns);
4463 flag_dump_final_insnsglobal_options.x_flag_dump_final_insns = NULL__null;
4464 }
4465 }
4466
4467 flag_rerun_cse_after_global_opts = 0;
4468 reload_completed = 0;
4469 epilogue_completed = 0;
4470#ifdef STACK_REGS
4471 regstack_completed = 0;
4472#endif
4473
4474 /* Clear out the insn_length contents now that they are no
4475 longer valid. */
4476 init_insn_lengths ();
4477
4478 /* Show no temporary slots allocated. */
4479 init_temp_slots ();
4480
4481 free_bb_for_insn ();
4482
4483 if (cfun(cfun + 0)->gimple_df)
4484 delete_tree_ssa (cfun(cfun + 0));
4485
4486 /* We can reduce stack alignment on call site only when we are sure that
4487 the function body just produced will be actually used in the final
4488 executable. */
4489 if (flag_ipa_stack_alignmentglobal_options.x_flag_ipa_stack_alignment
4490 && decl_binds_to_current_def_p (current_function_decl))
4491 {
4492 unsigned int pref = crtl(&x_rtl)->preferred_stack_boundary;
4493 if (crtl(&x_rtl)->stack_alignment_needed > crtl(&x_rtl)->preferred_stack_boundary)
4494 pref = crtl(&x_rtl)->stack_alignment_needed;
4495 cgraph_node::rtl_info (current_function_decl)
4496 ->preferred_incoming_stack_boundary = pref;
4497 }
4498
4499 /* Make sure volatile mem refs aren't considered valid operands for
4500 arithmetic insns. We must call this here if this is a nested inline
4501 function, since the above code leaves us in the init_recog state,
4502 and the function context push/pop code does not save/restore volatile_ok.
4503
4504 ??? Maybe it isn't necessary for expand_start_function to call this
4505 anymore if we do it here? */
4506
4507 init_recog_no_volatile ();
4508
4509 /* We're done with this function. Free up memory if we can. */
4510 free_after_parsing (cfun(cfun + 0));
4511 free_after_compilation (cfun(cfun + 0));
4512 return 0;
4513}
4514
4515namespace {
4516
4517const pass_data pass_data_clean_state =
4518{
4519 RTL_PASS, /* type */
4520 "*clean_state", /* name */
4521 OPTGROUP_NONE, /* optinfo_flags */
4522 TV_FINAL, /* tv_id */
4523 0, /* properties_required */
4524 0, /* properties_provided */
4525 PROP_rtl(1 << 7), /* properties_destroyed */
4526 0, /* todo_flags_start */
4527 0, /* todo_flags_finish */
4528};
4529
4530class pass_clean_state : public rtl_opt_pass
4531{
4532public:
4533 pass_clean_state (gcc::context *ctxt)
4534 : rtl_opt_pass (pass_data_clean_state, ctxt)
4535 {}
4536
4537 /* opt_pass methods: */
4538 unsigned int execute (function *) final override
4539 {
4540 return rest_of_clean_state ();
4541 }
4542
4543}; // class pass_clean_state
4544
4545} // anon namespace
4546
4547rtl_opt_pass *
4548make_pass_clean_state (gcc::context *ctxt)
4549{
4550 return new pass_clean_state (ctxt);
4551}
4552
4553/* Return true if INSN is a call to the current function. */
4554
4555static bool
4556self_recursive_call_p (rtx_insn *insn)
4557{
4558 tree fndecl = get_call_fndecl (insn);
4559 return (fndecl == current_function_decl
4560 && decl_binds_to_current_def_p (fndecl));
4561}
4562
4563/* Collect hard register usage for the current function. */
4564
4565static void
4566collect_fn_hard_reg_usage (void)
4567{
4568 rtx_insn *insn;
4569#ifdef STACK_REGS
4570 int i;
4571#endif
4572 struct cgraph_rtl_info *node;
4573 HARD_REG_SET function_used_regs;
4574
4575 /* ??? To be removed when all the ports have been fixed. */
4576 if (!targetm.call_fusage_contains_non_callee_clobbers)
4577 return;
4578
4579 /* Be conservative - mark fixed and global registers as used. */
4580 function_used_regs = fixed_reg_set(this_target_hard_regs->x_fixed_reg_set);
4581
4582#ifdef STACK_REGS
4583 /* Handle STACK_REGS conservatively, since the df-framework does not
4584 provide accurate information for them. */
4585
4586 for (i = FIRST_STACK_REG8; i <= LAST_STACK_REG15; i++)
4587 SET_HARD_REG_BIT (function_used_regs, i);
4588#endif
4589
4590 for (insn = get_insns (); insn != NULL_RTX(rtx) 0; insn = next_insn (insn))
4591 {
4592 HARD_REG_SET insn_used_regs;
4593
4594 if (!NONDEBUG_INSN_P (insn)((((enum rtx_code) (insn)->code) == INSN) || (((enum rtx_code
) (insn)->code) == JUMP_INSN) || (((enum rtx_code) (insn)->
code) == CALL_INSN))
)
4595 continue;
4596
4597 if (CALL_P (insn)(((enum rtx_code) (insn)->code) == CALL_INSN)
4598 && !self_recursive_call_p (insn))
4599 function_used_regs
4600 |= insn_callee_abi (insn).full_and_partial_reg_clobbers ();
4601
4602 find_all_hard_reg_sets (insn, &insn_used_regs, false);
4603 function_used_regs |= insn_used_regs;
4604
4605 if (hard_reg_set_subset_p (crtl(&x_rtl)->abi->full_and_partial_reg_clobbers (),
4606 function_used_regs))
4607 return;
4608 }
4609
4610 /* Mask out fully-saved registers, so that they don't affect equality
4611 comparisons between function_abis. */
4612 function_used_regs &= crtl(&x_rtl)->abi->full_and_partial_reg_clobbers ();
4613
4614 node = cgraph_node::rtl_info (current_function_decl);
4615 gcc_assert (node != NULL)((void)(!(node != __null) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/final.cc"
, 4615, __FUNCTION__), 0 : 0))
;
4616
4617 node->function_used_regs = function_used_regs;
4618}
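collect_fn_hard_reg_usage above follows an accumulate-with-early-exit pattern: start from a conservative seed (fixed, global and stack registers), union in each instruction's clobbers, stop once the set already covers everything the current ABI allows a call to clobber, and finally mask out fully-saved registers. A standalone sketch of that pattern, using std::bitset in place of HARD_REG_SET, with an invented register count and invented sets:

#include <bitset>
#include <cstdio>
#include <string>
#include <vector>

static const std::size_t NREGS = 16;            /* invented register count */
typedef std::bitset<NREGS> regset;              /* stand-in for HARD_REG_SET */

/* Union per-insn clobber sets into one set, starting from a conservative
   seed; stop early once the set covers everything the ABI lets a call
   clobber, then mask out fully-saved registers, as the real code does.  */
static regset
collect_used_regs (const std::vector<regset> &insn_clobbers,
                   const regset &seed, const regset &abi_clobberable)
{
  regset used = seed;
  for (const regset &c : insn_clobbers)
    {
      used |= c;
      if ((abi_clobberable & ~used).none ())
        break;                                  /* already fully conservative */
    }
  return used & abi_clobberable;
}

int
main ()
{
  regset seed ("0000000000000011");             /* e.g. fixed regs 0 and 1 */
  regset abi ("0000000011111111");              /* regs 0-7 are clobberable */
  std::vector<regset> clobbers = { regset ("0000000000000100"),
                                   regset ("0000000000110000") };
  std::printf ("%s\n", collect_used_regs (clobbers, seed, abi).to_string ().c_str ());
}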

/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/rtl.h

1/* Register Transfer Language (RTL) definitions for GCC
2 Copyright (C) 1987-2023 Free Software Foundation, Inc.
3
4This file is part of GCC.
5
6GCC is free software; you can redistribute it and/or modify it under
7the terms of the GNU General Public License as published by the Free
8Software Foundation; either version 3, or (at your option) any later
9version.
10
11GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12WARRANTY; without even the implied warranty of MERCHANTABILITY or
13FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14for more details.
15
16You should have received a copy of the GNU General Public License
17along with GCC; see the file COPYING3. If not see
18<http://www.gnu.org/licenses/>. */
19
20#ifndef GCC_RTL_H
21#define GCC_RTL_H
22
23/* This file is occasionally included by generator files which expect
24 machmode.h and other files to exist and would not normally have been
25 included by coretypes.h. */
26#ifdef GENERATOR_FILE
27#include "real.h"
28#include "fixed-value.h"
29#include "statistics.h"
30#include "vec.h"
31#include "hash-table.h"
32#include "hash-set.h"
33#include "input.h"
34#include "is-a.h"
35#endif /* GENERATOR_FILE */
36
37#include "hard-reg-set.h"
38
39class predefined_function_abi;
40
41/* Value used by some passes to "recognize" noop moves as valid
42 instructions. */
43#define NOOP_MOVE_INSN_CODE2147483647 INT_MAX2147483647
44
45/* Register Transfer Language EXPRESSIONS CODES */
46
47#define RTX_CODEenum rtx_code enum rtx_code
48enum rtx_code {
49
50#define DEF_RTL_EXPR(ENUM, NAME, FORMAT, CLASS) ENUM ,
51#include "rtl.def" /* rtl expressions are documented here */
52#undef DEF_RTL_EXPR
53
54 LAST_AND_UNUSED_RTX_CODE}; /* A convenient way to get a value for
55 NUM_RTX_CODE.
56 Assumes default enum value assignment. */
57
58/* The cast here saves many casts elsewhere. */
59#define NUM_RTX_CODE((int) LAST_AND_UNUSED_RTX_CODE) ((int) LAST_AND_UNUSED_RTX_CODE)
60
61/* Similar, but since generator files get more entries... */
62#ifdef GENERATOR_FILE
63# define NON_GENERATOR_NUM_RTX_CODE ((int) MATCH_OPERAND)
64#endif
65
66/* Register Transfer Language EXPRESSIONS CODE CLASSES */
67
68enum rtx_class {
69 /* We check bits 0-1 of some rtx class codes in the predicates below. */
70
71 /* Bit 0 = 1 if commutative.
72 Bit 1 = comparison if 0, arithmetic if 1. */
73 RTX_COMPARE, /* 0 */
74 RTX_COMM_COMPARE,
75 RTX_BIN_ARITH,
76 RTX_COMM_ARITH,
77
78 /* Must follow the four preceding values. */
79 RTX_UNARY, /* 4 */
80
81 RTX_EXTRA,
82 RTX_MATCH,
83 RTX_INSN,
84
85 /* Bit 0 = 1 if constant. */
86 RTX_OBJ, /* 8 */
87 RTX_CONST_OBJ,
88
89 RTX_TERNARY,
90 RTX_BITFIELD_OPS,
91 RTX_AUTOINC
92};
93
94#define RTX_OBJ_MASK(~1) (~1)
95#define RTX_OBJ_RESULT(RTX_OBJ & (~1)) (RTX_OBJ & RTX_OBJ_MASK(~1))
96#define RTX_COMPARE_MASK(~1) (~1)
97#define RTX_COMPARE_RESULT(RTX_COMPARE & (~1)) (RTX_COMPARE & RTX_COMPARE_MASK(~1))
98#define RTX_ARITHMETIC_MASK(~1) (~1)
99#define RTX_ARITHMETIC_RESULT(RTX_COMM_ARITH & (~1)) (RTX_COMM_ARITH & RTX_ARITHMETIC_MASK(~1))
100#define RTX_BINARY_MASK(~3) (~3)
101#define RTX_BINARY_RESULT(RTX_COMPARE & (~3)) (RTX_COMPARE & RTX_BINARY_MASK(~3))
102#define RTX_COMMUTATIVE_MASK(~2) (~2)
103#define RTX_COMMUTATIVE_RESULT(RTX_COMM_COMPARE & (~2)) (RTX_COMM_COMPARE & RTX_COMMUTATIVE_MASK(~2))
104#define RTX_NON_COMMUTATIVE_RESULT(RTX_COMPARE & (~2)) (RTX_COMPARE & RTX_COMMUTATIVE_MASK(~2))
105
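Reading the mask/result pairs against the enum values above: RTX_ARITHMETIC_MASK (~1) ignores the commutativity bit, RTX_COMMUTATIVE_MASK (~2) ignores the comparison/arithmetic bit, and RTX_BINARY_MASK (~3) accepts any of the four binary classes. A few standalone compile-time checks restating just the values involved (the enumerator names here are invented so the snippet compiles on its own):

/* Standalone restatement of the two-bit encoding used by the mask/result
   pairs above: with CMP = 0, COMM_CMP = 1, BIN_ARITH = 2, COMM_ARITH = 3,
   the commutative classes are the odd ones and the arithmetic classes
   are the ones with value 2 or 3.  */
enum sketch_class { CMP = 0, COMM_CMP = 1, BIN_ARITH = 2, COMM_ARITH = 3, UNARY = 4 };

/* BINARY_P-style test: clearing both low bits leaves 0 exactly for the
   four binary classes (cf. RTX_BINARY_MASK / RTX_BINARY_RESULT).  */
static_assert ((CMP & ~3) == 0 && (COMM_CMP & ~3) == 0
               && (BIN_ARITH & ~3) == 0 && (COMM_ARITH & ~3) == 0
               && (UNARY & ~3) != 0, "binary test");

/* ARITHMETIC_P-style test: ignoring the commutativity bit equates the two
   arithmetic classes and excludes the comparisons (cf. RTX_ARITHMETIC_MASK).  */
static_assert ((BIN_ARITH & ~1) == (COMM_ARITH & ~1)
               && (CMP & ~1) != (BIN_ARITH & ~1), "arithmetic test");

/* Commutativity test: ignoring the comparison/arithmetic bit selects
   COMM_CMP and COMM_ARITH (cf. RTX_COMMUTATIVE_MASK / RTX_COMMUTATIVE_RESULT).  */
static_assert ((COMM_CMP & ~2) == (COMM_ARITH & ~2)
               && (CMP & ~2) != (COMM_CMP & ~2), "commutative test");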
106extern const unsigned char rtx_length[NUM_RTX_CODE((int) LAST_AND_UNUSED_RTX_CODE)];
107#define GET_RTX_LENGTH(CODE)(rtx_length[(int) (CODE)]) (rtx_length[(int) (CODE)])
108
109extern const char * const rtx_name[NUM_RTX_CODE((int) LAST_AND_UNUSED_RTX_CODE)];
110#define GET_RTX_NAME(CODE)(rtx_name[(int) (CODE)]) (rtx_name[(int) (CODE)])
111
112extern const char * const rtx_format[NUM_RTX_CODE((int) LAST_AND_UNUSED_RTX_CODE)];
113#define GET_RTX_FORMAT(CODE)(rtx_format[(int) (CODE)]) (rtx_format[(int) (CODE)])
114
115extern const enum rtx_class rtx_class[NUM_RTX_CODE((int) LAST_AND_UNUSED_RTX_CODE)];
116#define GET_RTX_CLASS(CODE)(rtx_class[(int) (CODE)]) (rtx_class[(int) (CODE)])
117
118/* True if CODE is part of the insn chain (i.e. has INSN_UID, PREV_INSN
119 and NEXT_INSN fields). */
120#define INSN_CHAIN_CODE_P(CODE)((unsigned long) (CODE) - (unsigned long) (DEBUG_INSN) <= (
unsigned long) (NOTE) - (unsigned long) (DEBUG_INSN))
IN_RANGE (CODE, DEBUG_INSN, NOTE)((unsigned long) (CODE) - (unsigned long) (DEBUG_INSN) <= (
unsigned long) (NOTE) - (unsigned long) (DEBUG_INSN))
121
122extern const unsigned char rtx_code_size[NUM_RTX_CODE((int) LAST_AND_UNUSED_RTX_CODE)];
123extern const unsigned char rtx_next[NUM_RTX_CODE((int) LAST_AND_UNUSED_RTX_CODE)];
124
125/* The flags and bitfields of an ADDR_DIFF_VEC. BASE is the base label
126 relative to which the offsets are calculated, as explained in rtl.def. */
127struct addr_diff_vec_flags
128{
129 /* Set at the start of shorten_branches - ONLY WHEN OPTIMIZING - : */
130 unsigned min_align: 8;
131 /* Flags: */
132 unsigned base_after_vec: 1; /* BASE is after the ADDR_DIFF_VEC. */
133 unsigned min_after_vec: 1; /* minimum address target label is
134 after the ADDR_DIFF_VEC. */
135 unsigned max_after_vec: 1; /* maximum address target label is
136 after the ADDR_DIFF_VEC. */
137 unsigned min_after_base: 1; /* minimum address target label is
138 after BASE. */
139 unsigned max_after_base: 1; /* maximum address target label is
140 after BASE. */
141 /* Set by the actual branch shortening process - ONLY WHEN OPTIMIZING - : */
142 unsigned offset_unsigned: 1; /* offsets have to be treated as unsigned. */
143 unsigned : 2;
144 unsigned scale : 8;
145};
146
147/* Structure used to describe the attributes of a MEM. These are hashed
148 so MEMs that have the same attributes share a data structure. This means
149 they cannot be modified in place. */
150class GTY(()) mem_attrs
151{
152public:
153 mem_attrs ();
154
155 /* The expression that the MEM accesses, or null if not known.
156 This expression might be larger than the memory reference itself.
157 (In other words, the MEM might access only part of the object.) */
158 tree expr;
159
160 /* The offset of the memory reference from the start of EXPR.
161 Only valid if OFFSET_KNOWN_P. */
162 poly_int64 offset;
163
164 /* The size of the memory reference in bytes. Only valid if
165 SIZE_KNOWN_P. */
166 poly_int64 size;
167
168 /* The alias set of the memory reference. */
169 alias_set_type alias;
170
171 /* The alignment of the reference in bits. Always a multiple of
172 BITS_PER_UNIT. Note that EXPR may have a stricter alignment
173 than the memory reference itself. */
174 unsigned int align;
175
176 /* The address space that the memory reference uses. */
177 unsigned char addrspace;
178
179 /* True if OFFSET is known. */
180 bool offset_known_p;
181
182 /* True if SIZE is known. */
183 bool size_known_p;
184};
185
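Because equal mem_attrs are shared, changing a MEM's attributes means looking up or creating a different canonical record rather than editing the one already attached. A minimal standalone sketch of such an interning scheme, with an invented two-field attribute struct and hash (not GCC's mem_attrs or its hashing):

#include <cstdint>
#include <unordered_set>

struct attrs
{
  std::int64_t offset;
  unsigned int align;
  bool operator== (const attrs &o) const
  { return offset == o.offset && align == o.align; }
};

struct attrs_hash
{
  std::size_t operator() (const attrs &a) const
  { return std::hash<std::int64_t> () (a.offset) ^ (a.align * 0x9e3779b9u); }
};

/* Interning table: every distinct attribute combination is stored once,
   and users hold pointers to the canonical (immutable) copy.  */
static std::unordered_set<attrs, attrs_hash> attrs_table;

static const attrs *
intern_attrs (const attrs &a)
{
  return &*attrs_table.insert (a).first;   /* existing or newly inserted */
}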
186/* Structure used to describe the attributes of a REG in similar way as
187 mem_attrs does for MEM above. Note that the OFFSET field is calculated
188 in the same way as for mem_attrs, rather than in the same way as a
189 SUBREG_BYTE. For example, if a big-endian target stores a byte
190 object in the low part of a 4-byte register, the OFFSET field
191 will be -3 rather than 0. */
192
193class GTY((for_user)) reg_attrs {
194public:
195 tree decl; /* decl corresponding to REG. */
196 poly_int64 offset; /* Offset from start of DECL. */
197};
198
199/* Common union for an element of an rtx. */
200
201union rtunion
202{
203 int rt_int;
204 unsigned int rt_uint;
205 poly_uint16_pod rt_subreg;
206 const char *rt_str;
207 rtx rt_rtx;
208 rtvec rt_rtvec;
209 machine_mode rt_type;
210 addr_diff_vec_flags rt_addr_diff_vec_flags;
211 struct cselib_val *rt_cselib;
212 tree rt_tree;
213 basic_block rt_bb;
214 mem_attrs *rt_mem;
215 class constant_descriptor_rtx *rt_constant;
216 struct dw_cfi_node *rt_cfi;
217};
218
219/* Describes the properties of a REG. */
220struct GTY(()) reg_info {
221 /* The value of REGNO. */
222 unsigned int regno;
223
224 /* The value of REG_NREGS. */
225 unsigned int nregs : 8;
226 unsigned int unused : 24;
227
228 /* The value of REG_ATTRS. */
229 reg_attrs *attrs;
230};
231
232/* This structure remembers the position of a SYMBOL_REF within an
233 object_block structure. A SYMBOL_REF only provides this information
234 if SYMBOL_REF_HAS_BLOCK_INFO_P is true. */
235struct GTY(()) block_symbol {
236 /* The usual SYMBOL_REF fields. */
237 rtunion GTY ((skip)) fld[2];
238
239 /* The block that contains this object. */
240 struct object_block *block;
241
242 /* The offset of this object from the start of its block. It is negative
243 if the symbol has not yet been assigned an offset. */
244 HOST_WIDE_INTlong offset;
245};
246
247/* Describes a group of objects that are to be placed together in such
248 a way that their relative positions are known. */
249struct GTY((for_user)) object_block {
250 /* The section in which these objects should be placed. */
251 section *sect;
252
253 /* The alignment of the first object, measured in bits. */
254 unsigned int alignment;
255
256 /* The total size of the objects, measured in bytes. */
257 HOST_WIDE_INTlong size;
258
259 /* The SYMBOL_REFs for each object. The vector is sorted in
260 order of increasing offset and the following conditions will
261 hold for each element X:
262
263 SYMBOL_REF_HAS_BLOCK_INFO_P (X)
264 !SYMBOL_REF_ANCHOR_P (X)
265 SYMBOL_REF_BLOCK (X) == [address of this structure]
266 SYMBOL_REF_BLOCK_OFFSET (X) >= 0. */
267 vec<rtx, va_gc> *objects;
268
269 /* All the anchor SYMBOL_REFs used to address these objects, sorted
270 in order of increasing offset, and then increasing TLS model.
271 The following conditions will hold for each element X in this vector:
272
273 SYMBOL_REF_HAS_BLOCK_INFO_P (X)
274 SYMBOL_REF_ANCHOR_P (X)
275 SYMBOL_REF_BLOCK (X) == [address of this structure]
276 SYMBOL_REF_BLOCK_OFFSET (X) >= 0. */
277 vec<rtx, va_gc> *anchors;
278};
279
280struct GTY((variable_size)) hwivec_def {
281 HOST_WIDE_INTlong elem[1];
282};
283
284/* Number of elements of the HWIVEC if RTX is a CONST_WIDE_INT. */
285#define CWI_GET_NUM_ELEM(RTX)((int)__extension__ ({ __typeof ((RTX)) const _rtx = ((RTX));
if (((enum rtx_code) (_rtx)->code) != CONST_WIDE_INT) rtl_check_failed_flag
("CWI_GET_NUM_ELEM", _rtx, "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/rtl.h"
, 285, __FUNCTION__); _rtx; })->u2.num_elem)
\
286 ((int)RTL_FLAG_CHECK1("CWI_GET_NUM_ELEM", (RTX), CONST_WIDE_INT)__extension__ ({ __typeof ((RTX)) const _rtx = ((RTX)); if ((
(enum rtx_code) (_rtx)->code) != CONST_WIDE_INT) rtl_check_failed_flag
("CWI_GET_NUM_ELEM", _rtx, "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/rtl.h"
, 286, __FUNCTION__); _rtx; })
->u2.num_elem)
287#define CWI_PUT_NUM_ELEM(RTX, NUM)(__extension__ ({ __typeof ((RTX)) const _rtx = ((RTX)); if (
((enum rtx_code) (_rtx)->code) != CONST_WIDE_INT) rtl_check_failed_flag
("CWI_PUT_NUM_ELEM", _rtx, "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/rtl.h"
, 287, __FUNCTION__); _rtx; })->u2.num_elem = (NUM))
\
288 (RTL_FLAG_CHECK1("CWI_PUT_NUM_ELEM", (RTX), CONST_WIDE_INT)__extension__ ({ __typeof ((RTX)) const _rtx = ((RTX)); if ((
(enum rtx_code) (_rtx)->code) != CONST_WIDE_INT) rtl_check_failed_flag
("CWI_PUT_NUM_ELEM", _rtx, "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/rtl.h"
, 288, __FUNCTION__); _rtx; })
->u2.num_elem = (NUM))
289
290struct GTY((variable_size)) const_poly_int_def {
291 trailing_wide_ints<NUM_POLY_INT_COEFFS1> coeffs;
292};
293
294/* RTL expression ("rtx"). */
295
296/* The GTY "desc" and "tag" options below are a kludge: we need a desc
297 field for gengtype to recognize that inheritance is occurring,
298 so that all subclasses are redirected to the traversal hook for the
299 base class.
300 However, all of the fields are in the base class, and special-casing
301 is at work. Hence we use desc and tag of 0, generating a switch
302 statement of the form:
303 switch (0)
304 {
305 case 0: // all the work happens here
306 }
307 in order to work with the existing special-casing in gengtype. */
308
309struct GTY((desc("0"), tag("0"),
310 chain_next ("RTX_NEXT (&%h)"),
311 chain_prev ("RTX_PREV (&%h)"))) rtx_def {
312 /* The kind of expression this is. */
313 ENUM_BITFIELD(rtx_code)enum rtx_code code: 16;
314
315 /* The kind of value the expression has. */
316 ENUM_BITFIELD(machine_mode)enum machine_mode mode : 8;
317
318 /* 1 in a MEM if we should keep the alias set for this mem unchanged
319 when we access a component.
320 1 in a JUMP_INSN if it is a crossing jump.
321 1 in a CALL_INSN if it is a sibling call.
322 1 in a SET that is for a return.
323 In a CODE_LABEL, part of the two-bit alternate entry field.
324 1 in a CONCAT is VAL_EXPR_IS_COPIED in var-tracking.cc.
325 1 in a VALUE is SP_BASED_VALUE_P in cselib.cc.
326 1 in a SUBREG generated by LRA for reload insns.
327 1 in a REG if this is a static chain register.
328 Dumped as "/j" in RTL dumps. */
329 unsigned int jump : 1;
330 /* In a CODE_LABEL, part of the two-bit alternate entry field.
331 1 in a MEM if it cannot trap.
332 1 in a CALL_INSN logically equivalent to
333 ECF_LOOPING_CONST_OR_PURE and DECL_LOOPING_CONST_OR_PURE_P.
334 1 in a VALUE is SP_DERIVED_VALUE_P in cselib.cc.
335 Dumped as "/c" in RTL dumps. */
336 unsigned int call : 1;
337 /* 1 in a REG, MEM, or CONCAT if the value is set at most once, anywhere.
338 1 in a SUBREG used for SUBREG_PROMOTED_UNSIGNED_P.
339 1 in a SYMBOL_REF if it addresses something in the per-function
340 constants pool.
341 1 in a CALL_INSN logically equivalent to ECF_CONST and TREE_READONLY.
342 1 in a NOTE, or EXPR_LIST for a const call.
343 1 in a JUMP_INSN of an annulling branch.
344 1 in a CONCAT is VAL_EXPR_IS_CLOBBERED in var-tracking.cc.
345 1 in a preserved VALUE is PRESERVED_VALUE_P in cselib.cc.
346 1 in a clobber temporarily created for LRA.
347 Dumped as "/u" in RTL dumps. */
348 unsigned int unchanging : 1;
349 /* 1 in a MEM or ASM_OPERANDS expression if the memory reference is volatile.
350 1 in an INSN, CALL_INSN, JUMP_INSN, CODE_LABEL, BARRIER, or NOTE
351 if it has been deleted.
352 1 in a REG expression if corresponds to a variable declared by the user,
353 0 for an internally generated temporary.
354 1 in a SUBREG used for SUBREG_PROMOTED_UNSIGNED_P.
355 1 in a LABEL_REF, REG_LABEL_TARGET or REG_LABEL_OPERAND note for a
356 non-local label.
357 In a SYMBOL_REF, this flag is used for machine-specific purposes.
358 In a PREFETCH, this flag indicates that it should be considered a
359 scheduling barrier.
360 1 in a CONCAT is VAL_NEEDS_RESOLUTION in var-tracking.cc.
361 Dumped as "/v" in RTL dumps. */
362 unsigned int volatil : 1;
363 /* 1 in a REG if the register is used only in the exit code of a loop.
364 1 in a SUBREG expression if was generated from a variable with a
365 promoted mode.
366 1 in a CODE_LABEL if the label is used for nonlocal gotos
367 and must not be deleted even if its count is zero.
368 1 in an INSN, JUMP_INSN or CALL_INSN if this insn must be scheduled
369 together with the preceding insn. Valid only within sched.
370 1 in an INSN, JUMP_INSN, or CALL_INSN if insn is in a delay slot and
371 from the target of a branch. Valid from reorg until end of compilation;
372 cleared before used.
373
374 The name of the field is historical. It used to be used in MEMs
375 to record whether the MEM accessed part of a structure.
376 Dumped as "/s" in RTL dumps. */
377 unsigned int in_struct : 1;
378 /* At the end of RTL generation, 1 if this rtx is used. This is used for
379 copying shared structure. See `unshare_all_rtl'.
380 In a REG, this is not needed for that purpose, and used instead
381 in `leaf_renumber_regs_insn'.
382 1 in a SYMBOL_REF, means that emit_library_call
383 has used it as the function.
384 1 in a CONCAT is VAL_HOLDS_TRACK_EXPR in var-tracking.cc.
385 1 in a VALUE or DEBUG_EXPR is VALUE_RECURSED_INTO in var-tracking.cc. */
386 unsigned int used : 1;
387 /* 1 in an INSN or a SET if this rtx is related to the call frame,
388 either changing how we compute the frame address or saving and
389 restoring registers in the prologue and epilogue.
390 1 in a REG or MEM if it is a pointer.
391 1 in a SYMBOL_REF if it addresses something in the per-function
392 constant string pool.
393 1 in a VALUE is VALUE_CHANGED in var-tracking.cc.
394 Dumped as "/f" in RTL dumps. */
395 unsigned frame_related : 1;
396 /* 1 in a REG or PARALLEL that is the current function's return value.
397 1 in a SYMBOL_REF for a weak symbol.
398 1 in a CALL_INSN logically equivalent to ECF_PURE and DECL_PURE_P.
399 1 in a CONCAT is VAL_EXPR_HAS_REVERSE in var-tracking.cc.
400 1 in a VALUE or DEBUG_EXPR is NO_LOC_P in var-tracking.cc.
401 Dumped as "/i" in RTL dumps. */
402 unsigned return_val : 1;
403
404 union {
405 /* The final union field is aligned to 64 bits on LP64 hosts,
406 giving a 32-bit gap after the fields above. We optimize the
407 layout for that case and use the gap for extra code-specific
408 information. */
409
410 /* The ORIGINAL_REGNO of a REG. */
411 unsigned int original_regno;
412
413 /* The INSN_UID of an RTX_INSN-class code. */
414 int insn_uid;
415
416 /* The SYMBOL_REF_FLAGS of a SYMBOL_REF. */
417 unsigned int symbol_ref_flags;
418
419 /* The PAT_VAR_LOCATION_STATUS of a VAR_LOCATION. */
420 enum var_init_status var_location_status;
421
422 /* In a CONST_WIDE_INT (aka hwivec_def), this is the number of
423 HOST_WIDE_INTs in the hwivec_def. */
424 unsigned int num_elem;
425
426 /* Information about a CONST_VECTOR. */
427 struct
428 {
429 /* The value of CONST_VECTOR_NPATTERNS. */
430 unsigned int npatterns : 16;
431
432 /* The value of CONST_VECTOR_NELTS_PER_PATTERN. */
433 unsigned int nelts_per_pattern : 8;
434
435 /* For future expansion. */
436 unsigned int unused : 8;
437 } const_vector;
438 } GTY ((skip)) u2;
439
440 /* The first element of the operands of this rtx.
441 The number of operands and their types are controlled
442 by the `code' field, according to rtl.def. */
443 union u {
444 rtunion fld[1];
445 HOST_WIDE_INTlong hwint[1];
446 struct reg_info reg;
447 struct block_symbol block_sym;
448 struct real_value rv;
449 struct fixed_value fv;
450 struct hwivec_def hwiv;
451 struct const_poly_int_def cpi;
452 } GTY ((special ("rtx_def"), desc ("GET_CODE (&%0)"))) u;
453};
454
455/* A node for constructing singly-linked lists of rtx. */
456
457struct GTY(()) rtx_expr_list : public rtx_def
458{
459private:
460 /* No extra fields, but adds invariant: (GET_CODE (X) == EXPR_LIST). */
461
462public:
463 /* Get next in list. */
464 rtx_expr_list *next () const;
465
466 /* Get at the underlying rtx. */
467 rtx element () const;
468};
469
470template <>
471template <>
472inline bool
473is_a_helper <rtx_expr_list *>::test (rtx rt)
474{
475 return rt->code == EXPR_LIST;
476}
477
478struct GTY(()) rtx_insn_list : public rtx_def
479{
480private:
481 /* No extra fields, but adds invariant: (GET_CODE (X) == INSN_LIST).
482
483 This is an instance of:
484
485 DEF_RTL_EXPR(INSN_LIST, "insn_list", "ue", RTX_EXTRA)
486
487 i.e. a node for constructing singly-linked lists of rtx_insn *, where
488 the list is "external" to the insn (as opposed to the doubly-linked
489 list embedded within rtx_insn itself). */
490
491public:
492 /* Get next in list. */
493 rtx_insn_list *next () const;
494
495 /* Get at the underlying instruction. */
496 rtx_insn *insn () const;
497
498};
499
500template <>
501template <>
502inline bool
503is_a_helper <rtx_insn_list *>::test (rtx rt)
504{
505 return rt->code == INSN_LIST;
506}
507
508/* A node with invariant GET_CODE (X) == SEQUENCE i.e. a vector of rtx,
509 typically (but not always) of rtx_insn *, used in the late passes. */
510
511struct GTY(()) rtx_sequence : public rtx_def
512{
513private:
514 /* No extra fields, but adds invariant: (GET_CODE (X) == SEQUENCE). */
515
516public:
517 /* Get number of elements in sequence. */
518 int len () const;
519
520 /* Get i-th element of the sequence. */
521 rtx element (int index) const;
522
523 /* Get i-th element of the sequence, with a checked cast to
524 rtx_insn *. */
525 rtx_insn *insn (int index) const;
526};
527
528template <>
529template <>
530inline bool
531is_a_helper <rtx_sequence *>::test (rtx rt)
532{
533 return rt->code == SEQUENCE;
534}
535
536template <>
537template <>
538inline bool
539is_a_helper <const rtx_sequence *>::test (const_rtx rt)
540{
541 return rt->code == SEQUENCE;
542}
543
544struct GTY(()) rtx_insn : public rtx_def
545{
546public:
547 /* No extra fields, but adds the invariant:
548
549 (INSN_P (X)
550 || NOTE_P (X)
551 || JUMP_TABLE_DATA_P (X)
552 || BARRIER_P (X)
553 || LABEL_P (X))
554
555 i.e. that we must be able to use the following:
556 INSN_UID ()
557 NEXT_INSN ()
558 PREV_INSN ()
559 i.e. we have an rtx that has an INSN_UID field and can be part of
560 a linked list of insns.
561 */
562
563 /* Returns true if this insn has been deleted. */
564
565 bool deleted () const { return volatil; }
566
567 /* Mark this insn as deleted. */
568
569 void set_deleted () { volatil = true; }
570
571 /* Mark this insn as not deleted. */
572
573 void set_undeleted () { volatil = false; }
574};
575
576/* Subclasses of rtx_insn. */
577
578struct GTY(()) rtx_debug_insn : public rtx_insn
579{
580 /* No extra fields, but adds the invariant:
581 DEBUG_INSN_P (X) aka (GET_CODE (X) == DEBUG_INSN)
582 i.e. an annotation for tracking variable assignments.
583
584 This is an instance of:
585 DEF_RTL_EXPR(DEBUG_INSN, "debug_insn", "uuBeiie", RTX_INSN)
586 from rtl.def. */
587};
588
589struct GTY(()) rtx_nonjump_insn : public rtx_insn
590{
591 /* No extra fields, but adds the invariant:
592 NONJUMP_INSN_P (X) aka (GET_CODE (X) == INSN)
593 i.e. an instruction that cannot jump.
594
595 This is an instance of:
596 DEF_RTL_EXPR(INSN, "insn", "uuBeiie", RTX_INSN)
597 from rtl.def. */
598};
599
600struct GTY(()) rtx_jump_insn : public rtx_insn
601{
602public:
603 /* No extra fields, but adds the invariant:
604 JUMP_P (X) aka (GET_CODE (X) == JUMP_INSN)
605 i.e. an instruction that can possibly jump.
606
607 This is an instance of:
608 DEF_RTL_EXPR(JUMP_INSN, "jump_insn", "uuBeiie0", RTX_INSN)
609 from rtl.def. */
610
611 /* Returns jump target of this instruction. The returned value is not
612 necessarily a code label: it may also be a RETURN or SIMPLE_RETURN
613 expression. Also, when the code label is marked "deleted", it is
614 replaced by a NOTE. In some cases the value is NULL_RTX. */
615
616 inline rtx jump_label () const;
617
618 /* Returns jump target cast to rtx_code_label *. */
619
620 inline rtx_code_label *jump_target () const;
621
622 /* Set jump target. */
623
624 inline void set_jump_target (rtx_code_label *);
625};
626
627struct GTY(()) rtx_call_insn : public rtx_insn
628{
629 /* No extra fields, but adds the invariant:
630 CALL_P (X) aka (GET_CODE (X) == CALL_INSN)
631 i.e. an instruction that can possibly call a subroutine
632 but which will not change which instruction comes next
633 in the current function.
634
635 This is an instance of:
636 DEF_RTL_EXPR(CALL_INSN, "call_insn", "uuBeiiee", RTX_INSN)
637 from rtl.def. */
638};
639
640struct GTY(()) rtx_jump_table_data : public rtx_insn
641{
642 /* No extra fields, but adds the invariant:
643 JUMP_TABLE_DATA_P (X) aka (GET_CODE (INSN) == JUMP_TABLE_DATA)
644 i.e. the data for a jump table, considered an instruction for
645 historical reasons.
646
647 This is an instance of:
648 DEF_RTL_EXPR(JUMP_TABLE_DATA, "jump_table_data", "uuBe0000", RTX_INSN)
649 from rtl.def. */
650
651 /* This can be either:
652
653 (a) a table of absolute jumps, in which case PATTERN (this) is an
654 ADDR_VEC with arg 0 a vector of labels, or
655
656 (b) a table of relative jumps (e.g. for -fPIC), in which case
657 PATTERN (this) is an ADDR_DIFF_VEC, with arg 0 a LABEL_REF and
658 arg 1 the vector of labels.
659
660 This method gets the underlying vec. */
661
662 inline rtvec get_labels () const;
663 inline scalar_int_mode get_data_mode () const;
664};
665
666struct GTY(()) rtx_barrier : public rtx_insn
667{
668 /* No extra fields, but adds the invariant:
669 BARRIER_P (X) aka (GET_CODE (X) == BARRIER)
670 i.e. a marker that indicates that control will not flow through.
671
672 This is an instance of:
673 DEF_RTL_EXPR(BARRIER, "barrier", "uu00000", RTX_EXTRA)
674 from rtl.def. */
675};
676
677struct GTY(()) rtx_code_label : public rtx_insn
678{
679 /* No extra fields, but adds the invariant:
680 LABEL_P (X) aka (GET_CODE (X) == CODE_LABEL)
681 i.e. a label in the assembler.
682
683 This is an instance of:
684 DEF_RTL_EXPR(CODE_LABEL, "code_label", "uuB00is", RTX_EXTRA)
685 from rtl.def. */
686};
687
688struct GTY(()) rtx_note : public rtx_insn
689{
690 /* No extra fields, but adds the invariant:
691 NOTE_P(X) aka (GET_CODE (X) == NOTE)
692 i.e. a note about the corresponding source code.
693
694 This is an instance of:
695 DEF_RTL_EXPR(NOTE, "note", "uuB0ni", RTX_EXTRA)
696 from rtl.def. */
697};
698
699/* The size in bytes of an rtx header (code, mode and flags). */
700#define RTX_HDR_SIZE__builtin_offsetof(struct rtx_def, u) offsetof (struct rtx_def, u)__builtin_offsetof(struct rtx_def, u)
701
702/* The size in bytes of an rtx with code CODE. */
703#define RTX_CODE_SIZE(CODE)rtx_code_size[CODE] rtx_code_size[CODE]
704
705#define NULL_RTX(rtx) 0 (rtx) 0
706
707/* The "next" and "previous" RTX, relative to this one. */
708
709#define RTX_NEXT(X)(rtx_next[((enum rtx_code) (X)->code)] == 0 ? __null : *(rtx
*)(((char *)X) + rtx_next[((enum rtx_code) (X)->code)]))
(rtx_next[GET_CODE (X)((enum rtx_code) (X)->code)] == 0 ? NULL__null \
710 : *(rtx *)(((char *)X) + rtx_next[GET_CODE (X)((enum rtx_code) (X)->code)]))
711
712/* FIXME: the "NEXT_INSN (PREV_INSN (X)) == X" condition shouldn't be needed.
713 */
714#define RTX_PREV(X)(((((((enum rtx_code) (X)->code) == INSN) || (((enum rtx_code
) (X)->code) == JUMP_INSN) || (((enum rtx_code) (X)->code
) == CALL_INSN)) || (((enum rtx_code) (X)->code) == DEBUG_INSN
)) || (((enum rtx_code) (X)->code) == NOTE) || (((enum rtx_code
) (X)->code) == JUMP_TABLE_DATA) || (((enum rtx_code) (X)->
code) == BARRIER) || (((enum rtx_code) (X)->code) == CODE_LABEL
)) && PREV_INSN (as_a <rtx_insn *> (X)) != __null
&& NEXT_INSN (PREV_INSN (as_a <rtx_insn *> (X)
)) == X ? PREV_INSN (as_a <rtx_insn *> (X)) : __null)
((INSN_P (X)(((((enum rtx_code) (X)->code) == INSN) || (((enum rtx_code
) (X)->code) == JUMP_INSN) || (((enum rtx_code) (X)->code
) == CALL_INSN)) || (((enum rtx_code) (X)->code) == DEBUG_INSN
))
\
715 || NOTE_P (X)(((enum rtx_code) (X)->code) == NOTE) \
716 || JUMP_TABLE_DATA_P (X)(((enum rtx_code) (X)->code) == JUMP_TABLE_DATA) \
717 || BARRIER_P (X)(((enum rtx_code) (X)->code) == BARRIER) \
718 || LABEL_P (X)(((enum rtx_code) (X)->code) == CODE_LABEL)) \
719 && PREV_INSN (as_a <rtx_insn *> (X)) != NULL__null \
720 && NEXT_INSN (PREV_INSN (as_a <rtx_insn *> (X))) == X \
721 ? PREV_INSN (as_a <rtx_insn *> (X)) : NULL__null)
722
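RTX_NEXT and RTX_PREV above rely on a table-driven layout trick: rtx_next[] records, per code, the byte offset at which that code stores its chain pointer (0 meaning "no chain"), and the macro reads through that offset instead of naming a member. A standalone sketch of offset-table chaining with two invented node kinds:

#include <cstddef>
#include <cstdio>

struct chained   { int code; chained *next; int payload; };
struct unchained { int code; int payload; };

/* Per-code byte offset of the chain pointer; 0 doubles as "no chain",
   which works because the code field always occupies offset 0.  */
static const std::size_t next_offset[2] = {
  offsetof (chained, next),   /* code 0: has a next pointer */
  0                           /* code 1: not part of any chain */
};

static void *
generic_next (void *x)
{
  int code = *(int *) x;                       /* code is the first field */
  std::size_t off = next_offset[code];
  return off == 0 ? nullptr : *(void **) ((char *) x + off);
}

int
main ()
{
  chained b = { 0, nullptr, 2 };
  chained a = { 0, &b, 1 };
  std::printf ("next of a is b: %d\n", generic_next (&a) == (void *) &b);
}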
723/* Define macros to access the `code' field of the rtx. */
724
725#define GET_CODE(RTX)((enum rtx_code) (RTX)->code) ((enum rtx_code) (RTX)->code)
726#define PUT_CODE(RTX, CODE)((RTX)->code = (CODE)) ((RTX)->code = (CODE))
727
728#define GET_MODE(RTX)((machine_mode) (RTX)->mode) ((machine_mode) (RTX)->mode)
729#define PUT_MODE_RAW(RTX, MODE)((RTX)->mode = (MODE)) ((RTX)->mode = (MODE))
730
731/* RTL vector. These appear inside RTX's when there is a need
732 for a variable number of things. The principle use is inside
733 PARALLEL expressions. */
734
735struct GTY(()) rtvec_def {
736 int num_elem; /* number of elements */
737 rtx GTY ((length ("%h.num_elem"))) elem[1];
738};
739
740#define NULL_RTVEC(rtvec) 0 (rtvec) 0
741
742#define GET_NUM_ELEM(RTVEC)((RTVEC)->num_elem) ((RTVEC)->num_elem)
743#define PUT_NUM_ELEM(RTVEC, NUM)((RTVEC)->num_elem = (NUM)) ((RTVEC)->num_elem = (NUM))
744
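rtvec_def uses the classic trailing-array idiom: the declared one-element array is a placeholder, and the allocator reserves space for num_elem entries in a single block. A standalone sketch of that allocation pattern with an int payload (plain malloc, no GC, N >= 1 assumed):

#include <cstdlib>
#include <cstring>

struct int_vec
{
  int num_elem;    /* number of elements actually allocated */
  int elem[1];     /* trailing array: the real length is num_elem */
};

/* Allocate an int_vec with room for N elements (N >= 1 assumed); the
   struct already contains one element, so add space for N - 1 more.  */
static int_vec *
alloc_int_vec (int n)
{
  std::size_t sz = sizeof (int_vec) + (n - 1) * sizeof (int);
  int_vec *v = (int_vec *) std::malloc (sz);
  if (v)
    {
      v->num_elem = n;
      std::memset (v->elem, 0, n * sizeof (int));
    }
  return v;
}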
745/* Predicate yielding nonzero iff X is an rtx for a register. */
746#define REG_P(X)(((enum rtx_code) (X)->code) == REG) (GET_CODE (X)((enum rtx_code) (X)->code) == REG)
747
748/* Predicate yielding nonzero iff X is an rtx for a memory location. */
749#define MEM_P(X)(((enum rtx_code) (X)->code) == MEM) (GET_CODE (X)((enum rtx_code) (X)->code) == MEM)
750
751#if TARGET_SUPPORTS_WIDE_INT1
752
753/* Match CONST_*s that can represent compile-time constant integers. */
754#define CASE_CONST_SCALAR_INTcase CONST_INT: case CONST_WIDE_INT \
755 case CONST_INT: \
756 case CONST_WIDE_INT
757
758/* Match CONST_*s for which pointer equality corresponds to value
759 equality. */
760#define CASE_CONST_UNIQUEcase CONST_INT: case CONST_WIDE_INT: case CONST_POLY_INT: case
CONST_DOUBLE: case CONST_FIXED
\
761 case CONST_INT: \
762 case CONST_WIDE_INT: \
763 case CONST_POLY_INT: \
764 case CONST_DOUBLE: \
765 case CONST_FIXED
766
767/* Match all CONST_* rtxes. */
768#define CASE_CONST_ANYcase CONST_INT: case CONST_WIDE_INT: case CONST_POLY_INT: case
CONST_DOUBLE: case CONST_FIXED: case CONST_VECTOR
\
769 case CONST_INT: \
770 case CONST_WIDE_INT: \
771 case CONST_POLY_INT: \
772 case CONST_DOUBLE: \
773 case CONST_FIXED: \
774 case CONST_VECTOR
775
776#else
777
778/* Match CONST_*s that can represent compile-time constant integers. */
779#define CASE_CONST_SCALAR_INTcase CONST_INT: case CONST_WIDE_INT \
780 case CONST_INT: \
781 case CONST_DOUBLE
782
783/* Match CONST_*s for which pointer equality corresponds to value
784 equality. */
785#define CASE_CONST_UNIQUEcase CONST_INT: case CONST_WIDE_INT: case CONST_POLY_INT: case
CONST_DOUBLE: case CONST_FIXED
\
786 case CONST_INT: \
787 case CONST_DOUBLE: \
788 case CONST_FIXED
789
790/* Match all CONST_* rtxes. */
791#define CASE_CONST_ANYcase CONST_INT: case CONST_WIDE_INT: case CONST_POLY_INT: case
CONST_DOUBLE: case CONST_FIXED: case CONST_VECTOR
\
792 case CONST_INT: \
793 case CONST_DOUBLE: \
794 case CONST_FIXED: \
795 case CONST_VECTOR
796#endif
797
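The CASE_CONST_* macros expand to a run of case labels, so one switch arm can match several codes and the member codes can differ per configuration, as the two branches above show. A standalone miniature of the trick with a toy enum and macro (not GCC's):

#include <cstdio>

enum toy_code { TOY_INT, TOY_DOUBLE, TOY_VECTOR, TOY_REG };

/* Expands to several case labels; usable only directly inside a switch.  */
#define CASE_TOY_CONST \
  case TOY_INT:        \
  case TOY_DOUBLE:     \
  case TOY_VECTOR

static const char *
classify (toy_code c)
{
  switch (c)
    {
    CASE_TOY_CONST:
      return "constant";
    default:
      return "other";
    }
}

int
main ()
{
  std::printf ("%s %s\n", classify (TOY_DOUBLE), classify (TOY_REG));
}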
798/* Predicate yielding nonzero iff X is an rtx for a constant integer. */
799#define CONST_INT_P(X)(((enum rtx_code) (X)->code) == CONST_INT) (GET_CODE (X)((enum rtx_code) (X)->code) == CONST_INT)
800
801/* Predicate yielding nonzero iff X is an rtx for a constant integer. */
802#define CONST_WIDE_INT_P(X)(((enum rtx_code) (X)->code) == CONST_WIDE_INT) (GET_CODE (X)((enum rtx_code) (X)->code) == CONST_WIDE_INT)
803
804/* Predicate yielding nonzero iff X is an rtx for a polynomial constant
805 integer. */
806#define CONST_POLY_INT_P(X)(1 > 1 && ((enum rtx_code) (X)->code) == CONST_POLY_INT
)
\
807 (NUM_POLY_INT_COEFFS1 > 1 && GET_CODE (X)((enum rtx_code) (X)->code) == CONST_POLY_INT)
808
809/* Predicate yielding nonzero iff X is an rtx for a constant fixed-point. */
810#define CONST_FIXED_P(X)(((enum rtx_code) (X)->code) == CONST_FIXED) (GET_CODE (X)((enum rtx_code) (X)->code) == CONST_FIXED)
811
812/* Predicate yielding true iff X is an rtx for a double-int
813 or floating point constant. */
814#define CONST_DOUBLE_P(X)(((enum rtx_code) (X)->code) == CONST_DOUBLE) (GET_CODE (X)((enum rtx_code) (X)->code) == CONST_DOUBLE)
815
816/* Predicate yielding true iff X is an rtx for a double-int. */
817#define CONST_DOUBLE_AS_INT_P(X)(((enum rtx_code) (X)->code) == CONST_DOUBLE && ((
machine_mode) (X)->mode) == ((void) 0, E_VOIDmode))
\
818 (GET_CODE (X)((enum rtx_code) (X)->code) == CONST_DOUBLE && GET_MODE (X)((machine_mode) (X)->mode) == VOIDmode((void) 0, E_VOIDmode))
819
820/* Predicate yielding true iff X is an rtx for an integer constant. */
821#if TARGET_SUPPORTS_WIDE_INT1
822#define CONST_SCALAR_INT_P(X)((((enum rtx_code) (X)->code) == CONST_INT) || (((enum rtx_code
) (X)->code) == CONST_WIDE_INT))
\
823 (CONST_INT_P (X)(((enum rtx_code) (X)->code) == CONST_INT) || CONST_WIDE_INT_P (X)(((enum rtx_code) (X)->code) == CONST_WIDE_INT))
824#else
825#define CONST_SCALAR_INT_P(X)((((enum rtx_code) (X)->code) == CONST_INT) || (((enum rtx_code
) (X)->code) == CONST_WIDE_INT))
\
826 (CONST_INT_P (X)(((enum rtx_code) (X)->code) == CONST_INT) || CONST_DOUBLE_AS_INT_P (X)(((enum rtx_code) (X)->code) == CONST_DOUBLE && ((
machine_mode) (X)->mode) == ((void) 0, E_VOIDmode))
)
827#endif
828
829/* Predicate yielding true iff X is an rtx for a floating-point constant. */
830#define CONST_DOUBLE_AS_FLOAT_P(X)(((enum rtx_code) (X)->code) == CONST_DOUBLE && ((
machine_mode) (X)->mode) != ((void) 0, E_VOIDmode))
\
831 (GET_CODE (X)((enum rtx_code) (X)->code) == CONST_DOUBLE && GET_MODE (X)((machine_mode) (X)->mode) != VOIDmode((void) 0, E_VOIDmode))
832
833/* Predicate yielding nonzero iff X is an rtx for a constant vector. */
834#define CONST_VECTOR_P(X)(((enum rtx_code) (X)->code) == CONST_VECTOR) (GET_CODE (X)((enum rtx_code) (X)->code) == CONST_VECTOR)
835
836/* Predicate yielding nonzero iff X is a label insn. */
837#define LABEL_P(X)(((enum rtx_code) (X)->code) == CODE_LABEL) (GET_CODE (X)((enum rtx_code) (X)->code) == CODE_LABEL)
838
839/* Predicate yielding nonzero iff X is a jump insn. */
840#define JUMP_P(X)(((enum rtx_code) (X)->code) == JUMP_INSN) (GET_CODE (X)((enum rtx_code) (X)->code) == JUMP_INSN)
841
842/* Predicate yielding nonzero iff X is a call insn. */
843#define CALL_P(X)(((enum rtx_code) (X)->code) == CALL_INSN) (GET_CODE (X)((enum rtx_code) (X)->code) == CALL_INSN)
844
845/* 1 if RTX is a call_insn for a fake call.
846 A CALL_INSN uses the "used" flag to indicate it's a fake call. */
847#define FAKE_CALL_P(RTX)(__extension__ ({ __typeof ((RTX)) const _rtx = ((RTX)); if (
((enum rtx_code) (_rtx)->code) != CALL_INSN) rtl_check_failed_flag
("FAKE_CALL_P", _rtx, "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/rtl.h"
, 847, __FUNCTION__); _rtx; })->used)
\
848 (RTL_FLAG_CHECK1 ("FAKE_CALL_P", (RTX), CALL_INSN)__extension__ ({ __typeof ((RTX)) const _rtx = ((RTX)); if ((
(enum rtx_code) (_rtx)->code) != CALL_INSN) rtl_check_failed_flag
("FAKE_CALL_P", _rtx, "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/rtl.h"
, 848, __FUNCTION__); _rtx; })
->used)
849
850/* Predicate yielding nonzero iff X is an insn that cannot jump. */
851#define NONJUMP_INSN_P(X)(((enum rtx_code) (X)->code) == INSN) (GET_CODE (X)((enum rtx_code) (X)->code) == INSN)
852
853/* Predicate yielding nonzero iff X is a debug note/insn. */
854#define DEBUG_INSN_P(X)(((enum rtx_code) (X)->code) == DEBUG_INSN) (GET_CODE (X)((enum rtx_code) (X)->code) == DEBUG_INSN)
855
856/* Predicate yielding nonzero iff X is an insn that is not a debug insn. */
857#define NONDEBUG_INSN_P(X)((((enum rtx_code) (X)->code) == INSN) || (((enum rtx_code
) (X)->code) == JUMP_INSN) || (((enum rtx_code) (X)->code
) == CALL_INSN))
(NONJUMP_INSN_P (X)(((enum rtx_code) (X)->code) == INSN) || JUMP_P (X)(((enum rtx_code) (X)->code) == JUMP_INSN) || CALL_P (X)(((enum rtx_code) (X)->code) == CALL_INSN))
858
859/* Nonzero if DEBUG_MARKER_INSN_P may possibly hold. */
860#define MAY_HAVE_DEBUG_MARKER_INSNSglobal_options.x_debug_nonbind_markers_p debug_nonbind_markers_pglobal_options.x_debug_nonbind_markers_p
861/* Nonzero if DEBUG_BIND_INSN_P may possibly hold. */
862#define MAY_HAVE_DEBUG_BIND_INSNSglobal_options.x_flag_var_tracking_assignments flag_var_tracking_assignmentsglobal_options.x_flag_var_tracking_assignments
863/* Nonzero if DEBUG_INSN_P may possibly hold. */
864#define MAY_HAVE_DEBUG_INSNS(global_options.x_debug_nonbind_markers_p || global_options.x_flag_var_tracking_assignments
)
\
865 (MAY_HAVE_DEBUG_MARKER_INSNSglobal_options.x_debug_nonbind_markers_p || MAY_HAVE_DEBUG_BIND_INSNSglobal_options.x_flag_var_tracking_assignments)
866
867/* Predicate yielding nonzero iff X is a real insn. */
868#define INSN_P(X)(((((enum rtx_code) (X)->code) == INSN) || (((enum rtx_code
) (X)->code) == JUMP_INSN) || (((enum rtx_code) (X)->code
) == CALL_INSN)) || (((enum rtx_code) (X)->code) == DEBUG_INSN
))
(NONDEBUG_INSN_P (X)((((enum rtx_code) (X)->code) == INSN) || (((enum rtx_code
) (X)->code) == JUMP_INSN) || (((enum rtx_code) (X)->code
) == CALL_INSN))
|| DEBUG_INSN_P (X)(((enum rtx_code) (X)->code) == DEBUG_INSN))
869
870/* Predicate yielding nonzero iff X is a note insn. */
871#define NOTE_P(X)(((enum rtx_code) (X)->code) == NOTE) (GET_CODE (X)((enum rtx_code) (X)->code) == NOTE)
872
873/* Predicate yielding nonzero iff X is a barrier insn. */
874#define BARRIER_P(X)(((enum rtx_code) (X)->code) == BARRIER) (GET_CODE (X)((enum rtx_code) (X)->code) == BARRIER)
875
876/* Predicate yielding nonzero iff X is a data for a jump table. */
877#define JUMP_TABLE_DATA_P(INSN)(((enum rtx_code) (INSN)->code) == JUMP_TABLE_DATA) (GET_CODE (INSN)((enum rtx_code) (INSN)->code) == JUMP_TABLE_DATA)
878
879/* Predicate yielding nonzero iff RTX is a subreg. */
880#define SUBREG_P(RTX)(((enum rtx_code) (RTX)->code) == SUBREG) (GET_CODE (RTX)((enum rtx_code) (RTX)->code) == SUBREG)
881
882/* Predicate yielding true iff RTX is a symbol ref. */
883#define SYMBOL_REF_P(RTX)(((enum rtx_code) (RTX)->code) == SYMBOL_REF) (GET_CODE (RTX)((enum rtx_code) (RTX)->code) == SYMBOL_REF)
884
885template <>
886template <>
887inline bool
888is_a_helper <rtx_insn *>::test (rtx rt)
889{
890 return (INSN_P (rt)(((((enum rtx_code) (rt)->code) == INSN) || (((enum rtx_code
) (rt)->code) == JUMP_INSN) || (((enum rtx_code) (rt)->
code) == CALL_INSN)) || (((enum rtx_code) (rt)->code) == DEBUG_INSN
))
891 || NOTE_P (rt)(((enum rtx_code) (rt)->code) == NOTE)
892 || JUMP_TABLE_DATA_P (rt)(((enum rtx_code) (rt)->code) == JUMP_TABLE_DATA)
893 || BARRIER_P (rt)(((enum rtx_code) (rt)->code) == BARRIER)
894 || LABEL_P (rt)(((enum rtx_code) (rt)->code) == CODE_LABEL));
895}
896
897template <>
898template <>
899inline bool
900is_a_helper <const rtx_insn *>::test (const_rtx rt)
901{
902 return (INSN_P (rt)(((((enum rtx_code) (rt)->code) == INSN) || (((enum rtx_code
) (rt)->code) == JUMP_INSN) || (((enum rtx_code) (rt)->
code) == CALL_INSN)) || (((enum rtx_code) (rt)->code) == DEBUG_INSN
))
903 || NOTE_P (rt)(((enum rtx_code) (rt)->code) == NOTE)
904 || JUMP_TABLE_DATA_P (rt)(((enum rtx_code) (rt)->code) == JUMP_TABLE_DATA)
905 || BARRIER_P (rt)(((enum rtx_code) (rt)->code) == BARRIER)
906 || LABEL_P (rt)(((enum rtx_code) (rt)->code) == CODE_LABEL));
907}
908
909template <>
910template <>
911inline bool
912is_a_helper <rtx_debug_insn *>::test (rtx rt)
913{
914 return DEBUG_INSN_P (rt)(((enum rtx_code) (rt)->code) == DEBUG_INSN);
915}
916
917template <>
918template <>
919inline bool
920is_a_helper <rtx_nonjump_insn *>::test (rtx rt)
921{
922 return NONJUMP_INSN_P (rt)(((enum rtx_code) (rt)->code) == INSN);
923}
924
925template <>
926template <>
927inline bool
928is_a_helper <rtx_jump_insn *>::test (rtx rt)
929{
930 return JUMP_P (rt)(((enum rtx_code) (rt)->code) == JUMP_INSN);
931}
932
933template <>
934template <>
935inline bool
936is_a_helper <rtx_jump_insn *>::test (rtx_insn *insn)
937{
938 return JUMP_P (insn)(((enum rtx_code) (insn)->code) == JUMP_INSN);
939}
940
941template <>
942template <>
943inline bool
944is_a_helper <rtx_call_insn *>::test (rtx rt)
945{
946 return CALL_P (rt)(((enum rtx_code) (rt)->code) == CALL_INSN);
947}
948
949template <>
950template <>
951inline bool
952is_a_helper <rtx_call_insn *>::test (rtx_insn *insn)
953{
954 return CALL_P (insn)(((enum rtx_code) (insn)->code) == CALL_INSN);
955}
956
957template <>
958template <>
959inline bool
960is_a_helper <rtx_jump_table_data *>::test (rtx rt)
961{
962 return JUMP_TABLE_DATA_P (rt)(((enum rtx_code) (rt)->code) == JUMP_TABLE_DATA);
963}
964
965template <>
966template <>
967inline bool
968is_a_helper <rtx_jump_table_data *>::test (rtx_insn *insn)
969{
970 return JUMP_TABLE_DATA_P (insn)(((enum rtx_code) (insn)->code) == JUMP_TABLE_DATA);
971}
972
973template <>
974template <>
975inline bool
976is_a_helper <rtx_barrier *>::test (rtx rt)
977{
978 return BARRIER_P (rt)(((enum rtx_code) (rt)->code) == BARRIER);
979}
980
981template <>
982template <>
983inline bool
984is_a_helper <rtx_code_label *>::test (rtx rt)
985{
986 return LABEL_P (rt)(((enum rtx_code) (rt)->code) == CODE_LABEL);
987}
988
989template <>
990template <>
991inline bool
992is_a_helper <rtx_code_label *>::test (rtx_insn *insn)
993{
994 return LABEL_P (insn)(((enum rtx_code) (insn)->code) == CODE_LABEL);
995}
996
997template <>
998template <>
999inline bool
1000is_a_helper <rtx_note *>::test (rtx rt)
1001{
1002 return NOTE_P (rt)(((enum rtx_code) (rt)->code) == NOTE);
1003}
1004
1005template <>
1006template <>
1007inline bool
1008is_a_helper <rtx_note *>::test (rtx_insn *insn)
1009{
1010 return NOTE_P (insn)(((enum rtx_code) (insn)->code) == NOTE);
1011}
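/* Illustrative sketch (not part of rtl.h/final.cc): these is_a_helper
   specializations plug into the generic is_a/as_a/dyn_cast helpers from
   GCC's is-a.h.  The miniature below re-creates just enough of that
   machinery to show how one specialization drives a checked downcast; all
   sk_-prefixed names are invented for illustration.  */
#include <cassert>
enum sk_code { SK_CODE_INSN, SK_CODE_NOTE };
struct sk_base { sk_code code; };
struct sk_note_node : sk_base {};   /* "subclass" discriminated by its code field */

template <typename T> struct sk_is_a_helper;
template <>
struct sk_is_a_helper <sk_note_node *>
{
  static bool test (sk_base *r) { return r->code == SK_CODE_NOTE; }
};

template <typename T> bool sk_is_a (sk_base *r) { return sk_is_a_helper <T>::test (r); }
template <typename T> T sk_dyn_cast (sk_base *r)
{ return sk_is_a <T> (r) ? static_cast <T> (r) : nullptr; }

static void sk_cast_demo ()
{
  sk_note_node n;
  n.code = SK_CODE_NOTE;
  assert (sk_dyn_cast <sk_note_node *> (&n) != nullptr); /* downcast succeeds */
}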
1012
1013/* Predicate yielding nonzero iff X is a return or simple_return. */
1014#define ANY_RETURN_P(X)(((enum rtx_code) (X)->code) == RETURN || ((enum rtx_code) (X)->code) == SIMPLE_RETURN) \
1015 (GET_CODE (X)((enum rtx_code) (X)->code) == RETURN || GET_CODE (X)((enum rtx_code) (X)->code) == SIMPLE_RETURN)
1016
1017/* 1 if X is a unary operator. */
1018
1019#define UNARY_P(X)((rtx_class[(int) (((enum rtx_code) (X)->code))]) == RTX_UNARY) \
1020 (GET_RTX_CLASS (GET_CODE (X))(rtx_class[(int) (((enum rtx_code) (X)->code))]) == RTX_UNARY)
1021
1022/* 1 if X is a binary operator. */
1023
1024#define BINARY_P(X)(((rtx_class[(int) (((enum rtx_code) (X)->code))]) & (~3)) == (RTX_COMPARE & (~3))) \
1025 ((GET_RTX_CLASS (GET_CODE (X))(rtx_class[(int) (((enum rtx_code) (X)->code))]) & RTX_BINARY_MASK(~3)) == RTX_BINARY_RESULT(RTX_COMPARE & (~3)))
1026
1027/* 1 if X is an arithmetic operator. */
1028
1029#define ARITHMETIC_P(X)(((rtx_class[(int) (((enum rtx_code) (X)->code))]) & (~1)) == (RTX_COMM_ARITH & (~1))) \
1030 ((GET_RTX_CLASS (GET_CODE (X))(rtx_class[(int) (((enum rtx_code) (X)->code))]) & RTX_ARITHMETIC_MASK(~1)) \
1031 == RTX_ARITHMETIC_RESULT(RTX_COMM_ARITH & (~1)))
1032
1033/* 1 if X is a commutative arithmetic operator. */
1034
1035#define COMMUTATIVE_ARITH_P(X)((rtx_class[(int) (((enum rtx_code) (X)->code))]) == RTX_COMM_ARITH) \
1036 (GET_RTX_CLASS (GET_CODE (X))(rtx_class[(int) (((enum rtx_code) (X)->code))]) == RTX_COMM_ARITH)
1037
1038/* 1 if X is a commutative arithmetic operator or a comparison operator.
1039 These two are sometimes selected together because it is possible to
1040 swap the two operands. */
1041
1042#define SWAPPABLE_OPERANDS_P(X)((1 << (rtx_class[(int) (((enum rtx_code) (X)->code))])) & ((1 << RTX_COMM_ARITH) | (1 << RTX_COMM_COMPARE) | (1 << RTX_COMPARE))) \
1043 ((1 << GET_RTX_CLASS (GET_CODE (X))(rtx_class[(int) (((enum rtx_code) (X)->code))])) \
1044 & ((1 << RTX_COMM_ARITH) | (1 << RTX_COMM_COMPARE) \
1045 | (1 << RTX_COMPARE)))
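/* Illustrative sketch (not part of rtl.h/final.cc): SWAPPABLE_OPERANDS_P
   tests membership in a small set of classes with one shift and one AND
   instead of chained comparisons.  The class values below are hypothetical
   stand-ins chosen only to keep the shifts in range.  */
enum sk_swap_class { SKS_COMPARE = 0, SKS_COMM_COMPARE = 1, SKS_COMM_ARITH = 3, SKS_UNARY = 4 };
#define SKS_SWAPPABLE_OPERANDS_P(C) \
  ((1 << (C)) & ((1 << SKS_COMM_ARITH) | (1 << SKS_COMM_COMPARE) | (1 << SKS_COMPARE)))
static_assert (SKS_SWAPPABLE_OPERANDS_P (SKS_COMM_COMPARE) != 0, "commutative compares can swap operands");
static_assert (SKS_SWAPPABLE_OPERANDS_P (SKS_UNARY) == 0, "unary operators cannot");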
1046
1047/* 1 if X is a non-commutative operator. */
1048
1049#define NON_COMMUTATIVE_P(X)(((rtx_class[(int) (((enum rtx_code) (X)->code))]) & (~2)) == (RTX_COMPARE & (~2))) \
1050 ((GET_RTX_CLASS (GET_CODE (X))(rtx_class[(int) (((enum rtx_code) (X)->code))]) & RTX_COMMUTATIVE_MASK(~2)) \
1051 == RTX_NON_COMMUTATIVE_RESULT(RTX_COMPARE & (~2)))
1052
1053/* 1 if X is a commutative operator on integers. */
1054
1055#define COMMUTATIVE_P(X)(((rtx_class[(int) (((enum rtx_code) (X)->code))]) & (~2)) == (RTX_COMM_COMPARE & (~2))) \
1056 ((GET_RTX_CLASS (GET_CODE (X))(rtx_class[(int) (((enum rtx_code) (X)->code))]) & RTX_COMMUTATIVE_MASK(~2)) \
1057 == RTX_COMMUTATIVE_RESULT(RTX_COMM_COMPARE & (~2)))
1058
1059/* 1 if X is a relational operator. */
1060
1061#define COMPARISON_P(X)(((rtx_class[(int) (((enum rtx_code) (X)->code))]) & (~1)) == (RTX_COMPARE & (~1))) \
1062 ((GET_RTX_CLASS (GET_CODE (X))(rtx_class[(int) (((enum rtx_code) (X)->code))]) & RTX_COMPARE_MASK(~1)) == RTX_COMPARE_RESULT(RTX_COMPARE & (~1)))
1063
1064/* 1 if X is a constant value that is an integer. */
1065
1066#define CONSTANT_P(X)((rtx_class[(int) (((enum rtx_code) (X)->code))]) == RTX_CONST_OBJ) \
1067 (GET_RTX_CLASS (GET_CODE (X))(rtx_class[(int) (((enum rtx_code) (X)->code))]) == RTX_CONST_OBJ)
1068
1069/* 1 if X is a LABEL_REF. */
1070#define LABEL_REF_P(X)(((enum rtx_code) (X)->code) == LABEL_REF) \
1071 (GET_CODE (X)((enum rtx_code) (X)->code) == LABEL_REF)
1072
1073/* 1 if X can be used to represent an object. */
1074#define OBJECT_P(X)(((rtx_class[(int) (((enum rtx_code) (X)->code))]) & (~1)) == (RTX_OBJ & (~1))) \
1075 ((GET_RTX_CLASS (GET_CODE (X))(rtx_class[(int) (((enum rtx_code) (X)->code))]) & RTX_OBJ_MASK(~1)) == RTX_OBJ_RESULT(RTX_OBJ & (~1)))
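/* Illustrative sketch (not part of rtl.h/final.cc): the AND-mask tests above
   (BINARY_P, COMPARISON_P, COMMUTATIVE_P, OBJECT_P, ...) work because related
   rtx classes are given adjacent values that differ only in their low bits,
   so clearing those bits folds a whole group onto one representative.  The
   enum below is a hypothetical stand-in with the same layout idea, not the
   real rtx_class enumeration.  */
enum sk_class
{
  SK_COMPARE      = 0,  /* bit 0 set when commutative, bit 1 set when arithmetic */
  SK_COMM_COMPARE = 1,
  SK_BIN_ARITH    = 2,
  SK_COMM_ARITH   = 3
};
#define SK_COMPARISON_P(C)  (((C) & ~1) == (SK_COMPARE & ~1))       /* matches 0 and 1 */
#define SK_COMMUTATIVE_P(C) (((C) & ~2) == (SK_COMM_COMPARE & ~2))  /* matches 1 and 3 */
static_assert (SK_COMPARISON_P (SK_COMM_COMPARE), "both compare classes hit");
static_assert (SK_COMMUTATIVE_P (SK_COMM_ARITH) && !SK_COMMUTATIVE_P (SK_BIN_ARITH),
               "only the commutative classes hit");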
1076
1077/* General accessor macros for accessing the fields of an rtx. */
1078
1079#if defined ENABLE_RTL_CHECKING && (GCC_VERSION(4 * 1000 + 2) >= 2007)
1080/* The bit with a star outside the statement expr and an & inside is
1081 so that N can be evaluated only once. */
1082#define RTL_CHECK1(RTX, N, C1)((RTX)->u.fld[N]) __extension__ \
1083(*({ __typeof (RTX) const _rtx = (RTX); const int _n = (N); \
1084 const enum rtx_code _code = GET_CODE (_rtx)((enum rtx_code) (_rtx)->code); \
1085 if (_n < 0 || _n >= GET_RTX_LENGTH (_code)(rtx_length[(int) (_code)])) \
1086 rtl_check_failed_bounds (_rtx, _n, __FILE__"/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/rtl.h", __LINE__1086, \
1087 __FUNCTION__); \
1088 if (GET_RTX_FORMAT (_code)(rtx_format[(int) (_code)])[_n] != C1) \
1089 rtl_check_failed_type1 (_rtx, _n, C1, __FILE__"/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/rtl.h", __LINE__1089, \
1090 __FUNCTION__); \
1091 &_rtx->u.fld[_n]; }))
1092
1093#define RTL_CHECK2(RTX, N, C1, C2)((RTX)->u.fld[N]) __extension__ \
1094(*({ __typeof (RTX) const _rtx = (RTX); const int _n = (N); \
1095 const enum rtx_code _code = GET_CODE (_rtx)((enum rtx_code) (_rtx)->code); \
1096 if (_n < 0 || _n >= GET_RTX_LENGTH (_code)(rtx_length[(int) (_code)])) \
1097 rtl_check_failed_bounds (_rtx, _n, __FILE__"/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/rtl.h", __LINE__1097, \
1098 __FUNCTION__); \
1099 if (GET_RTX_FORMAT (_code)(rtx_format[(int) (_code)])[_n] != C1 \
1100 && GET_RTX_FORMAT (_code)(rtx_format[(int) (_code)])[_n] != C2) \
1101 rtl_check_failed_type2 (_rtx, _n, C1, C2, __FILE__"/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/rtl.h", __LINE__1101, \
1102 __FUNCTION__); \
1103 &_rtx->u.fld[_n]; }))
1104
1105#define RTL_CHECKC1(RTX, N, C)((RTX)->u.fld[N]) __extension__ \
1106(*({ __typeof (RTX) const _rtx = (RTX); const int _n = (N); \
1107 if (GET_CODE (_rtx)((enum rtx_code) (_rtx)->code) != (C)) \
1108 rtl_check_failed_code1 (_rtx, (C), __FILE__"/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/rtl.h", __LINE__1108, \
1109 __FUNCTION__); \
1110 &_rtx->u.fld[_n]; }))
1111
1112#define RTL_CHECKC2(RTX, N, C1, C2)((RTX)->u.fld[N]) __extension__ \
1113(*({ __typeof (RTX) const _rtx = (RTX); const int _n = (N); \
1114 const enum rtx_code _code = GET_CODE (_rtx)((enum rtx_code) (_rtx)->code); \
1115 if (_code != (C1) && _code != (C2)) \
1116 rtl_check_failed_code2 (_rtx, (C1), (C2), __FILE__"/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/rtl.h", __LINE__1116, \
1117 __FUNCTION__); \
1118 &_rtx->u.fld[_n]; }))
1119
1120#define RTL_CHECKC3(RTX, N, C1, C2, C3)((RTX)->u.fld[N]) __extension__ \
1121(*({ __typeof (RTX) const _rtx = (RTX); const int _n = (N); \
1122 const enum rtx_code _code = GET_CODE (_rtx)((enum rtx_code) (_rtx)->code); \
1123 if (_code != (C1) && _code != (C2) && _code != (C3)) \
1124 rtl_check_failed_code3 (_rtx, (C1), (C2), (C3), __FILE__"/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/rtl.h", \
1125 __LINE__1125, __FUNCTION__); \
1126 &_rtx->u.fld[_n]; }))
1127
1128#define RTVEC_ELT(RTVEC, I)((RTVEC)->elem[I]) __extension__ \
1129(*({ __typeof (RTVEC) const _rtvec = (RTVEC); const int _i = (I); \
1130 if (_i < 0 || _i >= GET_NUM_ELEM (_rtvec)((_rtvec)->num_elem)) \
1131 rtvec_check_failed_bounds (_rtvec, _i, __FILE__"/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/rtl.h", __LINE__1131, \
1132 __FUNCTION__); \
1133 &_rtvec->elem[_i]; }))
1134
1135#define XWINT(RTX, N)((RTX)->u.hwint[N]) __extension__ \
1136(*({ __typeof (RTX) const _rtx = (RTX); const int _n = (N); \
1137 const enum rtx_code _code = GET_CODE (_rtx)((enum rtx_code) (_rtx)->code); \
1138 if (_n < 0 || _n >= GET_RTX_LENGTH (_code)(rtx_length[(int) (_code)])) \
1139 rtl_check_failed_bounds (_rtx, _n, __FILE__"/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/rtl.h", __LINE__1139, \
1140 __FUNCTION__); \
1141 if (GET_RTX_FORMAT (_code)(rtx_format[(int) (_code)])[_n] != 'w') \
1142 rtl_check_failed_type1 (_rtx, _n, 'w', __FILE__"/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/rtl.h", __LINE__1142, \
1143 __FUNCTION__); \
1144 &_rtx->u.hwint[_n]; }))
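/* Illustrative sketch (not part of rtl.h/final.cc): the checked accessors
   above use the GNU statement-expression extension so their operands are
   evaluated exactly once, the index and format are validated, and the result
   is still an assignable lvalue.  A hypothetical miniature of the same shape
   (GCC/Clang only; names invented for illustration):  */
#include <cstdio>
#include <cstdlib>
struct sk_node { int fld[4]; };
#define SK_CHECKED_ELT(NODE, N) __extension__                          \
(*({ sk_node *const _node = (NODE); const int _n = (N);                \
     if (_n < 0 || _n >= 4)                                            \
       {                                                               \
         fprintf (stderr, "field index %d out of bounds\n", _n);       \
         abort ();                                                     \
       }                                                               \
     &_node->fld[_n]; }))
static void sk_checked_demo ()
{
  sk_node n = { { 0, 0, 0, 0 } };
  SK_CHECKED_ELT (&n, 2) = 7;         /* lvalue use, like the XWINT/XEXP accessors */
  if (SK_CHECKED_ELT (&n, 2) != 7)
    abort ();
}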
1145
1146#define CWI_ELT(RTX, I)((RTX)->u.hwiv.elem[I]) __extension__ \
1147(*({ __typeof (RTX) const _cwi = (RTX); \
1148 int _max = CWI_GET_NUM_ELEM (_cwi)((int)__extension__ ({ __typeof ((_cwi)) const