Bug Summary

File: build/gcc/rtl.h
Warning: line 1493, column 3
Returning null reference

Annotated Source Code

clang -cc1 -cc1 -triple x86_64-suse-linux -analyze -disable-free -clear-ast-before-backend -disable-llvm-verifier -discard-value-names -main-file-name lra-assigns.cc -analyzer-checker=core -analyzer-checker=apiModeling -analyzer-checker=unix -analyzer-checker=deadcode -analyzer-checker=cplusplus -analyzer-checker=security.insecureAPI.UncheckedReturn -analyzer-checker=security.insecureAPI.getpw -analyzer-checker=security.insecureAPI.gets -analyzer-checker=security.insecureAPI.mktemp -analyzer-checker=security.insecureAPI.mkstemp -analyzer-checker=security.insecureAPI.vfork -analyzer-checker=nullability.NullPassedToNonnull -analyzer-checker=nullability.NullReturnedFromNonnull -analyzer-output plist -w -setup-static-analyzer -analyzer-config-compatibility-mode=true -mrelocation-model static -mframe-pointer=none -fmath-errno -ffp-contract=on -fno-rounding-math -mconstructor-aliases -funwind-tables=2 -target-cpu x86-64 -tune-cpu generic -debugger-tuning=gdb -fcoverage-compilation-dir=/buildworker/marxinbox-gcc-clang-static-analyzer/objdir/gcc -resource-dir /usr/lib64/clang/15.0.7 -D IN_GCC -D HAVE_CONFIG_H -I . -I . -I /buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc -I /buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/. -I /buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../include -I /buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libcpp/include -I /buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libcody -I /buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libdecnumber -I /buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libdecnumber/bid -I ../libdecnumber -I /buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libbacktrace -internal-isystem /usr/bin/../lib64/gcc/x86_64-suse-linux/13/../../../../include/c++/13 -internal-isystem /usr/bin/../lib64/gcc/x86_64-suse-linux/13/../../../../include/c++/13/x86_64-suse-linux -internal-isystem /usr/bin/../lib64/gcc/x86_64-suse-linux/13/../../../../include/c++/13/backward -internal-isystem /usr/lib64/clang/15.0.7/include -internal-isystem /usr/local/include -internal-isystem /usr/bin/../lib64/gcc/x86_64-suse-linux/13/../../../../x86_64-suse-linux/include -internal-externc-isystem /include -internal-externc-isystem /usr/include -O2 -Wno-narrowing -Wwrite-strings -Wno-long-long -Wno-variadic-macros -Wno-overlength-strings -fdeprecated-macro -fdebug-compilation-dir=/buildworker/marxinbox-gcc-clang-static-analyzer/objdir/gcc -ferror-limit 19 -fno-rtti -fgnuc-version=4.2.1 -vectorize-loops -vectorize-slp -analyzer-output=plist-html -analyzer-config silence-checkers=core.NullDereference -faddrsig -D__GCC_HAVE_DWARF2_CFI_ASM=1 -o /buildworker/marxinbox-gcc-clang-static-analyzer/objdir/clang-static-analyzer/2023-03-27-141847-20772-1/report-7rf9uh.plist -x c++ /buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/lra-assigns.cc

/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/lra-assigns.cc

1/* Assign reload pseudos.
2 Copyright (C) 2010-2023 Free Software Foundation, Inc.
3 Contributed by Vladimir Makarov <vmakarov@redhat.com>.
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it under
8the terms of the GNU General Public License as published by the Free
9Software Foundation; either version 3, or (at your option) any later
10version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING3. If not see
19<http://www.gnu.org/licenses/>. */
20
21
22/* This file's main objective is to assign hard registers to reload
23 pseudos. It also tries to allocate hard registers to other
24 pseudos, but at a lower priority than the reload pseudos. The pass
25 does not transform the RTL.
26
27 We must allocate a hard register to every reload pseudo. We try to
28 increase the chances of finding a viable allocation by assigning
29 the pseudos in order of fewest available hard registers first. If
30 we still fail to find a hard register, we spill other (non-reload)
31 pseudos in order to make room.
32
33 find_hard_regno_for finds hard registers for allocation without
34 spilling. spill_for does the same with spilling. Both functions
35 use a cost model to determine the most profitable choice of hard
36 and spill registers.
37
38 Once we have finished allocating reload pseudos, we also try to
39 assign registers to other (non-reload) pseudos. This is useful if
40 hard registers were freed up by the spilling just described.
41
42 We try to assign hard registers by collecting pseudos into threads.
43 These threads contain reload and inheritance pseudos that are
44 connected by copies (move insns). Doing this improves the chances
45 of pseudos in the thread getting the same hard register and, as a
46 result, of allowing some move insns to be deleted.
47
48 When we assign a hard register to a pseudo, we decrease the cost of
49 using the same hard register for pseudos that are connected by
50 copies.
51
52 If two hard registers have the same frequency-derived cost, we
53 prefer hard registers with higher priorities. The mapping of
54 registers to priorities is controlled by the register_priority
55 target hook. For example, x86-64 has a few register priorities:
56 hard registers with and without REX prefixes have different
57 priorities. This permits us to generate smaller code as insns
58 without REX prefixes are shorter.
59
60 If a few hard registers are still equally good for the assignment,
61 we choose the least used hard register. This is called leveling and
62 may be profitable for some targets.
63
64 Only insns with changed allocation pseudos are processed on the
65 next constraint pass.
66
67 The pseudo live-ranges are used to find conflicting pseudos.
68
69 For understanding the code, it is important to keep in mind that
70 inheritance, split, and reload pseudos created since the last
71 constraint pass have regno >= lra_constraint_new_regno_start.
72 Inheritance and split pseudos created on any pass are in the
73 corresponding bitmaps. Inheritance and split pseudos created since
74 the last constraint pass also have a corresponding non-negative
75 restore_regno. */
76
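/* Hedged illustration, not part of lra-assigns.cc: the ordering idea
   from the comment above, reduced to a sketch.  Pseudos whose class
   offers fewer hard registers are assigned first, so the scarcest
   resources are committed before the flexible ones.  All names here
   are invented.  */

#include <algorithm>
#include <vector>

struct sketch_pseudo { int regno; int n_class_hard_regs; };

static void
order_for_assignment_sketch (std::vector<sketch_pseudo> &ps)
{
  std::sort (ps.begin (), ps.end (),
             [] (const sketch_pseudo &a, const sketch_pseudo &b)
             {
               if (a.n_class_hard_regs != b.n_class_hard_regs)
                 return a.n_class_hard_regs < b.n_class_hard_regs;
               return a.regno < b.regno; /* deterministic tie-break */
             });
}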
77#include "config.h"
78#include "system.h"
79#include "coretypes.h"
80#include "backend.h"
81#include "target.h"
82#include "rtl.h"
83#include "tree.h"
84#include "predict.h"
85#include "df.h"
86#include "memmodel.h"
87#include "tm_p.h"
88#include "insn-config.h"
89#include "regs.h"
90#include "ira.h"
91#include "recog.h"
92#include "rtl-error.h"
93#include "sparseset.h"
94#include "lra.h"
95#include "lra-int.h"
96#include "function-abi.h"
97
98/* Current iteration number of the pass and current iteration number
99 of the pass after the latest spill pass when any former reload
100 pseudo was spilled. */
101int lra_assignment_iter;
102int lra_assignment_iter_after_spill;
103
104/* Flag of spilling former reload pseudos on this pass. */
105static bool former_reload_pseudo_spill_p;
106
107/* Array containing corresponding values of function
108 lra_get_allocno_class. It is used to speed up the code. */
109static enum reg_class *regno_allocno_class_array;
110
111/* Array containing lengths of pseudo live ranges. It is used to
112 speed up the code. */
113static int *regno_live_length;
114
115/* Information about the thread to which a pseudo belongs. Threads are
116 a set of connected reload and inheritance pseudos with the same set of
117 available hard registers. Lone registers belong to their own threads. */
118struct regno_assign_info
119{
120 /* First/next pseudo of the same thread. */
121 int first, next;
122 /* Frequency of the thread (execution frequency of only reload
123 pseudos in the thread when the thread contains a reload pseudo).
124 Defined only for the first thread pseudo. */
125 int freq;
126};
127
128/* Map regno to the corresponding regno assignment info. */
129static struct regno_assign_info *regno_assign_info;
130
131/* All inherited, subreg or optional pseudos created before the last
132 spill sub-pass. Such pseudos are permitted to get memory instead of
133 hard regs. */
134static bitmap_head non_reload_pseudos;
135
136/* Process a pseudo copy with execution frequency COPY_FREQ connecting
137 REGNO1 and REGNO2 to form threads. */
138static void
139process_copy_to_form_thread (int regno1, int regno2, int copy_freq)
140{
141 int last, regno1_first, regno2_first;
142
143 lra_assert (regno1 >= lra_constraint_new_regno_start
144 && regno2 >= lra_constraint_new_regno_start);
145 regno1_first = regno_assign_info[regno1].first;
146 regno2_first = regno_assign_info[regno2].first;
147 if (regno1_first != regno2_first)
148 {
149 for (last = regno2_first;
150 regno_assign_info[last].next >= 0;
151 last = regno_assign_info[last].next)
152 regno_assign_info[last].first = regno1_first;
153 regno_assign_info[last].first = regno1_first;
154 regno_assign_info[last].next = regno_assign_info[regno1_first].next;
155 regno_assign_info[regno1_first].next = regno2_first;
156 regno_assign_info[regno1_first].freq
157 += regno_assign_info[regno2_first].freq;
158 }
159 regno_assign_info[regno1_first].freq -= 2 * copy_freq;
160 lra_assert (regno_assign_info[regno1_first].freq >= 0);
161}
162
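/* A minimal sketch, with invented names, of the merge performed by
   process_copy_to_form_thread above: every member of thread 2 is
   re-pointed at thread 1's head, and thread 2 is then spliced in
   right after that head.  */

struct sketch_node { int first, next; };

static void
merge_threads_sketch (sketch_node *info, int head1, int head2)
{
  int last = head2;
  for (;;)
    {
      info[last].first = head1;   /* member now belongs to thread 1 */
      if (info[last].next < 0)
        break;                    /* LAST is thread 2's tail */
      last = info[last].next;
    }
  info[last].next = info[head1].next; /* splice after HEAD1 */
  info[head1].next = head2;
}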
163/* Initialize REGNO_ASSIGN_INFO and form threads. */
164static void
165init_regno_assign_info (void)
166{
167 int i, regno1, regno2, max_regno = max_reg_num ();
168 lra_copy_t cp;
169
170 regno_assign_info = XNEWVEC (struct regno_assign_info, max_regno);
171 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
172 {
173 regno_assign_info[i].first = i;
174 regno_assign_info[i].next = -1;
175 regno_assign_info[i].freq = lra_reg_info[i].freq;
176 }
177 /* Form the threads. */
178 for (i = 0; (cp = lra_get_copy (i)) != NULL; i++)
179 if ((regno1 = cp->regno1) >= lra_constraint_new_regno_start
180 && (regno2 = cp->regno2) >= lra_constraint_new_regno_start
181 && reg_renumber[regno1] < 0 && lra_reg_info[regno1].nrefs != 0
182 && reg_renumber[regno2] < 0 && lra_reg_info[regno2].nrefs != 0
183 && (ira_class_hard_regs_num[regno_allocno_class_array[regno1]]
184 == ira_class_hard_regs_num[regno_allocno_class_array[regno2]]))
185 process_copy_to_form_thread (regno1, regno2, cp->freq);
186}
187
188/* Free REGNO_ASSIGN_INFO. */
189static void
190finish_regno_assign_info (void)
191{
192 free (regno_assign_info);
193}
194
195/* The function is used to sort *reload* and *inheritance* pseudos to
196 try to assign them hard registers. We always put pseudos from the
197 same thread nearby. */
198static int
199reload_pseudo_compare_func (const void *v1p, const void *v2p)
200{
201 int r1 = *(const int *) v1p, r2 = *(const int *) v2p;
202 enum reg_class cl1 = regno_allocno_class_array[r1];
203 enum reg_class cl2 = regno_allocno_class_array[r2];
204 int diff;
205
206 lra_assert (r1 >= lra_constraint_new_regno_start
207 && r2 >= lra_constraint_new_regno_start);
208
209 /* Prefer to assign reload registers with smaller classes first to
210 guarantee assignment to all reload registers. */
211 if ((diff = (ira_class_hard_regs_num[cl1]
212 - ira_class_hard_regs_num[cl2])) != 0)
213 return diff;
214 /* Allocate bigger pseudos first to avoid register file
215 fragmentation. */
216 if ((diff
217 = (ira_reg_class_max_nregs[cl2][lra_reg_info[r2].biggest_mode]
218 - ira_reg_class_max_nregs[cl1][lra_reg_info[r1].biggest_mode])) != 0)
219 return diff;
220 if ((diff = (regno_assign_info[regno_assign_info[r2].first].freq
221 - regno_assign_info[regno_assign_info[r1].first].freq)) != 0)
222 return diff;
223 /* Put pseudos from the thread nearby. */
224 if ((diff = regno_assign_info[r1].first - regno_assign_info[r2].first) != 0)
225 return diff;
226 /* Prefer pseudos with longer live ranges. It sets up better
227 preferred hard registers for the thread pseudos and decreases
228 register-register moves between the thread pseudos. */
229 if ((diff = regno_live_length[r2] - regno_live_length[r1]) != 0)
230 return diff;
231 /* If regs are equally good, sort by their numbers, so that the
232 results of qsort leave nothing to chance. */
233 return r1 - r2;
234}
235
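/* Illustrative only: the "return the first nonzero key difference"
   comparator pattern used by reload_pseudo_compare_func above,
   reduced to two keys plus the regno tie-break.  The tie-break is
   what makes the order total, so qsort results are reproducible.  */

struct sketch_key { int scarcity, freq, regno; };

static int
compare_keys_sketch (const void *v1p, const void *v2p)
{
  const sketch_key *a = (const sketch_key *) v1p;
  const sketch_key *b = (const sketch_key *) v2p;
  int diff;

  if ((diff = a->scarcity - b->scarcity) != 0)
    return diff;                /* primary key: scarcer class first */
  if ((diff = b->freq - a->freq) != 0)
    return diff;                /* secondary key: higher frequency first */
  return a->regno - b->regno;   /* total order for determinism */
}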
236/* The function is used to sort *non-reload* pseudos to try to assign
237 them hard registers. The order calculation is simpler than in the
238 previous function and is based on the pseudo's usage frequency. */
239static int
240pseudo_compare_func (const void *v1p, const void *v2p)
241{
242 int r1 = *(const int *) v1p, r2 = *(const int *) v2p;
243 int diff;
244
245 /* Assign hard reg to static chain pointer first pseudo when
246 non-local goto is used. */
247 if ((diff = (non_spilled_static_chain_regno_p (r2)
248 - non_spilled_static_chain_regno_p (r1))) != 0)
249 return diff;
250
251 /* Prefer to assign more frequently used registers first. */
252 if ((diff = lra_reg_info[r2].freq - lra_reg_info[r1].freq) != 0)
253 return diff;
254
255 /* If regs are equally good, sort by their numbers, so that the
256 results of qsort leave nothing to chance. */
257 return r1 - r2;
258}
259
260/* Arrays of size LRA_LIVE_MAX_POINT mapping a program point to the
261 pseudo live ranges with given start point. We insert only live
262 ranges of pseudos interesting for assignment purposes. They are
263 reload pseudos and pseudos assigned to hard registers. */
264static lra_live_range_t *start_point_ranges;
265
266/* Used as a flag that a live range is not inserted in the start point
267 chain. */
268static struct lra_live_range not_in_chain_mark;
269
270/* Create and set up START_POINT_RANGES. */
271static void
272create_live_range_start_chains (void)
273{
274 int i, max_regno;
275 lra_live_range_t r;
276
277 start_point_ranges = XCNEWVEC (lra_live_range_t, lra_live_max_point);
278 max_regno = max_reg_num ();
279 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
280 if (i >= lra_constraint_new_regno_start || reg_renumber[i] >= 0)
281 {
282 for (r = lra_reg_info[i].live_ranges; r != NULL; r = r->next)
283 {
284 r->start_next = start_point_ranges[r->start];
285 start_point_ranges[r->start] = r;
286 }
287 }
288 else
289 {
290 for (r = lra_reg_info[i].live_ranges; r != NULL; r = r->next)
291 r->start_next = &not_in_chain_mark;
292 }
293}
294
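/* Sketch with invented minimal types of the bucket structure built by
   create_live_range_start_chains above: each live range is pushed
   onto the chain of its start point, so all ranges beginning at point
   P can be walked in time proportional to their number.  */

struct sketch_range
{
  int start, finish;
  sketch_range *next;        /* next range of the same pseudo */
  sketch_range *start_next;  /* next range starting at the same point */
};

static void
link_range_sketch (sketch_range **buckets, sketch_range *r)
{
  r->start_next = buckets[r->start];
  buckets[r->start] = r;     /* push onto the bucket for r->start */
}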
295/* Insert live ranges of pseudo REGNO into start chains if they are
296 not there yet. */
297static void
298insert_in_live_range_start_chain (int regno)
299{
300 lra_live_range_t r = lra_reg_info[regno].live_ranges;
301
302 if (r->start_next != &not_in_chain_mark)
303 return;
304 for (; r != NULL; r = r->next)
305 {
306 r->start_next = start_point_ranges[r->start];
307 start_point_ranges[r->start] = r;
308 }
309}
310
311/* Free START_POINT_RANGES. */
312static void
313finish_live_range_start_chains (void)
314{
315 gcc_assert (start_point_ranges != NULL);
316 free (start_point_ranges);
317 start_point_ranges = NULL;
318}
319
320/* Map: program point -> bitmap of all pseudos living at the point and
321 assigned to hard registers. */
322static bitmap_head *live_hard_reg_pseudos;
323static bitmap_obstack live_hard_reg_pseudos_bitmap_obstack;
324
325/* reg_renumber corresponding to pseudos marked in
326 live_hard_reg_pseudos. reg_renumber might not match
327 live_hard_reg_pseudos but live_pseudos_reg_renumber always reflects
328 live_hard_reg_pseudos. */
329static int *live_pseudos_reg_renumber;
330
331/* Sparseset used to calculate living hard reg pseudos for some program
332 point range. */
333static sparseset live_range_hard_reg_pseudos;
334
335/* Sparseset used to calculate living reload/inheritance pseudos for
336 some program point range. */
337static sparseset live_range_reload_inheritance_pseudos;
338
339/* Allocate and initialize the data about living pseudos at program
340 points. */
341static void
342init_lives (void)
343{
344 int i, max_regno = max_reg_num ();
345
346 live_range_hard_reg_pseudos = sparseset_alloc (max_regno);
347 live_range_reload_inheritance_pseudos = sparseset_alloc (max_regno);
348 live_hard_reg_pseudos = XNEWVEC (bitmap_head, lra_live_max_point);
349 bitmap_obstack_initialize (&live_hard_reg_pseudos_bitmap_obstack);
350 for (i = 0; i < lra_live_max_point; i++)
351 bitmap_initialize (&live_hard_reg_pseudos[i],
352 &live_hard_reg_pseudos_bitmap_obstack);
353 live_pseudos_reg_renumber = XNEWVEC (int, max_regno);
354 for (i = 0; i < max_regno; i++)
355 live_pseudos_reg_renumber[i] = -1;
356}
357
358/* Free the data about living pseudos at program points. */
359static void
360finish_lives (void)
361{
362 sparseset_free (live_range_hard_reg_pseudos);
363 sparseset_free (live_range_reload_inheritance_pseudos);
364 free (live_hard_reg_pseudos);
365 bitmap_obstack_release (&live_hard_reg_pseudos_bitmap_obstack);
366 free (live_pseudos_reg_renumber);
367}
368
369/* Update the LIVE_HARD_REG_PSEUDOS and LIVE_PSEUDOS_REG_RENUMBER
370 entries for pseudo REGNO. Assume that the register has been
371 spilled if FREE_P, otherwise assume that it has been assigned
372 reg_renumber[REGNO] (if >= 0). We also insert the pseudo live
373 ranges in the start chains when it is assumed to be assigned to a
374 hard register because we use the chains of pseudos assigned to hard
375 registers during allocation. */
376static void
377update_lives (int regno, bool free_p)
378{
379 int p;
380 lra_live_range_t r;
381
382 if (reg_renumber[regno] < 0)
383 return;
384 live_pseudos_reg_renumber[regno] = free_p ? -1 : reg_renumber[regno];
385 for (r = lra_reg_info[regno].live_ranges; r != NULL; r = r->next)
386 {
387 for (p = r->start; p <= r->finish; p++)
388 if (free_p)
389 bitmap_clear_bit (&live_hard_reg_pseudos[p], regno);
390 else
391 {
392 bitmap_set_bit (&live_hard_reg_pseudos[p], regno);
393 insert_in_live_range_start_chain (regno);
394 }
395 }
396}
397
398/* Sparseset used to calculate reload pseudos conflicting with a given
399 pseudo when we are trying to find a hard register for the given
400 pseudo. */
401static sparseset conflict_reload_and_inheritance_pseudos;
402
403/* Map: program point -> bitmap of all reload and inheritance pseudos
404 living at the point. */
405static bitmap_head *live_reload_and_inheritance_pseudos;
406static bitmap_obstack live_reload_and_inheritance_pseudos_bitmap_obstack;
407
408/* Allocate and initialize data about living reload pseudos at any
409 given program point. */
410static void
411init_live_reload_and_inheritance_pseudos (void)
412{
413 int i, p, max_regno = max_reg_num ();
414 lra_live_range_t r;
415
416 conflict_reload_and_inheritance_pseudos = sparseset_alloc (max_regno);
417 live_reload_and_inheritance_pseudos = XNEWVEC (bitmap_head, lra_live_max_point);
418 bitmap_obstack_initialize (&live_reload_and_inheritance_pseudos_bitmap_obstack);
419 for (p = 0; p < lra_live_max_point; p++)
420 bitmap_initialize (&live_reload_and_inheritance_pseudos[p],
421 &live_reload_and_inheritance_pseudos_bitmap_obstack);
422 for (i = lra_constraint_new_regno_start; i < max_regno; i++)
423 {
424 for (r = lra_reg_info[i].live_ranges; r != NULL; r = r->next)
425 for (p = r->start; p <= r->finish; p++)
426 bitmap_set_bit (&live_reload_and_inheritance_pseudos[p], i);
427 }
428}
429
430/* Finalize data about living reload pseudos at any given program
431 point. */
432static void
433finish_live_reload_and_inheritance_pseudos (void)
434{
435 sparseset_free (conflict_reload_and_inheritance_pseudos);
436 free (live_reload_and_inheritance_pseudos);
437 bitmap_obstack_release (&live_reload_and_inheritance_pseudos_bitmap_obstack);
438}
439
440/* The value used to check whether the cost of a given hard reg is
441 currently defined. */
442static int curr_hard_regno_costs_check = 0;
443/* Array used to check whether the cost of the corresponding hard reg
444 (the array element index) is currently defined. */
445 static int hard_regno_costs_check[FIRST_PSEUDO_REGISTER];
446/* The current costs of allocation of hard regs. Defined only if the
447 value of the corresponding element of the previous array is equal to
448 CURR_HARD_REGNO_COSTS_CHECK. */
449 static int hard_regno_costs[FIRST_PSEUDO_REGISTER];
450
451/* Adjust cost of HARD_REGNO by INCR. Reset the cost first if it is
452 not defined yet. */
453static inline void
454adjust_hard_regno_cost (int hard_regno, int incr)
455{
456 if (hard_regno_costs_check[hard_regno] != curr_hard_regno_costs_check)
457 hard_regno_costs[hard_regno] = 0;
458 hard_regno_costs_check[hard_regno] = curr_hard_regno_costs_check;
459 hard_regno_costs[hard_regno] += incr;
460}
461
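/* Standalone sketch of the generation-counter idiom above; the names
   are invented.  Bumping the current check value invalidates every
   cached cost in O(1), so the array never needs an explicit clear.  */

#define SKETCH_N_REGS 76     /* stand-in for FIRST_PSEUDO_REGISTER */

static int sketch_cost[SKETCH_N_REGS];
static int sketch_cost_check[SKETCH_N_REGS];
static int sketch_curr_check;

static void
sketch_new_generation (void)
{
  sketch_curr_check++;       /* all cached costs become stale */
}

static int
sketch_get_cost (int hard_regno)
{
  /* A stale entry reads as zero, as if it had been reset.  */
  return (sketch_cost_check[hard_regno] == sketch_curr_check
          ? sketch_cost[hard_regno] : 0);
}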
462/* Try to find a free hard register for pseudo REGNO. Return the
463 hard register on success and set *COST to the cost of using
464 that register. (If several registers have equal cost, the one with
465 the highest priority wins.) Return -1 on failure.
466
467 If FIRST_P, return the first available hard reg ignoring other
468 criteria, e.g. allocation cost. This approach results in less hard
469 reg pool fragmentation and permits allocating hard regs to reload
470 pseudos in complicated situations where pseudo sizes are different.
471
472 If TRY_ONLY_HARD_REGNO >= 0, consider only that hard register,
473 otherwise consider all hard registers in REGNO's class.
474
475 If REGNO_SET is not empty, only hard registers from the set are
476 considered. */
477static int
478find_hard_regno_for_1 (int regno, int *cost, int try_only_hard_regno,
479 bool first_p, HARD_REG_SET regno_set)
480{
481 HARD_REG_SET conflict_set;
482 int best_cost = INT_MAX, best_priority = INT_MIN, best_usage = INT_MAX;
483 lra_live_range_t r;
484 int p, i, j, rclass_size, best_hard_regno, priority, hard_regno;
485 int hr, conflict_hr, nregs;
486 machine_mode biggest_mode;
487 unsigned int k, conflict_regno;
488 poly_int64 offset;
489 int val, biggest_nregs, nregs_diff;
490 enum reg_class rclass;
491 bitmap_iterator bi;
492 bool *rclass_intersect_p;
493 HARD_REG_SET impossible_start_hard_regs, available_regs;
494
495 if (hard_reg_set_empty_p (regno_set))
496 conflict_set = lra_no_alloc_regs;
497 else
498 conflict_set = ~regno_set | lra_no_alloc_regs;
499 rclass = regno_allocno_class_array[regno];
500 rclass_intersect_p = ira_reg_classes_intersect_p[rclass];
501 curr_hard_regno_costs_check++;
502 sparseset_clear (conflict_reload_and_inheritance_pseudos);
503 sparseset_clear (live_range_hard_reg_pseudos);
504 conflict_set |= lra_reg_info[regno].conflict_hard_regs;
505 biggest_mode = lra_reg_info[regno].biggest_mode;
506 for (r = lra_reg_info[regno].live_ranges; r != NULL; r = r->next)
507 {
508 EXECUTE_IF_SET_IN_BITMAP (&live_hard_reg_pseudos[r->start], 0, k, bi)
509 if (rclass_intersect_p[regno_allocno_class_array[k]])
510 sparseset_set_bit (live_range_hard_reg_pseudos, k);
511 EXECUTE_IF_SET_IN_BITMAP (&live_reload_and_inheritance_pseudos[r->start],
512 0, k, bi)
513 if (lra_reg_info[k].preferred_hard_regno1 >= 0
514 && live_pseudos_reg_renumber[k] < 0
515 && rclass_intersect_p[regno_allocno_class_array[k]])
516 sparseset_set_bit (conflict_reload_and_inheritance_pseudos, k);
517 for (p = r->start + 1; p <= r->finish; p++)
518 {
519 lra_live_range_t r2;
520
521 for (r2 = start_point_ranges[p];
522 r2 != NULL;
523 r2 = r2->start_next)
524 {
525 if (r2->regno >= lra_constraint_new_regno_start
526 && lra_reg_info[r2->regno].preferred_hard_regno1 >= 0
527 && live_pseudos_reg_renumber[r2->regno] < 0
528 && rclass_intersect_p[regno_allocno_class_array[r2->regno]])
529 sparseset_set_bit (conflict_reload_and_inheritance_pseudos,
530 r2->regno);
531 if (live_pseudos_reg_renumber[r2->regno] >= 0
532 && rclass_intersect_p[regno_allocno_class_array[r2->regno]])
533 sparseset_set_bit (live_range_hard_reg_pseudos, r2->regno);
534 }
535 }
536 }
537 if ((hard_regno = lra_reg_info[regno].preferred_hard_regno1) >= 0)
538 {
539 adjust_hard_regno_cost
540 (hard_regno, -lra_reg_info[regno].preferred_hard_regno_profit1);
541 if ((hard_regno = lra_reg_info[regno].preferred_hard_regno2) >= 0)
542 adjust_hard_regno_cost
543 (hard_regno, -lra_reg_info[regno].preferred_hard_regno_profit2);
544 }
545#ifdef STACK_REGS
546 if (lra_reg_info[regno].no_stack_p)
547 for (i = FIRST_STACK_REG; i <= LAST_STACK_REG; i++)
548 SET_HARD_REG_BIT (conflict_set, i);
549#endif
550 sparseset_clear_bit (conflict_reload_and_inheritance_pseudos, regno);
551 val = lra_reg_info[regno].val;
552 offset = lra_reg_info[regno].offset;
553 impossible_start_hard_regs = lra_reg_info[regno].exclude_start_hard_regs;
554 EXECUTE_IF_SET_IN_SPARSESET (live_range_hard_reg_pseudos, conflict_regno)
555 {
556 conflict_hr = live_pseudos_reg_renumber[conflict_regno];
557 if (lra_reg_val_equal_p (conflict_regno, val, offset))
558 {
559 conflict_hr = live_pseudos_reg_renumber[conflict_regno];
560 nregs = hard_regno_nregs (conflict_hr,
561 lra_reg_info[conflict_regno].biggest_mode);
562 /* Remember about multi-register pseudos. For example, 2
563 hard register pseudos can start on the same hard register
564 but cannot start on HR and HR+1/HR-1. */
565 for (hr = conflict_hr + 1;
566 hr < FIRST_PSEUDO_REGISTER && hr < conflict_hr + nregs;
567 hr++)
568 SET_HARD_REG_BIT (impossible_start_hard_regs, hr);
569 for (hr = conflict_hr - 1;
570 hr >= 0 && (int) end_hard_regno (biggest_mode, hr) > conflict_hr;
571 hr--)
572 SET_HARD_REG_BIT (impossible_start_hard_regs, hr);
573 }
574 else
575 {
576 machine_mode biggest_conflict_mode
577 = lra_reg_info[conflict_regno].biggest_mode;
578 int biggest_conflict_nregs
579 = hard_regno_nregs (conflict_hr, biggest_conflict_mode);
580
581 nregs_diff
582 = (biggest_conflict_nregs
583 - hard_regno_nregs (conflict_hr,
584 PSEUDO_REGNO_MODE (conflict_regno)));
585 add_to_hard_reg_set (&conflict_set,
586 biggest_conflict_mode,
587 conflict_hr
588 - (WORDS_BIG_ENDIAN ? nregs_diff : 0));
589 if (hard_reg_set_subset_p (reg_class_contents[rclass],
590 conflict_set))
591 return -1;
592 }
593 }
594 EXECUTE_IF_SET_IN_SPARSESET (conflict_reload_and_inheritance_pseudos,
595 conflict_regno)
596 if (!lra_reg_val_equal_p (conflict_regno, val, offset))
597 {
598 lra_assert (live_pseudos_reg_renumber[conflict_regno] < 0);
599 if ((hard_regno
600 = lra_reg_info[conflict_regno].preferred_hard_regno1) >= 0)
601 {
602 adjust_hard_regno_cost
603 (hard_regno,
604 lra_reg_info[conflict_regno].preferred_hard_regno_profit1);
605 if ((hard_regno
606 = lra_reg_info[conflict_regno].preferred_hard_regno2) >= 0)
607 adjust_hard_regno_cost
608 (hard_regno,
609 lra_reg_info[conflict_regno].preferred_hard_regno_profit2);
610 }
611 }
612 /* Make sure that all registers in a multi-word pseudo belong to the
613 required class. */
614 conflict_set |= ~reg_class_contents[rclass];
615 lra_assert (rclass != NO_REGS);
616 rclass_size = ira_class_hard_regs_num[rclass];
617 best_hard_regno = -1;
618 hard_regno = ira_class_hard_regs[rclass][0];
619 biggest_nregs = hard_regno_nregs (hard_regno, biggest_mode);
620 nregs_diff = (biggest_nregs
621 - hard_regno_nregs (hard_regno, PSEUDO_REGNO_MODE (regno)));
622 available_regs = reg_class_contents[rclass] & ~lra_no_alloc_regs;
623 for (i = 0; i < rclass_size; i++)
624 {
625 if (try_only_hard_regno >= 0)
626 hard_regno = try_only_hard_regno;
627 else
628 hard_regno = ira_class_hard_regs[rclass][i];
629 if (! overlaps_hard_reg_set_p (conflict_set,
630 PSEUDO_REGNO_MODE (regno), hard_regno)
631 && targetm.hard_regno_mode_ok (hard_regno,
632 PSEUDO_REGNO_MODE (regno))
633 /* We cannot use prohibited_class_mode_regs for all classes
634 because it is not defined for all classes. */
635 && (ira_allocno_class_translate[rclass] != rclass
636 || ! TEST_HARD_REG_BIT (ira_prohibited_class_mode_regs
637 [rclass][PSEUDO_REGNO_MODE (regno)],
638 hard_regno))
639 && ! TEST_HARD_REG_BIT (impossible_start_hard_regs, hard_regno)
640 && (nregs_diff == 0
641 || (WORDS_BIG_ENDIAN
642 ? (hard_regno - nregs_diff >= 0
643 && TEST_HARD_REG_BIT (available_regs,
644 hard_regno - nregs_diff))
645 : TEST_HARD_REG_BIT (available_regs,
646 hard_regno + nregs_diff))))
647 {
648 if (hard_regno_costs_check[hard_regno]
649 != curr_hard_regno_costs_check)
650 {
651 hard_regno_costs_check[hard_regno] = curr_hard_regno_costs_check;
652 hard_regno_costs[hard_regno] = 0;
653 }
654 for (j = 0;
655 j < hard_regno_nregs (hard_regno, PSEUDO_REGNO_MODE (regno));
656 j++)
657 if (! crtl->abi->clobbers_full_reg_p (hard_regno + j)
658 && ! df_regs_ever_live_p (hard_regno + j))
659 /* It needs save restore. */
660 hard_regno_costs[hard_regno]
661 += (2
662 * REG_FREQ_FROM_BB (ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb)
663 + 1);
664 priority = targetm.register_priority (hard_regno);
665 if (best_hard_regno < 0 || hard_regno_costs[hard_regno] < best_cost
666 || (hard_regno_costs[hard_regno] == best_cost
667 && (priority > best_priority
668 || (targetm.register_usage_leveling_p ()
669 && priority == best_priority
670 && best_usage > lra_hard_reg_usage[hard_regno]))))
671 {
672 best_hard_regno = hard_regno;
673 best_cost = hard_regno_costs[hard_regno];
674 best_priority = priority;
675 best_usage = lra_hard_reg_usage[hard_regno];
676 }
677 }
678 if (try_only_hard_regno >= 0 || (first_p && best_hard_regno >= 0))
679 break;
680 }
681 if (best_hard_regno >= 0)
682 *cost = best_cost - lra_reg_info[regno].freq;
683 return best_hard_regno;
684}
685
686/* A wrapper for find_hard_regno_for_1 (see the comments for that
687 function). It first tries to find a hard register in the
688 preferred class, if that looks worthwhile. */
689static int
690find_hard_regno_for (int regno, int *cost, int try_only_hard_regno, bool first_p)
691{
692 int hard_regno;
693 HARD_REG_SET regno_set;
694
695 /* Only original pseudos can have a different preferred class. */
696 if (try_only_hard_regno < 0 && regno < lra_new_regno_start)
697 {
698 enum reg_class pref_class = reg_preferred_class (regno);
699
700 if (regno_allocno_class_array[regno] != pref_class)
701 {
702 hard_regno = find_hard_regno_for_1 (regno, cost, -1, first_p,
703 reg_class_contents[pref_class]);
704 if (hard_regno >= 0)
705 return hard_regno;
706 }
707 }
708 CLEAR_HARD_REG_SET (regno_set);
709 return find_hard_regno_for_1 (regno, cost, try_only_hard_regno, first_p,
710 regno_set);
711}
712
713/* Current value used for checking elements in
714 update_hard_regno_preference_check. */
715static int curr_update_hard_regno_preference_check;
716/* If an element value is equal to the above variable value, then the
717 corresponding regno has been processed for preference
718 propagation. */
719static int *update_hard_regno_preference_check;
720
721/* Update the preference for using HARD_REGNO for pseudos that are
722 connected directly or indirectly with REGNO. Apply divisor DIV
723 to any preference adjustments.
724
725 The more indirectly a pseudo is connected, the smaller its effect
726 should be. We therefore increase DIV on each "hop". */
727static void
728update_hard_regno_preference (int regno, int hard_regno, int div)
729{
730 int another_regno, cost;
731 lra_copy_t cp, next_cp;
732
733 /* Search depth 5 seems to be enough. */
734 if (div > (1 << 5))
735 return;
736 for (cp = lra_reg_info[regno].copies; cp != NULL; cp = next_cp)
737 {
738 if (cp->regno1 == regno)
739 {
740 next_cp = cp->regno1_next;
741 another_regno = cp->regno2;
742 }
743 else if (cp->regno2 == regno)
744 {
745 next_cp = cp->regno2_next;
746 another_regno = cp->regno1;
747 }
748 else
749 gcc_unreachable ();
750 if (reg_renumber[another_regno] < 0
751 && (update_hard_regno_preference_check[another_regno]
752 != curr_update_hard_regno_preference_check))
753 {
754 update_hard_regno_preference_check[another_regno]
755 = curr_update_hard_regno_preference_check;
756 cost = cp->freq < div ? 1 : cp->freq / div;
757 lra_setup_reload_pseudo_preferenced_hard_reg
758 (another_regno, hard_regno, cost);
759 update_hard_regno_preference (another_regno, hard_regno, div * 2);
760 }
761 }
762}
763
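/* Toy model, with invented names, of the decay in
   update_hard_regno_preference above: the adjustment at each hop is
   CP->FREQ / DIV with DIV doubling per hop, and the walk stops once
   DIV exceeds 1 << 5, i.e. after roughly five hops.  */

static int
hop_adjustment_sketch (int copy_freq, int div)
{
  if (div > (1 << 5))
    return 0;                /* too indirect: not propagated at all */
  return copy_freq < div ? 1 : copy_freq / div; /* never decays to 0 */
}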
764/* Return prefix title for pseudo REGNO. */
765static const char *
766pseudo_prefix_title (int regno)
767{
768 return
769 (regno < lra_constraint_new_regno_start ? ""
770 : bitmap_bit_p (&lra_inheritance_pseudos, regno) ? "inheritance "
771 : bitmap_bit_p (&lra_split_regs, regno) ? "split "
772 : bitmap_bit_p (&lra_optional_reload_pseudos, regno) ? "optional reload "
773 : bitmap_bit_p (&lra_subreg_reload_pseudos, regno) ? "subreg reload "
774 : "reload ");
775}
776
777/* Update REG_RENUMBER and other pseudo preferences by assignment of
778 HARD_REGNO to pseudo REGNO and print about it if PRINT_P. */
779void
780lra_setup_reg_renumber (int regno, int hard_regno, bool print_p)
781{
782 int i, hr;
783
784 /* We cannot just reassign hard register. */
785 lra_assert (hard_regno < 0 || reg_renumber[regno] < 0);
786 if ((hr = hard_regno) < 0)
787 hr = reg_renumber[regno];
788 reg_renumber[regno] = hard_regno;
789 lra_assert (hr >= 0);
790 for (i = 0; i < hard_regno_nregs (hr, PSEUDO_REGNO_MODE (regno)); i++)
791 if (hard_regno < 0)
792 lra_hard_reg_usage[hr + i] -= lra_reg_info[regno].freq;
793 else
794 lra_hard_reg_usage[hr + i] += lra_reg_info[regno].freq;
795 if (print_p && lra_dump_file != NULL)
796 fprintf (lra_dump_file, " Assign %d to %sr%d (freq=%d)\n",
797 reg_renumber[regno], pseudo_prefix_title (regno),
798 regno, lra_reg_info[regno].freq);
799 if (hard_regno >= 0)
800 {
801 curr_update_hard_regno_preference_check++;
802 update_hard_regno_preference (regno, hard_regno, 1);
803 }
804}
805
806/* Pseudos which occur in insns containing a particular pseudo. */
807static bitmap_head insn_conflict_pseudos;
808
809/* Bitmaps used to contain spill pseudos for given pseudo hard regno
810 and best spill pseudos for given pseudo (and best hard regno). */
811static bitmap_head spill_pseudos_bitmap, best_spill_pseudos_bitmap;
812
813/* Current pseudo check for validity of elements in
814 TRY_HARD_REG_PSEUDOS. */
815static int curr_pseudo_check;
816/* Array used for validity of elements in TRY_HARD_REG_PSEUDOS. */
817 static int try_hard_reg_pseudos_check[FIRST_PSEUDO_REGISTER];
818/* Pseudos that hold a given hard register at the considered points. */
819 static bitmap_head try_hard_reg_pseudos[FIRST_PSEUDO_REGISTER];
820
821/* Set up try_hard_reg_pseudos for given program point P and class
822 RCLASS. Those are pseudos living at P and assigned to a hard
823 register of RCLASS. In other words, those are pseudos which can be
824 spilled to assign a hard register of RCLASS to a pseudo living at
825 P. */
826static void
827setup_try_hard_regno_pseudos (int p, enum reg_class rclass)
828{
829 int i, hard_regno;
830 machine_mode mode;
831 unsigned int spill_regno;
832 bitmap_iterator bi;
833
834 /* Find what pseudos could be spilled. */
835 EXECUTE_IF_SET_IN_BITMAP (&live_hard_reg_pseudos[p], 0, spill_regno, bi)
836 {
837 mode = PSEUDO_REGNO_MODE (spill_regno);
838 hard_regno = live_pseudos_reg_renumber[spill_regno];
839 if (overlaps_hard_reg_set_p (reg_class_contents[rclass],
840 mode, hard_regno))
841 {
842 for (i = hard_regno_nregs (hard_regno, mode) - 1; i >= 0; i--)
843 {
844 if (try_hard_reg_pseudos_check[hard_regno + i]
845 != curr_pseudo_check)
846 {
847 try_hard_reg_pseudos_check[hard_regno + i]
848 = curr_pseudo_check;
849 bitmap_clear (&try_hard_reg_pseudos[hard_regno + i]);
850 }
851 bitmap_set_bit (&try_hard_reg_pseudos[hard_regno + i],
852 spill_regno);
853 }
854 }
855 }
856}
857
858/* Temporarily assign HARD_REGNO to pseudo REGNO. A temporary
859 assignment means that we might undo the data change. */
860static void
861assign_temporarily (int regno, int hard_regno)
862{
863 int p;
864 lra_live_range_t r;
865
866 for (r = lra_reg_info[regno].live_ranges; r != NULL; r = r->next)
867 {
868 for (p = r->start; p <= r->finish; p++)
869 if (hard_regno < 0)
870 bitmap_clear_bit (&live_hard_reg_pseudos[p], regno);
871 else
872 {
873 bitmap_set_bit (&live_hard_reg_pseudos[p], regno);
874 insert_in_live_range_start_chain (regno);
875 }
876 }
877 live_pseudos_reg_renumber[regno] = hard_regno;
878}
879
880/* Return true iff there is a reason why pseudo SPILL_REGNO should not
881 be spilled. */
882static bool
883must_not_spill_p (unsigned spill_regno)
884{
885 if ((pic_offset_table_rtx != NULL
886 && spill_regno == REGNO (pic_offset_table_rtx))
887 || ((int) spill_regno >= lra_constraint_new_regno_start
888 && ! bitmap_bit_p (&lra_inheritance_pseudos, spill_regno)
889 && ! bitmap_bit_p (&lra_split_regs, spill_regno)
890 && ! bitmap_bit_p (&lra_subreg_reload_pseudos, spill_regno)
891 && ! bitmap_bit_p (&lra_optional_reload_pseudos, spill_regno)))
892 return true;
893 /* A reload pseudo that requires a singleton register class should
894 not be spilled.
895 FIXME: this mitigates the issue on certain i386 patterns, but
896 does not solve the general case where existing reloads fully
897 cover a limited register class. */
898 if (!bitmap_bit_p (&non_reload_pseudos, spill_regno)
899 && reg_class_size [reg_preferred_class (spill_regno)] == 1
900 && reg_alternate_class (spill_regno) == NO_REGS)
901 return true;
902 return false;
903}
904
905/* Array used for sorting reload pseudos for subsequent allocation
906 after spilling some pseudo. */
907static int *sorted_reload_pseudos;
908
909/* Spill some pseudos for a reload pseudo REGNO and return hard
910 register which should be used for pseudo after spilling. The
911 function adds spilled pseudos to SPILLED_PSEUDO_BITMAP. When we
912 choose hard register (and pseudos occupying the hard registers and
913 to be spilled), we take into account not only how REGNO will
914 benefit from the spills but also how other reload pseudos not yet
915 assigned to hard registers benefit from the spills too. In very
916 rare cases, the function can fail and return -1.
917
918 If FIRST_P, return the first available hard reg ignoring other
919 criteria, e.g. allocation cost and cost of spilling non-reload
920 pseudos. This approach results in less hard reg pool fragmentation
921 and permits allocating hard regs to reload pseudos in complicated
922 situations where pseudo sizes are different. */
923static int
924spill_for (int regno, bitmap spilled_pseudo_bitmap, bool first_p)
925{
926 int i, j, n, p, hard_regno, best_hard_regno, cost, best_cost, rclass_size;
927 int reload_hard_regno, reload_cost;
928 bool static_p, best_static_p;
929 machine_mode mode;
930 enum reg_class rclass;
931 unsigned int spill_regno, reload_regno, uid;
932 int insn_pseudos_num, best_insn_pseudos_num;
933 int bad_spills_num, smallest_bad_spills_num;
934 lra_live_range_t r;
935 bitmap_iterator bi;
936
937 rclass = regno_allocno_class_array[regno];
938 lra_assert (reg_renumber[regno] < 0 && rclass != NO_REGS);
939 bitmap_clear (&insn_conflict_pseudos);
940 bitmap_clear (&best_spill_pseudos_bitmap);
941 EXECUTE_IF_SET_IN_BITMAP (&lra_reg_info[regno].insn_bitmap, 0, uid, bi)
942 {
943 struct lra_insn_reg *ir;
944
945 for (ir = lra_get_insn_regs (uid); ir != NULL; ir = ir->next)
946 if (ir->regno >= FIRST_PSEUDO_REGISTER)
947 bitmap_set_bit (&insn_conflict_pseudos, ir->regno);
948 }
949 best_hard_regno = -1;
950 best_cost = INT_MAX;
951 best_static_p = TRUE;
952 best_insn_pseudos_num = INT_MAX;
953 smallest_bad_spills_num = INT_MAX;
954 rclass_size = ira_class_hard_regs_num[rclass];
955 mode = PSEUDO_REGNO_MODE (regno);
956 /* Invalidate try_hard_reg_pseudos elements. */
957 curr_pseudo_check++;
958 for (r = lra_reg_info[regno].live_ranges; r != NULL; r = r->next)
959 for (p = r->start; p <= r->finish; p++)
960 setup_try_hard_regno_pseudos (p, rclass);
961 for (i = 0; i < rclass_size; i++)
962 {
963 hard_regno = ira_class_hard_regs[rclass][i];
964 bitmap_clear (&spill_pseudos_bitmap);
965 for (j = hard_regno_nregs (hard_regno, mode) - 1; j >= 0; j--)
966 {
967 if (hard_regno + j >= FIRST_PSEUDO_REGISTER)
968 break;
969 if (try_hard_reg_pseudos_check[hard_regno + j] != curr_pseudo_check)
970 continue;
971 lra_assert (!bitmap_empty_p (&try_hard_reg_pseudos[hard_regno + j]));
972 bitmap_ior_into (&spill_pseudos_bitmap,
973 &try_hard_reg_pseudos[hard_regno + j]);
974 }
975 /* Spill pseudos. */
976 static_p = false;
977 EXECUTE_IF_SET_IN_BITMAP (&spill_pseudos_bitmap, 0, spill_regno, bi)
978 if (must_not_spill_p (spill_regno))
979 goto fail;
980 else if (non_spilled_static_chain_regno_p (spill_regno))
981 static_p = true;
982 insn_pseudos_num = 0;
983 bad_spills_num = 0;
984 if (lra_dump_file != NULL)
985 fprintf (lra_dump_file, " Trying %d:", hard_regno);
986 sparseset_clear (live_range_reload_inheritance_pseudos);
987 EXECUTE_IF_SET_IN_BITMAP (&spill_pseudos_bitmap, 0, spill_regno, bi)
988 {
989 if (bitmap_bit_p (&insn_conflict_pseudos, spill_regno))
990 insn_pseudos_num++;
991 if (spill_regno >= (unsigned int) lra_bad_spill_regno_start)
992 bad_spills_num++;
993 for (r = lra_reg_info[spill_regno].live_ranges;
994 r != NULL;
995 r = r->next)
996 {
997 for (p = r->start; p <= r->finish; p++)
998 {
999 lra_live_range_t r2;
1000
1001 for (r2 = start_point_ranges[p];
1002 r2 != NULL;
1003 r2 = r2->start_next)
1004 if (r2->regno >= lra_constraint_new_regno_start)
1005 sparseset_set_bit (live_range_reload_inheritance_pseudos,
1006 r2->regno);
1007 }
1008 }
1009 }
1010 n = 0;
1011 if (sparseset_cardinality (live_range_reload_inheritance_pseudos)
1012 <= (unsigned)param_lra_max_considered_reload_pseudos)
1013 EXECUTE_IF_SET_IN_SPARSESET (live_range_reload_inheritance_pseudos,
1014 reload_regno)
1015 if ((int) reload_regno != regno
1016 && (ira_reg_classes_intersect_p
1017 [rclass][regno_allocno_class_array[reload_regno]])
1018 && live_pseudos_reg_renumber[reload_regno] < 0
1019 && find_hard_regno_for (reload_regno, &cost, -1, first_p) < 0)
1020 sorted_reload_pseudos[n++] = reload_regno;
1021 EXECUTE_IF_SET_IN_BITMAP (&spill_pseudos_bitmap, 0, spill_regno, bi)
1022 {
1023 update_lives (spill_regno, true);
1024 if (lra_dump_file != NULL)
1025 fprintf (lra_dump_file, " spill %d(freq=%d)",
1026 spill_regno, lra_reg_info[spill_regno].freq);
1027 }
1028 hard_regno = find_hard_regno_for (regno, &cost, -1, first_p);
1029 if (hard_regno >= 0)
1030 {
1031 assign_temporarily (regno, hard_regno);
1032 qsort (sorted_reload_pseudos, n, sizeof (int),
1033 reload_pseudo_compare_func);
1034 for (j = 0; j < n; j++)
1035 {
1036 reload_regno = sorted_reload_pseudos[j];
1037 lra_assert (live_pseudos_reg_renumber[reload_regno] < 0);
1038 if ((reload_hard_regno
1039 = find_hard_regno_for (reload_regno,
1040 &reload_cost, -1, first_p)) >= 0)
1041 {
1042 if (lra_dump_file != NULL)
1043 fprintf (lra_dump_file, " assign %d(cost=%d)",
1044 reload_regno, reload_cost);
1045 assign_temporarily (reload_regno, reload_hard_regno);
1046 cost += reload_cost;
1047 }
1048 }
1049 EXECUTE_IF_SET_IN_BITMAP (&spill_pseudos_bitmap, 0, spill_regno, bi)
1050 {
1051 rtx_insn_list *x;
1052
1053 cost += lra_reg_info[spill_regno].freq;
1054 if (ira_reg_equiv[spill_regno].memory != NULL
1055 || ira_reg_equiv[spill_regno].constant != NULL)
1056 for (x = ira_reg_equiv[spill_regno].init_insns;
1057 x != NULL;
1058 x = x->next ())
1059 cost -= REG_FREQ_FROM_BB (BLOCK_FOR_INSN (x->insn ()));
1060 }
1061 /* Avoid spilling static chain pointer pseudo when non-local
1062 goto is used. */
1063 if ((! static_p && best_static_p)
1064 || (static_p == best_static_p
1065 && (best_insn_pseudos_num > insn_pseudos_num
1066 || (best_insn_pseudos_num == insn_pseudos_num
1067 && (bad_spills_num < smallest_bad_spills_num
1068 || (bad_spills_num == smallest_bad_spills_num
1069 && best_cost > cost))))))
1070 {
1071 best_insn_pseudos_num = insn_pseudos_num;
1072 smallest_bad_spills_num = bad_spills_num;
1073 best_static_p = static_p;
1074 best_cost = cost;
1075 best_hard_regno = hard_regno;
1076 bitmap_copy (&best_spill_pseudos_bitmap, &spill_pseudos_bitmap);
1077 if (lra_dump_file != NULL)
1078 fprintf (lra_dump_file,
1079 " Now best %d(cost=%d, bad_spills=%d, insn_pseudos=%d)\n",
1080 hard_regno, cost, bad_spills_num, insn_pseudos_num);
1081 }
1082 assign_temporarily (regno, -1);
1083 for (j = 0; j < n; j++)
1084 {
1085 reload_regno = sorted_reload_pseudos[j];
1086 if (live_pseudos_reg_renumber[reload_regno] >= 0)
1087 assign_temporarily (reload_regno, -1);
1088 }
1089 }
1090 if (lra_dump_file != NULL)
1091 fprintf (lra_dump_file, "\n");
1092 /* Restore the live hard reg pseudo info for spilled pseudos. */
1093 EXECUTE_IF_SET_IN_BITMAP (&spill_pseudos_bitmap, 0, spill_regno, bi)
1094 update_lives (spill_regno, false);
1095 fail:
1096 ;
1097 }
1098 /* Spill: */
1099 EXECUTE_IF_SET_IN_BITMAP (&best_spill_pseudos_bitmap, 0, spill_regno, bi)
1100 {
1101 if ((int) spill_regno >= lra_constraint_new_regno_start)
1102 former_reload_pseudo_spill_p = true;
1103 if (lra_dump_file != NULL)
1104 fprintf (lra_dump_file, " Spill %sr%d(hr=%d, freq=%d) for r%d\n",
1105 pseudo_prefix_title (spill_regno),
1106 spill_regno, reg_renumber[spill_regno],
1107 lra_reg_info[spill_regno].freq, regno);
1108 update_lives (spill_regno, true);
1109 lra_setup_reg_renumber (spill_regno, -1, false);
1110 }
1111 bitmap_ior_into (spilled_pseudo_bitmap, &best_spill_pseudos_bitmap);
1112 return best_hard_regno;
1113}
1114
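/* Bare-bones shape, under invented names, of the search in spill_for
   above: for each candidate hard register of the class, price the
   pseudos that would have to be spilled together with the follow-on
   reassignments, and keep the cheapest candidate.  */

#include <climits>

static int
best_spill_choice_sketch (const int *candidates, int n,
                          int (*price) (int hard_regno))
{
  int best_hard_regno = -1, best_cost = INT_MAX;
  for (int i = 0; i < n; i++)
    {
      int cost = price (candidates[i]); /* spills + lost assignments */
      if (cost < best_cost)
        {
          best_cost = cost;
          best_hard_regno = candidates[i];
        }
    }
  return best_hard_regno;               /* -1 if nothing workable */
}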
1115/* Assign HARD_REGNO to REGNO. */
1116static void
1117assign_hard_regno (int hard_regno, int regno)
1118{
1119 int i;
1120
1121 lra_assert (hard_regno >= 0);
1122 lra_setup_reg_renumber (regno, hard_regno, true);
1123 update_lives (regno, false);
1124 for (i = 0;
1125 i < hard_regno_nregs (hard_regno, lra_reg_info[regno].biggest_mode);
1126 i++)
1127 df_set_regs_ever_live (hard_regno + i, true);
1128}
1129
1130/* Array used for sorting different pseudos. */
1131static int *sorted_pseudos;
1132
1133/* The constraints pass is allowed to create equivalences between
1134 pseudos that make the current allocation "incorrect" (in the sense
1135 that pseudos are assigned to hard registers from their own conflict
1136 sets). The global variable check_and_force_assignment_correctness_p says
1137 whether this might have happened.
1138
1139 Process pseudos assigned to hard registers (less frequently used
1140 first), spill if a conflict is found, and mark the spilled pseudos
1141 in SPILLED_PSEUDO_BITMAP. Set up LIVE_HARD_REG_PSEUDOS from
1142 pseudos, assigned to hard registers. */
1143static void
1144setup_live_pseudos_and_spill_after_risky_transforms (bitmap
1145 spilled_pseudo_bitmap)
1146{
1147 int p, i, j, n, regno, hard_regno, biggest_nregs, nregs_diff;
1148 unsigned int k, conflict_regno;
1149 poly_int64 offset;
1150 int val;
1151 HARD_REG_SET conflict_set;
1152 machine_mode mode, biggest_mode;
1153 lra_live_range_t r;
1154 bitmap_iterator bi;
1155 int max_regno = max_reg_num ();
1156
1157 if (! check_and_force_assignment_correctness_p)
1158 {
1159 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1160 if (reg_renumber[i] >= 0 && lra_reg_info[i].nrefs > 0)
1161 update_lives (i, false);
1162 return;
1163 }
1164 for (n = 0, i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1165 if ((pic_offset_table_rtx == NULL_RTX
1166 || i != (int) REGNO (pic_offset_table_rtx))
1167 && (hard_regno = reg_renumber[i]) >= 0 && lra_reg_info[i].nrefs > 0)
1168 {
1169 biggest_mode = lra_reg_info[i].biggest_mode;
1170 biggest_nregs = hard_regno_nregs (hard_regno, biggest_mode);
1171 nregs_diff = (biggest_nregs
1172 - hard_regno_nregs (hard_regno, PSEUDO_REGNO_MODE (i)));
1173 enum reg_class rclass = lra_get_allocno_class (i);
1174
1175 if ((WORDS_BIG_ENDIAN
1176 && (hard_regno - nregs_diff < 0
1177 || !TEST_HARD_REG_BIT (reg_class_contents[rclass],
1178 hard_regno - nregs_diff)))
1179 || (!WORDS_BIG_ENDIAN
1180 && (hard_regno + nregs_diff >= FIRST_PSEUDO_REGISTER
1181 || !TEST_HARD_REG_BIT (reg_class_contents[rclass],
1182 hard_regno + nregs_diff))))
1183 {
1184 /* Hard registers of paradoxical sub-registers are out of
1185 range of pseudo register class. Spill the pseudo. */
1186 reg_renumber[i] = -1;
1187 continue;
1188 }
1189 sorted_pseudos[n++] = i;
1190 }
1191 qsort (sorted_pseudos, n, sizeof (int), pseudo_compare_func);
1192 if (pic_offset_table_rtx != NULL_RTX
1193 && (regno = REGNO (pic_offset_table_rtx)) >= FIRST_PSEUDO_REGISTER
1194 && reg_renumber[regno] >= 0 && lra_reg_info[regno].nrefs > 0)
1195 sorted_pseudos[n++] = regno;
1196 for (i = n - 1; i >= 0; i--)
1197 {
1198 regno = sorted_pseudos[i];
1199 hard_regno = reg_renumber[regno];
1200 lra_assert (hard_regno >= 0);
1201 mode = lra_reg_info[regno].biggest_mode;
1202 sparseset_clear (live_range_hard_reg_pseudos);
1203 for (r = lra_reg_info[regno].live_ranges; r != NULL; r = r->next)
1204 {
1205 EXECUTE_IF_SET_IN_BITMAP (&live_hard_reg_pseudos[r->start], 0, k, bi)
1206 sparseset_set_bit (live_range_hard_reg_pseudos, k);
1207 for (p = r->start + 1; p <= r->finish; p++)
1208 {
1209 lra_live_range_t r2;
1210
1211 for (r2 = start_point_ranges[p];
1212 r2 != NULL;
1213 r2 = r2->start_next)
1214 if (live_pseudos_reg_renumber[r2->regno] >= 0)
1215 sparseset_set_bit (live_range_hard_reg_pseudos, r2->regno);
1216 }
1217 }
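/* At this point LIVE_RANGE_HARD_REG_PSEUDOS holds every pseudo with a
   hard register whose live range overlaps one of REGNO's ranges,
   collected both from the range start points and from the
   start_point_ranges chains of the in-between program points.  */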
1218 conflict_set = lra_no_alloc_regs;
1219 conflict_set |= lra_reg_info[regno].conflict_hard_regs;
1220 val = lra_reg_info[regno].val;
1221 offset = lra_reg_info[regno].offset;
1222 EXECUTE_IF_SET_IN_SPARSESET (live_range_hard_reg_pseudos, conflict_regno)
1223 if (!lra_reg_val_equal_p (conflict_regno, val, offset)
1224 /* Multi-register pseudos should start on the same hard
1225 register. */
1226 || hard_regno != reg_renumber[conflict_regno])
1227 {
1228 int conflict_hard_regno = reg_renumber[conflict_regno];
1229
1230 biggest_mode = lra_reg_info[conflict_regno].biggest_mode;
1231 biggest_nregs = hard_regno_nregs (conflict_hard_regno,
1232 biggest_mode);
1233 nregs_diff
1234 = (biggest_nregs
1235 - hard_regno_nregs (conflict_hard_regno,
1236 PSEUDO_REGNO_MODE (conflict_regno)));
1237 add_to_hard_reg_set (&conflict_set,
1238 biggest_mode,
1239 conflict_hard_regno
1240 - (WORDS_BIG_ENDIAN ? nregs_diff : 0));
1241 }
1242 if (! overlaps_hard_reg_set_p (conflict_set, mode, hard_regno))
1243 {
1244 update_lives (regno, false);
1245 continue;
1246 }
1247 bitmap_set_bit (spilled_pseudo_bitmap, regno);
1248 for (j = 0;
1249 j < hard_regno_nregs (hard_regno, PSEUDO_REGNO_MODE (regno));
1250 j++)
1251 lra_hard_reg_usage[hard_regno + j] -= lra_reg_info[regno].freq;
1252 reg_renumber[regno] = -1;
1253 if (regno >= lra_constraint_new_regno_start)
1254 former_reload_pseudo_spill_p = true;
1255 if (lra_dump_file != NULL)
1256 fprintf (lra_dump_file, " Spill r%d after risky transformations\n",
1257 regno);
1258 }
1259}
1260
1261/* Improve the allocation by assigning the hard regno of an inheritance
1262 pseudo to its connected pseudos. We need this because inheritance
1263 pseudos are allocated after reload pseudos in the thread, and when
1264 we assign a hard register to a reload pseudo we do not yet know that
1265 the connected inheritance pseudos can get the same hard register.
1266 Add pseudos with changed allocation to bitmap CHANGED_PSEUDOS. */
1267static void
1268improve_inheritance (bitmap changed_pseudos)
1269{
1270 unsigned int k;
1271 int regno, another_regno, hard_regno, another_hard_regno, cost, i, n;
1272 lra_copy_t cp, next_cp;
1273 bitmap_iterator bi;
1274
1275 if (lra_inheritance_iter > LRA_MAX_INHERITANCE_PASSES)
1276 return;
1277 n = 0;
1278 EXECUTE_IF_SET_IN_BITMAP (&lra_inheritance_pseudos, 0, k, bi)
1279 if (reg_renumber[k] >= 0 && lra_reg_info[k].nrefs != 0)
1280 sorted_pseudos[n++] = k;
1281 qsort (sorted_pseudos, n, sizeof (int), pseudo_compare_func);
1282 for (i = 0; i < n; i++)
1283 {
1284 regno = sorted_pseudos[i];
1285 hard_regno = reg_renumber[regno];
1286 lra_assert (hard_regno >= 0);
1287 for (cp = lra_reg_info[regno].copies; cp != NULL; cp = next_cp)
1288 {
1289 if (cp->regno1 == regno)
1290 {
1291 next_cp = cp->regno1_next;
1292 another_regno = cp->regno2;
1293 }
1294 else if (cp->regno2 == regno)
1295 {
1296 next_cp = cp->regno2_next;
1297 another_regno = cp->regno1;
1298 }
1299 else
1300 gcc_unreachable ();
1301 /* Don't change reload pseudo allocation. It might have
1302 this allocation for a purpose and changing it can result
1303 in LRA cycling. */
1304 if ((another_regno < lra_constraint_new_regno_start
1305 || bitmap_bit_p (&lra_inheritance_pseudos, another_regno))
1306 && (another_hard_regno = reg_renumber[another_regno]) >= 0
1307 && another_hard_regno != hard_regno)
1308 {
1309 if (lra_dump_file != NULL)
1310 fprintf
1311 (lra_dump_file,
1312 " Improving inheritance for %d(%d) and %d(%d)...\n",
1313 regno, hard_regno, another_regno, another_hard_regno);
1314 update_lives (another_regno, true);
1315 lra_setup_reg_renumber (another_regno, -1, false);
1316 if (hard_regno == find_hard_regno_for (another_regno, &cost,
1317 hard_regno, false))
1318 assign_hard_regno (hard_regno, another_regno);
1319 else
1320 assign_hard_regno (another_hard_regno, another_regno);
1321 bitmap_set_bit (changed_pseudos, another_regno);
1322 }
1323 }
1324 }
1325}
1326
1327
1328/* Bitmap finally containing all pseudos spilled on this assignment
1329 pass. */
1330static bitmap_head all_spilled_pseudos;
1331/* All pseudos whose allocation was changed. */
1332static bitmap_head changed_pseudo_bitmap;
1333
1334
1335/* Add to LIVE_RANGE_HARD_REG_PSEUDOS all pseudos conflicting with
1336 REGNO and whose hard regs can be assigned to REGNO. */
1337static void
1338find_all_spills_for (int regno)
1339{
1340 int p;
1341 lra_live_range_t r;
1342 unsigned int k;
1343 bitmap_iterator bi;
1344 enum reg_class rclass;
1345 bool *rclass_intersect_p;
1346
1347 rclass = regno_allocno_class_array[regno];
1348 rclass_intersect_p = ira_reg_classes_intersect_p[rclass];
1349 for (r = lra_reg_info[regno].live_ranges; r != NULL; r = r->next)
1350 {
1351 EXECUTE_IF_SET_IN_BITMAP (&live_hard_reg_pseudos[r->start], 0, k, bi)
1352 if (rclass_intersect_p[regno_allocno_class_array[k]])
1353 sparseset_set_bit (live_range_hard_reg_pseudos, k);
1354 for (p = r->start + 1; p <= r->finish; p++)
1355 {
1356 lra_live_range_t r2;
1357
1358 for (r2 = start_point_ranges[p];
1359 r2 != NULL;
1360 r2 = r2->start_next)
1361 {
1362 if (live_pseudos_reg_renumber[r2->regno] >= 0
1363 && ! sparseset_bit_p (live_range_hard_reg_pseudos, r2->regno)
1364 && rclass_intersect_p[regno_allocno_class_array[r2->regno]]
1365 && ((int) r2->regno < lra_constraint_new_regno_start
1366 || bitmap_bit_p (&lra_inheritance_pseudos, r2->regno)
1367 || bitmap_bit_p (&lra_split_regs, r2->regno)
1368 || bitmap_bit_p (&lra_optional_reload_pseudos, r2->regno)
1369 /* There is no point in considering another reload
1370 pseudo if it has the same class. */
1371 || regno_allocno_class_array[r2->regno] != rclass))
1372 sparseset_set_bit (live_range_hard_reg_pseudos, r2->regno);
1373 }
1374 }
1375 }
1376}
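/* find_all_spills_for is called from assign_by_spills below for each
   reload pseudo that failed both assignment iterations; everything
   accumulated in LIVE_RANGE_HARD_REG_PSEUDOS is then spilled
   wholesale.  */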
1377
1378/* Assign hard registers to reload pseudos and other pseudos. Return
1379 true if we were unable to assign hard registers to all reload
1380 pseudos. */
1381static bool
1382assign_by_spills (void)
1383{
1384 int i, n, nfails, iter, regno, regno2, hard_regno, cost;
1385 rtx restore_rtx;
1386 bitmap_head changed_insns, do_not_assign_nonreload_pseudos;
1387 unsigned int u, conflict_regno;
1388 bitmap_iterator bi;
1389 bool reload_p, fails_p = false;
1390 int max_regno = max_reg_num ();
1391
1392 for (n = 0, i = lra_constraint_new_regno_start; i < max_regno; i++)
1393 if (reg_renumber[i] < 0 && lra_reg_info[i].nrefs != 0
1394 && regno_allocno_class_array[i] != NO_REGS)
1395 sorted_pseudos[n++] = i;
1396 bitmap_initialize (&insn_conflict_pseudos, &reg_obstack);
1397 bitmap_initialize (&spill_pseudos_bitmap, &reg_obstack);
1398 bitmap_initialize (&best_spill_pseudos_bitmap, &reg_obstack);
1399 update_hard_regno_preference_check = XCNEWVEC (int, max_regno);
1400 curr_update_hard_regno_preference_check = 0;
1401 memset (try_hard_reg_pseudos_check, 0, sizeof (try_hard_reg_pseudos_check));
1402 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1403 bitmap_initialize (&try_hard_reg_pseudos[i], &reg_obstack);
1404 curr_pseudo_check = 0;
1405 bitmap_initialize (&changed_insns, &reg_obstack);
1406 bitmap_initialize (&non_reload_pseudos, &reg_obstack);
1407 bitmap_ior (&non_reload_pseudos, &lra_inheritance_pseudos, &lra_split_regs);
1408 bitmap_ior_into (&non_reload_pseudos, &lra_subreg_reload_pseudos);
1409 bitmap_ior_into (&non_reload_pseudos, &lra_optional_reload_pseudos);
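/* The loop below makes at most two passes over the sorted pseudos.
   On the first pass (iter == 0), reload pseudos that could not be
   assigned are collected at the front of SORTED_PSEUDOS; before the
   second pass, every pseudo conflicting with them is spilled and
   find_hard_regno_for is called with its last argument (iter == 1)
   set to true.  */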
1410 for (iter = 0; iter <= 1; iter++)
1411 {
1412 qsort (sorted_pseudos, n, sizeof (int), reload_pseudo_compare_func);
1413 nfails = 0;
1414 for (i = 0; i < n; i++)
1415 {
1416 regno = sorted_pseudos[i];
1417 if (reg_renumber[regno] >= 0)
1418 continue;
1419 if (lra_dump_file != NULL)
1420 fprintf (lra_dump_file, " Assigning to %d "
1421 "(cl=%s, orig=%d, freq=%d, tfirst=%d, tfreq=%d)...\n",
1422 regno, reg_class_names[regno_allocno_class_array[regno]],
1423 ORIGINAL_REGNO (regno_reg_rtx[regno]),
1424 lra_reg_info[regno].freq, regno_assign_info[regno].first,
1425 regno_assign_info[regno_assign_info[regno].first].freq);
1426 hard_regno = find_hard_regno_for (regno, &cost, -1, iter == 1);
1427 reload_p = ! bitmap_bit_p (&non_reload_pseudos, regno);
1428 if (hard_regno < 0 && reload_p)
1429 hard_regno = spill_for (regno, &all_spilled_pseudos, iter == 1);
1430 if (hard_regno < 0)
1431 {
1432 if (reload_p) {
1433 /* Put unassigned reload pseudo first in the
1434 array. */
1435 regno2 = sorted_pseudos[nfails];
1436 sorted_pseudos[nfails++] = regno;
1437 sorted_pseudos[i] = regno2;
1438 }
1439 }
1440 else
1441 {
1442 /* This register might have been spilled by the previous
1443 pass. Indicate that it is no longer spilled. */
1444 bitmap_clear_bit (&all_spilled_pseudos, regno);
1445 assign_hard_regno (hard_regno, regno);
1446 if (! reload_p)
1447 /* As a non-reload pseudo's assignment has changed, we
1448 should reconsider the insns referring to the
1449 pseudo. */
1450 bitmap_set_bit (&changed_pseudo_bitmap, regno);
1451 }
1452 }
1453 if (nfails == 0 || iter > 0)
1454 {
1455 fails_p = nfails != 0;
1456 break;
1457 }
1458 /* This is a very rare event. We cannot assign a hard register
1459 to a reload pseudo because the hard register was assigned to
1460 another reload pseudo on a previous assignment pass. For
1461 example, on x86, on the 1st pass we assigned CX (although
1462 another hard register could have been used) to a reload pseudo
1463 in an insn; on the 2nd pass we need CX (and only CX) for a new
1464 reload pseudo in the same insn. Another situation may occur
1465 when assigning multi-register reload pseudos if the hard
1466 register pool is too fragmented even after spilling non-reload
1467 pseudos.
1468
1469 We should do something radical here to succeed. Here we
1470 spill *all* conflicting pseudos and reassign them. */
1471 if (lra_dump_file != NULL)
1472 fprintf (lra_dump_file, " 2nd iter for reload pseudo assignments:\n");
1473 sparseset_clear (live_range_hard_reg_pseudos);
1474 for (i = 0; i < nfails; i++)
1475 {
1476 if (lra_dump_file != NULL)
1477 fprintf (lra_dump_file, " Reload r%d assignment failure\n",
1478 sorted_pseudos[i]);
1479 find_all_spills_for (sorted_pseudos[i]);
1480 }
1481 EXECUTE_IF_SET_IN_SPARSESET (live_range_hard_reg_pseudos, conflict_regno)
1482 {
1483 if ((int) conflict_regno >= lra_constraint_new_regno_start)
1484 {
1485 sorted_pseudos[nfails++] = conflict_regno;
1486 former_reload_pseudo_spill_p = true;
1487 }
1488 else
1489 /* It is better to do reloads before spilling as after the
1490 spill-subpass we will reload memory instead of pseudos
1491 and this will make reusing reload pseudos more
1492 complicated. Going directly to the spill pass in such a
1493 case might result in worse code performance or even LRA
1494 cycling if we have few registers. */
1495 bitmap_set_bit (&all_spilled_pseudos, conflict_regno);
1496 if (lra_dump_file != NULL)
1497 fprintf (lra_dump_file, " Spill %s r%d(hr=%d, freq=%d)\n",
1498 pseudo_prefix_title (conflict_regno), conflict_regno,
1499 reg_renumber[conflict_regno],
1500 lra_reg_info[conflict_regno].freq);
1501 update_lives (conflict_regno, true);
1502 lra_setup_reg_renumber (conflict_regno, -1, false);
1503 }
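/* The conflicting pseudos spilled above were appended to
   SORTED_PSEUDOS (nfails grew past n), so enlarge N to make the
   second iteration consider them too.  */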
1504 if (n < nfails)
1505 n = nfails;
1506 }
1507 improve_inheritance (&changed_pseudo_bitmap);
1508 bitmap_clear (&non_reload_pseudos);
1509 bitmap_clear (&changed_insns);
1510 if (! lra_simple_p)
1511 {
1512 /* We should not assign to original pseudos of inheritance
1513 pseudos or split pseudos if any of their inheritance pseudos
1514 did not get a hard register or any of their split pseudos was
1515 not split, because the undo inheritance/split pass will extend
1516 the live range of such inheritance or split pseudos. */
1517 bitmap_initialize (&do_not_assign_nonreload_pseudos, &reg_obstack);
1518 EXECUTE_IF_SET_IN_BITMAP (&lra_inheritance_pseudos, 0, u, bi)
1519 if ((restore_rtx = lra_reg_info[u].restore_rtx) != NULL_RTX
1520 && REG_P (restore_rtx)
1521 && reg_renumber[u] < 0
1522 && bitmap_bit_p (&lra_inheritance_pseudos, u))
1523 bitmap_set_bit (&do_not_assign_nonreload_pseudos, REGNO (restore_rtx));
1524 EXECUTE_IF_SET_IN_BITMAP (&lra_split_regs, 0, u, bi)
1525 if ((restore_rtx = lra_reg_info[u].restore_rtx) != NULL_RTX
1526 && reg_renumber[u] >= 0)
1527 {
1528 lra_assert (REG_P (restore_rtx));
1529 bitmap_set_bit (&do_not_assign_nonreload_pseudos, REGNO (restore_rtx));
1530 }
1531 for (n = 0, i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1532 if (((i < lra_constraint_new_regno_start
1533 && ! bitmap_bit_p (&do_not_assign_nonreload_pseudos, i))
1534 || (bitmap_bit_p (&lra_inheritance_pseudos, i)
1535 && lra_reg_info[i].restore_rtx != NULL_RTX)
1536 || (bitmap_bit_p (&lra_split_regs, i)
1537 && lra_reg_info[i].restore_rtx != NULL_RTX)
1538 || bitmap_bit_p (&lra_subreg_reload_pseudos, i)
1539 || bitmap_bit_p (&lra_optional_reload_pseudos, i))
1540 && reg_renumber[i] < 0 && lra_reg_info[i].nrefs != 0
1541 && regno_allocno_class_array[i] != NO_REGS)
1542 sorted_pseudos[n++] = i;
1543 bitmap_clear (&do_not_assign_nonreload_pseudos);
1544 if (n != 0 && lra_dump_file != NULL)
1545 fprintf (lra_dump_file, " Reassigning non-reload pseudos\n");
1546 qsort (sorted_pseudos, n, sizeof (int), pseudo_compare_func);
1547 for (i = 0; i < n; i++)
1548 {
1549 regno = sorted_pseudos[i];
1550 hard_regno = find_hard_regno_for (regno, &cost, -1, false);
1551 if (hard_regno >= 0)
1552 {
1553 assign_hard_regno (hard_regno, regno);
1554 /* We change allocation for non-reload pseudo on this
1555 iteration -- mark the pseudo for invalidation of used
1556 alternatives of insns containing the pseudo. */
1557 bitmap_set_bit (&changed_pseudo_bitmap, regno);
1558 }
1559 else
1560 {
1561 enum reg_class rclass = lra_get_allocno_class (regno);
1562 enum reg_class spill_class;
1563
1564 if (targetm.spill_class == NULL
1565 || lra_reg_info[regno].restore_rtx == NULL_RTX
1566 || ! bitmap_bit_p (&lra_inheritance_pseudos, regno)
1567 || (spill_class
1568 = ((enum reg_class)
1569 targetm.spill_class
1570 ((reg_class_t) rclass,
1571 PSEUDO_REGNO_MODE (regno)))) == NO_REGS)
1572 continue;
1573 regno_allocno_class_array[regno] = spill_class;
1574 hard_regno = find_hard_regno_for (regno, &cost, -1, false);
1575 if (hard_regno < 0)
1576 regno_allocno_class_array[regno] = rclass;
1577 else
1578 {
1579 setup_reg_classes
1580 (regno, spill_class, spill_class, spill_class);
1581 assign_hard_regno (hard_regno, regno);
1582 bitmap_set_bit (&changed_pseudo_bitmap, regno);
1583 }
1584 }
1585 }
1586 }
1587 free (update_hard_regno_preference_check);
1588 bitmap_clear (&best_spill_pseudos_bitmap);
1589 bitmap_clear (&spill_pseudos_bitmap);
1590 bitmap_clear (&insn_conflict_pseudos);
1591 return fails_p;
1592}
1593
1594/* Entry function to assign hard registers to new reload pseudos
1595 starting with LRA_CONSTRAINT_NEW_REGNO_START (possibly spilling
1596 old pseudos) and possibly to the old pseudos too. The function
1597 adds the insns to process on the next constraint pass: all insns
1598 that contain non-reload and non-inheritance pseudos with changed
1599 allocation.
1600
1601 Return true if we did not spill any non-reload and non-inheritance
1602 pseudos. Set FAILS_P if we failed to assign hard registers to
1603 all reload pseudos. */
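/* A minimal call sketch (hypothetical; the real caller is the LRA
   driver in lra.cc):

     bool fails_p;
     bool no_spills_p = lra_assign (fails_p);
     if (fails_p)
       ... fall back, e.g. to lra_split_hard_reg_for () ...
*/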
1604bool
1605lra_assign (bool &fails_p)
1606{
1607 int i;
1608 unsigned int u;
1609 bitmap_iterator bi;
1610 bitmap_head insns_to_process;
1611 bool no_spills_p;
1612 int max_regno = max_reg_num ();
1613
1614 timevar_push (TV_LRA_ASSIGN);
1615 lra_assignment_iter++;
1616 if (lra_dump_file != NULL)
1617 fprintf (lra_dump_file, "\n********** Assignment #%d: **********\n\n",
1618 lra_assignment_iter);
1619 init_lives ();
1620 sorted_pseudos = XNEWVEC (int, max_regno);
1621 sorted_reload_pseudos = XNEWVEC (int, max_regno);
1622 regno_allocno_class_array = XNEWVEC (enum reg_class, max_regno);
1623 regno_live_length = XNEWVEC (int, max_regno);
1624 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1625 {
1626 int l;
1627 lra_live_range_t r;
1628
1629 regno_allocno_class_array[i] = lra_get_allocno_class (i);
1630 for (l = 0, r = lra_reg_info[i].live_ranges; r != NULL; r = r->next)
1631 l += r->finish - r->start + 1;
1632 regno_live_length[i] = l;
1633 }
1634 former_reload_pseudo_spill_p = false;
1635 init_regno_assign_info ();
1636 bitmap_initialize (&all_spilled_pseudos, &reg_obstack);
1637 create_live_range_start_chains ();
1638 setup_live_pseudos_and_spill_after_risky_transforms (&all_spilled_pseudos);
1639 if (! lra_hard_reg_split_p && ! lra_asm_error_p && flag_checking)
1640 /* Check the correctness of the allocation, but only when there
1641 are no hard reg splits and no asm errors, since in case of
1642 errors explicit insns involving hard regs are added or the asm
1643 is removed, and this can result in incorrect allocation. */
1644 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1645 if (lra_reg_info[i].nrefs != 0
1646 && reg_renumber[i] >= 0
1647 && overlaps_hard_reg_set_p (lra_reg_info[i].conflict_hard_regs,
1648 PSEUDO_REGNO_MODE (i), reg_renumber[i]))
1649 gcc_unreachable ();
1650 /* Setup insns to process on the next constraint pass. */
1651 bitmap_initialize (&changed_pseudo_bitmap, &reg_obstack);
1652 init_live_reload_and_inheritance_pseudos ();
1653 fails_p = assign_by_spills ();
1654 finish_live_reload_and_inheritance_pseudos ();
1655 bitmap_ior_into (&changed_pseudo_bitmap, &all_spilled_pseudos);
1656 no_spills_p = true;
1657 EXECUTE_IF_SET_IN_BITMAP (&all_spilled_pseudos, 0, u, bi)
1658 /* We ignore spilled pseudos created on the last inheritance
1659 pass because they will be removed. */
1660 if (lra_reg_info[u].restore_rtx == NULL_RTX)
1661 {
1662 no_spills_p = false;
1663 break;
1664 }
1665 finish_live_range_start_chains ();
1666 bitmap_clear (&all_spilled_pseudos);
1667 bitmap_initialize (&insns_to_process, &reg_obstack);
1668 EXECUTE_IF_SET_IN_BITMAP (&changed_pseudo_bitmap, 0, u, bi)
1669 bitmap_ior_into (&insns_to_process, &lra_reg_info[u].insn_bitmap);
1670 bitmap_clear (&changed_pseudo_bitmap);
1671 EXECUTE_IF_SET_IN_BITMAP (&insns_to_process, 0, u, bi)
1672 {
1673 lra_push_insn_by_uid (u);
1674 /* Invalidate the used alternative of the insn so it is reprocessed. */
1675 lra_set_used_insn_alternative_by_uid (u, -1);
1676 }
1677 bitmap_clear (&insns_to_process);
1678 finish_regno_assign_info ();
1679 free (regno_live_length);
1680 free (regno_allocno_class_array);
1681 free (sorted_pseudos);
1682 free (sorted_reload_pseudos);
1683 finish_lives ();
1684 timevar_pop (TV_LRA_ASSIGN);
1685 if (former_reload_pseudo_spill_p)
1686 lra_assignment_iter_after_spill++;
1687 /* This is conditional on flag_checking because valid code can take
1688 more than this maximum number of iterations, but at the same time
1689 the test can uncover errors in machine descriptions. */
1690 if (flag_checking
1691 && (lra_assignment_iter_after_spill
1692 > LRA_MAX_ASSIGNMENT_ITERATION_NUMBER))
1693 internal_error
1694 ("maximum number of LRA assignment passes is achieved (%d)",
1695 LRA_MAX_ASSIGNMENT_ITERATION_NUMBER);
1696 /* Reset the assignment correctness flag: */
1697 check_and_force_assignment_correctness_p = false;
1698 return no_spills_p;
1699}
1700
1701/* Find the start and finish insns for reload pseudo REGNO. Return
1702 true if we managed to find the expected insns. Return false
1703 otherwise. */
1704static bool
1705find_reload_regno_insns (int regno, rtx_insn * &start, rtx_insn * &finish)
1706{
1707 unsigned int uid;
1708 bitmap_iterator bi;
1709 int insns_num = 0;
1710 bool clobber_p = false;
1711 rtx_insn *prev_insn, *next_insn;
1712 rtx_insn *start_insn = NULL, *first_insn = NULL, *second_insn = NULL;
10: 'start_insn' initialized to a null pointer value
1713
1714 EXECUTE_IF_SET_IN_BITMAP (&lra_reg_info[regno].insn_bitmap, 0, uid, bi)
11: Loop condition is false. Execution continues on line 1725
1715 {
1716 if (start_insn == NULL)
1717 start_insn = lra_insn_recog_data[uid]->insn;
1718 if (GET_CODE (PATTERN (lra_insn_recog_data[uid]->insn)) == CLOBBER)
1719 clobber_p = true;
1720 else
1721 insns_num++;
1722 }
1723 /* For a reload pseudo we should have at most 3 insns referring to
1724 it besides clobbers: the input/output reload insns and the original insn. */
1725 if (insns_num > 3)
11.1: 'insns_num' is <= 3
12: Taking false branch
1726 return false;
1727 if (clobber_p)
12.1: 'clobber_p' is false
13: Taking false branch
1728 insns_num++;
1729 if (insns_num > 1)
13.1: 'insns_num' is <= 1
1730 {
1731 for (prev_insn = PREV_INSN (start_insn),
1732 next_insn = NEXT_INSN (start_insn);
1733 insns_num != 1 && (prev_insn != NULL
1734 || (next_insn != NULL && second_insn == NULL)); )
1735 {
1736 if (prev_insn != NULL)
1737 {
1738 if (bitmap_bit_p (&lra_reg_info[regno].insn_bitmap,
1739 INSN_UID (prev_insn)))
1740 {
1741 first_insn = prev_insn;
1742 insns_num--;
1743 }
1744 prev_insn = PREV_INSN (prev_insn);
1745 }
1746 if (next_insn != NULL && second_insn == NULL)
1747 {
1748 if (! bitmap_bit_p (&lra_reg_info[regno].insn_bitmap,
1749 INSN_UID (next_insn)))
1750 next_insn = NEXT_INSN (next_insn);
1751 else
1752 {
1753 second_insn = next_insn;
1754 insns_num--;
1755 }
1756 }
1757 }
1758 if (insns_num > 1)
1759 return false;
1760 }
1761 start = first_insn != NULL ? first_insn : start_insn;
14: Taking false branch
15: '?' condition is false
16: Null pointer value stored to 'first'
1762 finish = second_insn != NULL ? second_insn : start_insn;
17: '?' condition is false
1763 return true;
1764}
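/* Note on the analyzer path above: if LRA_REG_INFO[REGNO].insn_bitmap
   is empty, the loop at line 1714 never executes, START_INSN stays
   NULL, and lines 1761-1762 store NULL into both START and FINISH
   while the function still returns true.  The caller then passes the
   null FIRST to BLOCK_FOR_INSN (line 1803), which is the null
   dereference this report flags.  A guard such as the following (a
   hypothetical sketch, not the upstream fix) would cut the path:

     if (start_insn == NULL)
       return false;
*/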
1765
1766/* Process reload pseudos which did not get a hard reg, split a hard
1767 reg's live range within the live range of a reload pseudo, and then
1768 return TRUE. If we did not split a hard reg live range, report an
1769 error and return FALSE. */
1770bool
1771lra_split_hard_reg_for (void)
1772{
1773 int i, regno;
1774 rtx_insn *insn, *first, *last;
1775 unsigned int u;
1776 bitmap_iterator bi;
1777 enum reg_class rclass;
1778 int max_regno = max_reg_num ();
1779 /* We did not assign hard regs to reload pseudos after two
1780 iterations. Either it's an asm and something is wrong with the
1781 constraints, or we have run out of spill registers; error out in
1782 either case. */
1783 bool asm_p = false, spill_p = false;
1784 bitmap_head failed_reload_insns, failed_reload_pseudos, over_split_insns;
1785
1786 if (lra_dump_file != NULL)
1: Assuming the condition is false
2: Taking false branch
1787 fprintf (lra_dump_file,
1788 "\n****** Splitting a hard reg after assignment #%d: ******\n\n",
1789 lra_assignment_iter);
1790 bitmap_initialize (&failed_reload_pseudos, &reg_obstack);
1791 bitmap_initialize (&non_reload_pseudos, &reg_obstack);
1792 bitmap_ior (&non_reload_pseudos, &lra_inheritance_pseudos, &lra_split_regs);
1793 bitmap_ior_into (&non_reload_pseudos, &lra_subreg_reload_pseudos);
1794 bitmap_ior_into (&non_reload_pseudos, &lra_optional_reload_pseudos);
1795 bitmap_initialize (&over_split_insns, &reg_obstack);
1796 for (i = lra_constraint_new_regno_start; i < max_regno; i++)
3: Assuming 'i' is < 'max_regno'
1797 if (reg_renumber[i] < 0 && lra_reg_info[i].nrefs != 0
4: Assuming the condition is true
5: Assuming field 'nrefs' is not equal to 0
8: Taking true branch
1798 && (rclass = lra_get_allocno_class (i)) != NO_REGS
6: Assuming the condition is true
7: Assuming the condition is true
1799 && ! bitmap_bit_p (&non_reload_pseudos, i))
1800 {
1801 if (! find_reload_regno_insns (i, first, last))
9: Calling 'find_reload_regno_insns'
18: Returning from 'find_reload_regno_insns'
19: Taking false branch
1802 continue;
1803 if (BLOCK_FOR_INSN (first) == BLOCK_FOR_INSN (last))
20: Passing null pointer value via 1st parameter 'insn'
21: Calling 'BLOCK_FOR_INSN'
1804 {
1805 /* Check that we are not trying to split over an insn that
1806 already required a split, to avoid splitting the same hard reg
1807 twice or more. If several hard regs must be split over the
1808 same insn, that can be finished on the following iterations.
1809
1810 The following loop's iteration count is small, as we split a
1811 hard reg over a very small range. */
1812 for (insn = first;
1813 insn != NEXT_INSN (last);
1814 insn = NEXT_INSN (insn))
1815 if (bitmap_bit_p (&over_split_insns, INSN_UID (insn)))
1816 break;
1817 if (insn != NEXT_INSN (last)
1818 || !spill_hard_reg_in_range (i, rclass, first, last))
1819 {
1820 bitmap_set_bit (&failed_reload_pseudos, i);
1821 }
1822 else
1823 {
1824 for (insn = first;
1825 insn != NEXT_INSN (last);
1826 insn = NEXT_INSN (insn))
1827 bitmap_set_bit (&over_split_insns, INSN_UID (insn));
1828 spill_p = true;
1829 }
1830 }
1831 }
1832 bitmap_clear (&over_split_insns);
1833 if (spill_p)
1834 {
1835 bitmap_clear (&failed_reload_pseudos);
1836 return true;
1837 }
1838 bitmap_clear (&non_reload_pseudos);
1839 bitmap_initialize (&failed_reload_insns, &reg_obstack);
1840 EXECUTE_IF_SET_IN_BITMAP (&failed_reload_pseudos, 0, u, bi)
1841 {
1842 regno = u;
1843 bitmap_ior_into (&failed_reload_insns,
1844 &lra_reg_info[regno].insn_bitmap);
1845 lra_setup_reg_renumber
1846 (regno, ira_class_hard_regs[lra_get_allocno_class (regno)][0], false);
1847 }
1848 EXECUTE_IF_SET_IN_BITMAP (&failed_reload_insns, 0, u, bi)
1849 {
1850 insn = lra_insn_recog_data[u]->insn;
1851 if (asm_noperands (PATTERN (insn)) >= 0)
1852 {
1853 lra_asm_error_p = asm_p = true;
1854 error_for_asm (insn,
1855 "%<asm%> operand has impossible constraints");
1856 /* Avoid further trouble with this insn. */
1857 if (JUMP_P (insn))
1858 {
1859 ira_nullify_asm_goto (insn);
1860 lra_update_insn_regno_info (insn);
1861 }
1862 else
1863 {
1864 PATTERN (insn) = gen_rtx_USE (VOIDmode, const0_rtx);
1865 lra_set_insn_deleted (insn);
1866 }
1867 }
1868 else if (!asm_p)
1869 {
1870 error ("unable to find a register to spill");
1871 fatal_insn ("this is the insn:", insn);
1872 }
1873 }
1874 bitmap_clear (&failed_reload_pseudos);
1875 bitmap_clear (&failed_reload_insns);
1876 return false;
1877}

/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/rtl.h

1/* Register Transfer Language (RTL) definitions for GCC
2 Copyright (C) 1987-2023 Free Software Foundation, Inc.
3
4This file is part of GCC.
5
6GCC is free software; you can redistribute it and/or modify it under
7the terms of the GNU General Public License as published by the Free
8Software Foundation; either version 3, or (at your option) any later
9version.
10
11GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12WARRANTY; without even the implied warranty of MERCHANTABILITY or
13FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14for more details.
15
16You should have received a copy of the GNU General Public License
17along with GCC; see the file COPYING3. If not see
18<http://www.gnu.org/licenses/>. */
19
20#ifndef GCC_RTL_H
21#define GCC_RTL_H
22
23/* This file is occasionally included by generator files which expect
24 machmode.h and other files to exist and would not normally have been
25 included by coretypes.h. */
26#ifdef GENERATOR_FILE
27#include "real.h"
28#include "fixed-value.h"
29#include "statistics.h"
30#include "vec.h"
31#include "hash-table.h"
32#include "hash-set.h"
33#include "input.h"
34#include "is-a.h"
35#endif /* GENERATOR_FILE */
36
37#include "hard-reg-set.h"
38
39class predefined_function_abi;
40
41/* Value used by some passes to "recognize" noop moves as valid
42 instructions. */
43#define NOOP_MOVE_INSN_CODE INT_MAX
44
45/* Register Transfer Language EXPRESSIONS CODES */
46
47#define RTX_CODE enum rtx_code
48enum rtx_code {
49
50#define DEF_RTL_EXPR(ENUM, NAME, FORMAT, CLASS) ENUM ,
51#include "rtl.def" /* rtl expressions are documented here */
52#undef DEF_RTL_EXPR
53
54 LAST_AND_UNUSED_RTX_CODE}; /* A convenient way to get a value for
55 NUM_RTX_CODE.
56 Assumes default enum value assignment. */
57
58/* The cast here, saves many elsewhere. */
59#define NUM_RTX_CODE((int) LAST_AND_UNUSED_RTX_CODE) ((int) LAST_AND_UNUSED_RTX_CODE)
60
61/* Similar, but since generator files get more entries... */
62#ifdef GENERATOR_FILE
63# define NON_GENERATOR_NUM_RTX_CODE ((int) MATCH_OPERAND)
64#endif
65
66/* Register Transfer Language EXPRESSIONS CODE CLASSES */
67
68enum rtx_class {
69 /* We check bit 0-1 of some rtx class codes in the predicates below. */
70
71 /* Bit 0 = comparison if 0, arithmetic is 1
72 Bit 1 = 1 if commutative. */
73 RTX_COMPARE, /* 0 */
74 RTX_COMM_COMPARE,
75 RTX_BIN_ARITH,
76 RTX_COMM_ARITH,
77
78 /* Must follow the four preceding values. */
79 RTX_UNARY, /* 4 */
80
81 RTX_EXTRA,
82 RTX_MATCH,
83 RTX_INSN,
84
85 /* Bit 0 = 1 if constant. */
86 RTX_OBJ, /* 8 */
87 RTX_CONST_OBJ,
88
89 RTX_TERNARY,
90 RTX_BITFIELD_OPS,
91 RTX_AUTOINC
92};
93
94#define RTX_OBJ_MASK (~1)
95#define RTX_OBJ_RESULT (RTX_OBJ & RTX_OBJ_MASK)
96#define RTX_COMPARE_MASK (~1)
97#define RTX_COMPARE_RESULT (RTX_COMPARE & RTX_COMPARE_MASK)
98#define RTX_ARITHMETIC_MASK (~1)
99#define RTX_ARITHMETIC_RESULT (RTX_COMM_ARITH & RTX_ARITHMETIC_MASK)
100#define RTX_BINARY_MASK (~3)
101#define RTX_BINARY_RESULT (RTX_COMPARE & RTX_BINARY_MASK)
102#define RTX_COMMUTATIVE_MASK (~2)
103#define RTX_COMMUTATIVE_RESULT (RTX_COMM_COMPARE & RTX_COMMUTATIVE_MASK)
104#define RTX_NON_COMMUTATIVE_RESULT (RTX_COMPARE & RTX_COMMUTATIVE_MASK)
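/* A sketch of how the masks above are meant to be used: CODE is some
   flavor of binary operation (comparison or arithmetic, commutative
   or not) when

     (GET_RTX_CLASS (CODE) & RTX_BINARY_MASK) == RTX_BINARY_RESULT

   because the four binary classes occupy enum values 0 through 3.  */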
105
106extern const unsigned char rtx_length[NUM_RTX_CODE];
107#define GET_RTX_LENGTH(CODE) (rtx_length[(int) (CODE)])
108
109extern const char * const rtx_name[NUM_RTX_CODE];
110#define GET_RTX_NAME(CODE) (rtx_name[(int) (CODE)])
111
112extern const char * const rtx_format[NUM_RTX_CODE];
113#define GET_RTX_FORMAT(CODE) (rtx_format[(int) (CODE)])
114
115extern const enum rtx_class rtx_class[NUM_RTX_CODE];
116#define GET_RTX_CLASS(CODE) (rtx_class[(int) (CODE)])
117
118/* True if CODE is part of the insn chain (i.e. has INSN_UID, PREV_INSN
119 and NEXT_INSN fields). */
120#define INSN_CHAIN_CODE_P(CODE) IN_RANGE (CODE, DEBUG_INSN, NOTE)
121
122extern const unsigned char rtx_code_size[NUM_RTX_CODE];
123extern const unsigned char rtx_next[NUM_RTX_CODE];
124
125/* The flags and bitfields of an ADDR_DIFF_VEC. BASE is the base label
126 relative to which the offsets are calculated, as explained in rtl.def. */
127struct addr_diff_vec_flags
128{
129 /* Set at the start of shorten_branches - ONLY WHEN OPTIMIZING - : */
130 unsigned min_align: 8;
131 /* Flags: */
132 unsigned base_after_vec: 1; /* BASE is after the ADDR_DIFF_VEC. */
133 unsigned min_after_vec: 1; /* minimum address target label is
134 after the ADDR_DIFF_VEC. */
135 unsigned max_after_vec: 1; /* maximum address target label is
136 after the ADDR_DIFF_VEC. */
137 unsigned min_after_base: 1; /* minimum address target label is
138 after BASE. */
139 unsigned max_after_base: 1; /* maximum address target label is
140 after BASE. */
141 /* Set by the actual branch shortening process - ONLY WHEN OPTIMIZING - : */
142 unsigned offset_unsigned: 1; /* offsets have to be treated as unsigned. */
143 unsigned : 2;
144 unsigned scale : 8;
145};
146
147/* Structure used to describe the attributes of a MEM. These are hashed
148 so that MEMs with the same attributes share a data structure. This
149 means they cannot be modified in place. */
150class GTY(()) mem_attrs
151{
152public:
153 mem_attrs ();
154
155 /* The expression that the MEM accesses, or null if not known.
156 This expression might be larger than the memory reference itself.
157 (In other words, the MEM might access only part of the object.) */
158 tree expr;
159
160 /* The offset of the memory reference from the start of EXPR.
161 Only valid if OFFSET_KNOWN_P. */
162 poly_int64 offset;
163
164 /* The size of the memory reference in bytes. Only valid if
165 SIZE_KNOWN_P. */
166 poly_int64 size;
167
168 /* The alias set of the memory reference. */
169 alias_set_type alias;
170
171 /* The alignment of the reference in bits. Always a multiple of
172 BITS_PER_UNIT. Note that EXPR may have a stricter alignment
173 than the memory reference itself. */
174 unsigned int align;
175
176 /* The address space that the memory reference uses. */
177 unsigned char addrspace;
178
179 /* True if OFFSET is known. */
180 bool offset_known_p;
181
182 /* True if SIZE is known. */
183 bool size_known_p;
184};
185
186/* Structure used to describe the attributes of a REG in similar way as
187 mem_attrs does for MEM above. Note that the OFFSET field is calculated
188 in the same way as for mem_attrs, rather than in the same way as a
189 SUBREG_BYTE. For example, if a big-endian target stores a byte
190 object in the low part of a 4-byte register, the OFFSET field
191 will be -3 rather than 0. */
192
193class GTY((for_user)) reg_attrs {
194public:
195 tree decl; /* decl corresponding to REG. */
196 poly_int64 offset; /* Offset from start of DECL. */
197};
198
199/* Common union for an element of an rtx. */
200
201union rtunion
202{
203 int rt_int;
204 unsigned int rt_uint;
205 poly_uint16_pod rt_subreg;
206 const char *rt_str;
207 rtx rt_rtx;
208 rtvec rt_rtvec;
209 machine_mode rt_type;
210 addr_diff_vec_flags rt_addr_diff_vec_flags;
211 struct cselib_val *rt_cselib;
212 tree rt_tree;
213 basic_block rt_bb;
214 mem_attrs *rt_mem;
215 class constant_descriptor_rtx *rt_constant;
216 struct dw_cfi_node *rt_cfi;
217};
218
219/* Describes the properties of a REG. */
220struct GTY(()) reg_info {
221 /* The value of REGNO. */
222 unsigned int regno;
223
224 /* The value of REG_NREGS. */
225 unsigned int nregs : 8;
226 unsigned int unused : 24;
227
228 /* The value of REG_ATTRS. */
229 reg_attrs *attrs;
230};
231
232/* This structure remembers the position of a SYMBOL_REF within an
233 object_block structure. A SYMBOL_REF only provides this information
234 if SYMBOL_REF_HAS_BLOCK_INFO_P is true. */
235struct GTY(()) block_symbol {
236 /* The usual SYMBOL_REF fields. */
237 rtunion GTY ((skip)) fld[2];
238
239 /* The block that contains this object. */
240 struct object_block *block;
241
242 /* The offset of this object from the start of its block. It is negative
243 if the symbol has not yet been assigned an offset. */
244 HOST_WIDE_INT offset;
245};
246
247/* Describes a group of objects that are to be placed together in such
248 a way that their relative positions are known. */
249struct GTY((for_user)) object_block {
250 /* The section in which these objects should be placed. */
251 section *sect;
252
253 /* The alignment of the first object, measured in bits. */
254 unsigned int alignment;
255
256 /* The total size of the objects, measured in bytes. */
257 HOST_WIDE_INT size;
258
259 /* The SYMBOL_REFs for each object. The vector is sorted in
260 order of increasing offset and the following conditions will
261 hold for each element X:
262
263 SYMBOL_REF_HAS_BLOCK_INFO_P (X)
264 !SYMBOL_REF_ANCHOR_P (X)
265 SYMBOL_REF_BLOCK (X) == [address of this structure]
266 SYMBOL_REF_BLOCK_OFFSET (X) >= 0. */
267 vec<rtx, va_gc> *objects;
268
269 /* All the anchor SYMBOL_REFs used to address these objects, sorted
270 in order of increasing offset, and then increasing TLS model.
271 The following conditions will hold for each element X in this vector:
272
273 SYMBOL_REF_HAS_BLOCK_INFO_P (X)
274 SYMBOL_REF_ANCHOR_P (X)
275 SYMBOL_REF_BLOCK (X) == [address of this structure]
276 SYMBOL_REF_BLOCK_OFFSET (X) >= 0. */
277 vec<rtx, va_gc> *anchors;
278};
279
280struct GTY((variable_size)) hwivec_def {
281 HOST_WIDE_INT elem[1];
282};
283
284/* Number of elements of the HWIVEC if RTX is a CONST_WIDE_INT. */
285#define CWI_GET_NUM_ELEM(RTX) \
286 ((int)RTL_FLAG_CHECK1("CWI_GET_NUM_ELEM", (RTX), CONST_WIDE_INT)->u2.num_elem)
287#define CWI_PUT_NUM_ELEM(RTX, NUM) \
288 (RTL_FLAG_CHECK1("CWI_PUT_NUM_ELEM", (RTX), CONST_WIDE_INT)->u2.num_elem = (NUM))
289
290struct GTY((variable_size)) const_poly_int_def {
291 trailing_wide_ints<NUM_POLY_INT_COEFFS> coeffs;
292};
293
294/* RTL expression ("rtx"). */
295
296/* The GTY "desc" and "tag" options below are a kludge: we need a desc
297 field for gengtype to recognize that inheritance is occurring,
298 so that all subclasses are redirected to the traversal hook for the
299 base class.
300 However, all of the fields are in the base class, and special-casing
301 is at work. Hence we use desc and tag of 0, generating a switch
302 statement of the form:
303 switch (0)
304 {
305 case 0: // all the work happens here
306 }
307 in order to work with the existing special-casing in gengtype. */
308
309struct GTY((desc("0"), tag("0"),
310 chain_next ("RTX_NEXT (&%h)"),
311 chain_prev ("RTX_PREV (&%h)"))) rtx_def {
312 /* The kind of expression this is. */
313 ENUM_BITFIELD(rtx_code) code: 16;
314
315 /* The kind of value the expression has. */
316 ENUM_BITFIELD(machine_mode) mode : 8;
317
318 /* 1 in a MEM if we should keep the alias set for this mem unchanged
319 when we access a component.
320 1 in a JUMP_INSN if it is a crossing jump.
321 1 in a CALL_INSN if it is a sibling call.
322 1 in a SET that is for a return.
323 In a CODE_LABEL, part of the two-bit alternate entry field.
324 1 in a CONCAT is VAL_EXPR_IS_COPIED in var-tracking.cc.
325 1 in a VALUE is SP_BASED_VALUE_P in cselib.cc.
326 1 in a SUBREG generated by LRA for reload insns.
327 1 in a REG if this is a static chain register.
328 Dumped as "/j" in RTL dumps. */
329 unsigned int jump : 1;
330 /* In a CODE_LABEL, part of the two-bit alternate entry field.
331 1 in a MEM if it cannot trap.
332 1 in a CALL_INSN logically equivalent to
333 ECF_LOOPING_CONST_OR_PURE and DECL_LOOPING_CONST_OR_PURE_P.
334 1 in a VALUE is SP_DERIVED_VALUE_P in cselib.cc.
335 Dumped as "/c" in RTL dumps. */
336 unsigned int call : 1;
337 /* 1 in a REG, MEM, or CONCAT if the value is set at most once, anywhere.
338 1 in a SUBREG used for SUBREG_PROMOTED_UNSIGNED_P.
339 1 in a SYMBOL_REF if it addresses something in the per-function
340 constants pool.
341 1 in a CALL_INSN logically equivalent to ECF_CONST and TREE_READONLY.
342 1 in a NOTE, or EXPR_LIST for a const call.
343 1 in a JUMP_INSN of an annulling branch.
344 1 in a CONCAT is VAL_EXPR_IS_CLOBBERED in var-tracking.cc.
345 1 in a preserved VALUE is PRESERVED_VALUE_P in cselib.cc.
346 1 in a clobber temporarily created for LRA.
347 Dumped as "/u" in RTL dumps. */
348 unsigned int unchanging : 1;
349 /* 1 in a MEM or ASM_OPERANDS expression if the memory reference is volatile.
350 1 in an INSN, CALL_INSN, JUMP_INSN, CODE_LABEL, BARRIER, or NOTE
351 if it has been deleted.
352 1 in a REG expression if corresponds to a variable declared by the user,
353 0 for an internally generated temporary.
354 1 in a SUBREG used for SUBREG_PROMOTED_UNSIGNED_P.
355 1 in a LABEL_REF, REG_LABEL_TARGET or REG_LABEL_OPERAND note for a
356 non-local label.
357 In a SYMBOL_REF, this flag is used for machine-specific purposes.
358 In a PREFETCH, this flag indicates that it should be considered a
359 scheduling barrier.
360 1 in a CONCAT is VAL_NEEDS_RESOLUTION in var-tracking.cc.
361 Dumped as "/v" in RTL dumps. */
362 unsigned int volatil : 1;
363 /* 1 in a REG if the register is used only in the exit code of a loop.
364 1 in a SUBREG expression if it was generated from a variable with a
365 promoted mode.
366 1 in a CODE_LABEL if the label is used for nonlocal gotos
367 and must not be deleted even if its count is zero.
368 1 in an INSN, JUMP_INSN or CALL_INSN if this insn must be scheduled
369 together with the preceding insn. Valid only within sched.
370 1 in an INSN, JUMP_INSN, or CALL_INSN if insn is in a delay slot and
371 from the target of a branch. Valid from reorg until end of compilation;
372 cleared before used.
373
374 The name of the field is historical. It used to be used in MEMs
375 to record whether the MEM accessed part of a structure.
376 Dumped as "/s" in RTL dumps. */
377 unsigned int in_struct : 1;
378 /* At the end of RTL generation, 1 if this rtx is used. This is used for
379 copying shared structure. See `unshare_all_rtl'.
380 In a REG, this is not needed for that purpose, and used instead
381 in `leaf_renumber_regs_insn'.
382 1 in a SYMBOL_REF, means that emit_library_call
383 has used it as the function.
384 1 in a CONCAT is VAL_HOLDS_TRACK_EXPR in var-tracking.cc.
385 1 in a VALUE or DEBUG_EXPR is VALUE_RECURSED_INTO in var-tracking.cc. */
386 unsigned int used : 1;
387 /* 1 in an INSN or a SET if this rtx is related to the call frame,
388 either changing how we compute the frame address or saving and
389 restoring registers in the prologue and epilogue.
390 1 in a REG or MEM if it is a pointer.
391 1 in a SYMBOL_REF if it addresses something in the per-function
392 constant string pool.
393 1 in a VALUE is VALUE_CHANGED in var-tracking.cc.
394 Dumped as "/f" in RTL dumps. */
395 unsigned frame_related : 1;
396 /* 1 in a REG or PARALLEL that is the current function's return value.
397 1 in a SYMBOL_REF for a weak symbol.
398 1 in a CALL_INSN logically equivalent to ECF_PURE and DECL_PURE_P.
399 1 in a CONCAT is VAL_EXPR_HAS_REVERSE in var-tracking.cc.
400 1 in a VALUE or DEBUG_EXPR is NO_LOC_P in var-tracking.cc.
401 Dumped as "/i" in RTL dumps. */
402 unsigned return_val : 1;
403
404 union {
405 /* The final union field is aligned to 64 bits on LP64 hosts,
406 giving a 32-bit gap after the fields above. We optimize the
407 layout for that case and use the gap for extra code-specific
408 information. */
409
410 /* The ORIGINAL_REGNO of a REG. */
411 unsigned int original_regno;
412
413 /* The INSN_UID of an RTX_INSN-class code. */
414 int insn_uid;
415
416 /* The SYMBOL_REF_FLAGS of a SYMBOL_REF. */
417 unsigned int symbol_ref_flags;
418
419 /* The PAT_VAR_LOCATION_STATUS of a VAR_LOCATION. */
420 enum var_init_status var_location_status;
421
422 /* In a CONST_WIDE_INT (aka hwivec_def), this is the number of
423 HOST_WIDE_INTs in the hwivec_def. */
424 unsigned int num_elem;
425
426 /* Information about a CONST_VECTOR. */
427 struct
428 {
429 /* The value of CONST_VECTOR_NPATTERNS. */
430 unsigned int npatterns : 16;
431
432 /* The value of CONST_VECTOR_NELTS_PER_PATTERN. */
433 unsigned int nelts_per_pattern : 8;
434
435 /* For future expansion. */
436 unsigned int unused : 8;
437 } const_vector;
438 } GTY ((skip)) u2;
439
440 /* The first element of the operands of this rtx.
441 The number of operands and their types are controlled
442 by the `code' field, according to rtl.def. */
443 union u {
444 rtunion fld[1];
445 HOST_WIDE_INT hwint[1];
446 struct reg_info reg;
447 struct block_symbol block_sym;
448 struct real_value rv;
449 struct fixed_value fv;
450 struct hwivec_def hwiv;
451 struct const_poly_int_def cpi;
452 } GTY ((special ("rtx_def"), desc ("GET_CODE (&%0)"))) u;
453};
454
455/* A node for constructing singly-linked lists of rtx. */
456
457struct GTY(()) rtx_expr_list : public rtx_def
458{
459private:
460 /* No extra fields, but adds invariant: (GET_CODE (X) == EXPR_LIST). */
461
462public:
463 /* Get next in list. */
464 rtx_expr_list *next () const;
465
466 /* Get at the underlying rtx. */
467 rtx element () const;
468};
469
470template <>
471template <>
472inline bool
473is_a_helper <rtx_expr_list *>::test (rtx rt)
474{
475 return rt->code == EXPR_LIST;
476}
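/* With such a helper defined, code elsewhere in GCC can use the
   generic casting machinery from is-a.h, e.g. (a sketch):

     if (rtx_expr_list *l = dyn_cast <rtx_expr_list *> (x))
       use (l->element (), l->next ());

   where dyn_cast yields a null pointer when the code test fails.  */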
477
478struct GTY(()) rtx_insn_list : public rtx_def
479{
480private:
481 /* No extra fields, but adds invariant: (GET_CODE (X) == INSN_LIST).
482
483 This is an instance of:
484
485 DEF_RTL_EXPR(INSN_LIST, "insn_list", "ue", RTX_EXTRA)
486
487 i.e. a node for constructing singly-linked lists of rtx_insn *, where
488 the list is "external" to the insn (as opposed to the doubly-linked
489 list embedded within rtx_insn itself). */
490
491public:
492 /* Get next in list. */
493 rtx_insn_list *next () const;
494
495 /* Get at the underlying instruction. */
496 rtx_insn *insn () const;
497
498};
499
500template <>
501template <>
502inline bool
503is_a_helper <rtx_insn_list *>::test (rtx rt)
504{
505 return rt->code == INSN_LIST;
506}
507
508/* A node with invariant GET_CODE (X) == SEQUENCE i.e. a vector of rtx,
509 typically (but not always) of rtx_insn *, used in the late passes. */
510
511struct GTY(()) rtx_sequence : public rtx_def
512{
513private:
514 /* No extra fields, but adds invariant: (GET_CODE (X) == SEQUENCE). */
515
516public:
517 /* Get number of elements in sequence. */
518 int len () const;
519
520 /* Get i-th element of the sequence. */
521 rtx element (int index) const;
522
523 /* Get i-th element of the sequence, with a checked cast to
524 rtx_insn *. */
525 rtx_insn *insn (int index) const;
526};
527
528template <>
529template <>
530inline bool
531is_a_helper <rtx_sequence *>::test (rtx rt)
532{
533 return rt->code == SEQUENCE;
534}
535
536template <>
537template <>
538inline bool
539is_a_helper <const rtx_sequence *>::test (const_rtx rt)
540{
541 return rt->code == SEQUENCE;
542}
543
544struct GTY(()) rtx_insn : public rtx_def
545{
546public:
547 /* No extra fields, but adds the invariant:
548
549 (INSN_P (X)
550 || NOTE_P (X)
551 || JUMP_TABLE_DATA_P (X)
552 || BARRIER_P (X)
553 || LABEL_P (X))
554
555 i.e. that we must be able to use the following:
556 INSN_UID ()
557 NEXT_INSN ()
558 PREV_INSN ()
559 i.e. we have an rtx that has an INSN_UID field and can be part of
560 a linked list of insns.
561 */
562
563 /* Returns true if this insn has been deleted. */
564
565 bool deleted () const { return volatil; }
566
567 /* Mark this insn as deleted. */
568
569 void set_deleted () { volatil = true; }
570
571 /* Mark this insn as not deleted. */
572
573 void set_undeleted () { volatil = false; }
574};
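/* deleted () and set_deleted () reuse the `volatil' bit documented
   above ("1 in an INSN ... if it has been deleted"); e.g. (a sketch):

     if (insn->deleted ())
       continue;   // skip insns removed from the stream
*/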
575
576/* Subclasses of rtx_insn. */
577
578struct GTY(()) rtx_debug_insn : public rtx_insn
579{
580 /* No extra fields, but adds the invariant:
581 DEBUG_INSN_P (X) aka (GET_CODE (X) == DEBUG_INSN)
582 i.e. an annotation for tracking variable assignments.
583
584 This is an instance of:
585 DEF_RTL_EXPR(DEBUG_INSN, "debug_insn", "uuBeiie", RTX_INSN)
586 from rtl.def. */
587};
588
589struct GTY(()) rtx_nonjump_insn : public rtx_insn
590{
591 /* No extra fields, but adds the invariant:
592 NONJUMP_INSN_P (X) aka (GET_CODE (X) == INSN)
593 i.e. an instruction that cannot jump.
594
595 This is an instance of:
596 DEF_RTL_EXPR(INSN, "insn", "uuBeiie", RTX_INSN)
597 from rtl.def. */
598};
599
600struct GTY(()) rtx_jump_insn : public rtx_insn
601{
602public:
603 /* No extra fields, but adds the invariant:
604 JUMP_P (X) aka (GET_CODE (X) == JUMP_INSN)
605 i.e. an instruction that can possibly jump.
606
607 This is an instance of:
608 DEF_RTL_EXPR(JUMP_INSN, "jump_insn", "uuBeiie0", RTX_INSN)
609 from rtl.def. */
610
611 /* Returns jump target of this instruction. The returned value is not
612 necessarily a code label: it may also be a RETURN or SIMPLE_RETURN
613 expression. Also, when the code label is marked "deleted", it is
614 replaced by a NOTE. In some cases the value is NULL_RTX. */
615
616 inline rtx jump_label () const;
617
618 /* Returns jump target cast to rtx_code_label *. */
619
620 inline rtx_code_label *jump_target () const;
621
622 /* Set jump target. */
623
624 inline void set_jump_target (rtx_code_label *);
625};
626
627struct GTY(()) rtx_call_insn : public rtx_insn
628{
629 /* No extra fields, but adds the invariant:
630 CALL_P (X) aka (GET_CODE (X) == CALL_INSN)
631 i.e. an instruction that can possibly call a subroutine
632 but which will not change which instruction comes next
633 in the current function.
634
635 This is an instance of:
636 DEF_RTL_EXPR(CALL_INSN, "call_insn", "uuBeiiee", RTX_INSN)
637 from rtl.def. */
638};
639
640struct GTY(()) rtx_jump_table_data : public rtx_insn
641{
642 /* No extra fields, but adds the invariant:
643 JUMP_TABLE_DATA_P (X) aka (GET_CODE (INSN) == JUMP_TABLE_DATA)
644 i.e. data for a jump table, considered an instruction for
645 historical reasons.
646
647 This is an instance of:
648 DEF_RTL_EXPR(JUMP_TABLE_DATA, "jump_table_data", "uuBe0000", RTX_INSN)
649 from rtl.def. */
650
651 /* This can be either:
652
653 (a) a table of absolute jumps, in which case PATTERN (this) is an
654 ADDR_VEC with arg 0 a vector of labels, or
655
656 (b) a table of relative jumps (e.g. for -fPIC), in which case
657 PATTERN (this) is an ADDR_DIFF_VEC, with arg 0 a LABEL_REF and
658 arg 1 the vector of labels.
659
660 This method gets the underlying vec. */
661
662 inline rtvec get_labels () const;
663 inline scalar_int_mode get_data_mode () const;
664};
665
666struct GTY(()) rtx_barrier : public rtx_insn
667{
668 /* No extra fields, but adds the invariant:
669 BARRIER_P (X) aka (GET_CODE (X) == BARRIER)
670 i.e. a marker that indicates that control will not flow through.
671
672 This is an instance of:
673 DEF_RTL_EXPR(BARRIER, "barrier", "uu00000", RTX_EXTRA)
674 from rtl.def. */
675};
676
677struct GTY(()) rtx_code_label : public rtx_insn
678{
679 /* No extra fields, but adds the invariant:
680 LABEL_P (X) aka (GET_CODE (X) == CODE_LABEL)
681 i.e. a label in the assembler.
682
683 This is an instance of:
684 DEF_RTL_EXPR(CODE_LABEL, "code_label", "uuB00is", RTX_EXTRA)
685 from rtl.def. */
686};
687
688struct GTY(()) rtx_note : public rtx_insn
689{
690 /* No extra fields, but adds the invariant:
691 NOTE_P(X) aka (GET_CODE (X) == NOTE)
692 i.e. a note about the corresponding source code.
693
694 This is an instance of:
695 DEF_RTL_EXPR(NOTE, "note", "uuB0ni", RTX_EXTRA)
696 from rtl.def. */
697};
698
699/* The size in bytes of an rtx header (code, mode and flags). */
700#define RTX_HDR_SIZE offsetof (struct rtx_def, u)
701
702/* The size in bytes of an rtx with code CODE. */
703#define RTX_CODE_SIZE(CODE) rtx_code_size[CODE]
704
705#define NULL_RTX (rtx) 0
706
707/* The "next" and "previous" RTX, relative to this one. */
708
709#define RTX_NEXT(X) (rtx_next[GET_CODE (X)] == 0 ? NULL \
710 : *(rtx *)(((char *)X) + rtx_next[GET_CODE (X)]))
711
712/* FIXME: the "NEXT_INSN (PREV_INSN (X)) == X" condition shouldn't be needed.
713 */
714#define RTX_PREV(X) ((INSN_P (X)			\
715		      || NOTE_P (X)			\
716		      || JUMP_TABLE_DATA_P (X)		\
717		      || BARRIER_P (X)			\
718		      || LABEL_P (X))			\
719		     && PREV_INSN (as_a <rtx_insn *> (X)) != NULL \
720		     && NEXT_INSN (PREV_INSN (as_a <rtx_insn *> (X))) == X \
721		     ? PREV_INSN (as_a <rtx_insn *> (X)) : NULL)
722
723/* Define macros to access the `code' field of the rtx. */
724
725#define GET_CODE(RTX)	    ((enum rtx_code) (RTX)->code)
726#define PUT_CODE(RTX, CODE) ((RTX)->code = (CODE))
727
728#define GET_MODE(RTX)		((machine_mode) (RTX)->mode)
729#define PUT_MODE_RAW(RTX, MODE)	((RTX)->mode = (MODE))
730
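/* Illustrative sketch (not part of rtl.h): GET_CODE and GET_MODE read the
   two header fields every rtx carries.  Assumes the usual GCC internal
   includes (config.h, system.h, coretypes.h, rtl.h); the helper name
   `describe_rtx' is hypothetical.  */

static void
describe_rtx (const_rtx x)
{
  enum rtx_code code = GET_CODE (x);   /* the rtx's operation code */
  machine_mode mode = GET_MODE (x);    /* its machine mode, if any */
  fprintf (stderr, "%s:%s\n", GET_RTX_NAME (code), GET_MODE_NAME (mode));
}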
731/* RTL vector. These appear inside RTX's when there is a need
732 for a variable number of things.  The principal use is inside
733 PARALLEL expressions. */
734
735struct GTY(()) rtvec_def {
736 int num_elem; /* number of elements */
737 rtx GTY ((length ("%h.num_elem"))) elem[1];
738};
739
740#define NULL_RTVEC (rtvec) 0
741
742#define GET_NUM_ELEM(RTVEC)	 ((RTVEC)->num_elem)
743#define PUT_NUM_ELEM(RTVEC, NUM) ((RTVEC)->num_elem = (NUM))
744
745/* Predicate yielding nonzero iff X is an rtx for a register.  */
746#define REG_P(X) (GET_CODE (X) == REG)
747
748/* Predicate yielding nonzero iff X is an rtx for a memory location.  */
749#define MEM_P(X) (GET_CODE (X) == MEM)
750
751#if TARGET_SUPPORTS_WIDE_INT
752
753/* Match CONST_*s that can represent compile-time constant integers.  */
754#define CASE_CONST_SCALAR_INT \
755   case CONST_INT: \
756   case CONST_WIDE_INT
757
758/* Match CONST_*s for which pointer equality corresponds to value
759   equality.  */
760#define CASE_CONST_UNIQUE \
761   case CONST_INT: \
762   case CONST_WIDE_INT: \
763   case CONST_POLY_INT: \
764   case CONST_DOUBLE: \
765   case CONST_FIXED
766
767/* Match all CONST_* rtxes.  */
768#define CASE_CONST_ANY \
769   case CONST_INT: \
770   case CONST_WIDE_INT: \
771   case CONST_POLY_INT: \
772   case CONST_DOUBLE: \
773   case CONST_FIXED: \
774   case CONST_VECTOR
775
776#else
777
778/* Match CONST_*s that can represent compile-time constant integers.  */
779#define CASE_CONST_SCALAR_INT \
780   case CONST_INT: \
781   case CONST_DOUBLE
782
783/* Match CONST_*s for which pointer equality corresponds to value
784   equality.  */
785#define CASE_CONST_UNIQUE \
786   case CONST_INT: \
787   case CONST_DOUBLE: \
788   case CONST_FIXED
789
790/* Match all CONST_* rtxes.  */
791#define CASE_CONST_ANY \
792   case CONST_INT: \
793   case CONST_DOUBLE: \
794   case CONST_FIXED: \
795   case CONST_VECTOR
796#endif
797
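/* Illustrative sketch (not part of rtl.h): the CASE_CONST_* macros expand
   into `case' labels, so a switch over GET_CODE stays correct on either
   side of TARGET_SUPPORTS_WIDE_INT.  The helper name
   `is_compile_time_constant' is hypothetical.  */

static bool
is_compile_time_constant (const_rtx x)
{
  switch (GET_CODE (x))
    {
    CASE_CONST_ANY:    /* expands to the full run of CONST_* cases */
      return true;
    default:
      return false;
    }
}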
798/* Predicate yielding nonzero iff X is an rtx for a constant integer.  */
799#define CONST_INT_P(X) (GET_CODE (X) == CONST_INT)
800
801/* Predicate yielding nonzero iff X is an rtx for a constant wide integer.  */
802#define CONST_WIDE_INT_P(X) (GET_CODE (X) == CONST_WIDE_INT)
803
804/* Predicate yielding nonzero iff X is an rtx for a polynomial constant
805   integer.  */
806#define CONST_POLY_INT_P(X) \
807  (NUM_POLY_INT_COEFFS > 1 && GET_CODE (X) == CONST_POLY_INT)
808
809/* Predicate yielding nonzero iff X is an rtx for a constant fixed-point.  */
810#define CONST_FIXED_P(X) (GET_CODE (X) == CONST_FIXED)
811
812/* Predicate yielding true iff X is an rtx for a double-int
813   or floating point constant.  */
814#define CONST_DOUBLE_P(X) (GET_CODE (X) == CONST_DOUBLE)
815
816/* Predicate yielding true iff X is an rtx for a double-int.  */
817#define CONST_DOUBLE_AS_INT_P(X) \
818  (GET_CODE (X) == CONST_DOUBLE && GET_MODE (X) == VOIDmode)
819
820/* Predicate yielding true iff X is an rtx for an integer constant.  */
821#if TARGET_SUPPORTS_WIDE_INT
822#define CONST_SCALAR_INT_P(X) \
823  (CONST_INT_P (X) || CONST_WIDE_INT_P (X))
824#else
825#define CONST_SCALAR_INT_P(X) \
826  (CONST_INT_P (X) || CONST_DOUBLE_AS_INT_P (X))
827#endif
828
829/* Predicate yielding true iff X is an rtx for a floating point constant.  */
830#define CONST_DOUBLE_AS_FLOAT_P(X) \
831  (GET_CODE (X) == CONST_DOUBLE && GET_MODE (X) != VOIDmode)
832
833/* Predicate yielding nonzero iff X is an rtx for a constant vector.  */
834#define CONST_VECTOR_P(X) (GET_CODE (X) == CONST_VECTOR)
835
836/* Predicate yielding nonzero iff X is a label insn.  */
837#define LABEL_P(X) (GET_CODE (X) == CODE_LABEL)
838
839/* Predicate yielding nonzero iff X is a jump insn.  */
840#define JUMP_P(X) (GET_CODE (X) == JUMP_INSN)
841
842/* Predicate yielding nonzero iff X is a call insn.  */
843#define CALL_P(X) (GET_CODE (X) == CALL_INSN)
844
845/* 1 if RTX is a call_insn for a fake call.
846   CALL_INSN uses the "used" flag to indicate it's a fake call.  */
847#define FAKE_CALL_P(RTX) \
848  (RTL_FLAG_CHECK1 ("FAKE_CALL_P", (RTX), CALL_INSN)->used)
849
850/* Predicate yielding nonzero iff X is an insn that cannot jump.  */
851#define NONJUMP_INSN_P(X) (GET_CODE (X) == INSN)
852
853/* Predicate yielding nonzero iff X is a debug note/insn.  */
854#define DEBUG_INSN_P(X) (GET_CODE (X) == DEBUG_INSN)
855
856/* Predicate yielding nonzero iff X is an insn that is not a debug insn.  */
857#define NONDEBUG_INSN_P(X) (NONJUMP_INSN_P (X) || JUMP_P (X) || CALL_P (X))
858
859/* Nonzero if DEBUG_MARKER_INSN_P may possibly hold.  */
860#define MAY_HAVE_DEBUG_MARKER_INSNS debug_nonbind_markers_p
861/* Nonzero if DEBUG_BIND_INSN_P may possibly hold.  */
862#define MAY_HAVE_DEBUG_BIND_INSNS flag_var_tracking_assignments
863/* Nonzero if DEBUG_INSN_P may possibly hold.  */
864#define MAY_HAVE_DEBUG_INSNS \
865  (MAY_HAVE_DEBUG_MARKER_INSNS || MAY_HAVE_DEBUG_BIND_INSNS)
866
867/* Predicate yielding nonzero iff X is a real insn.  */
868#define INSN_P(X) (NONDEBUG_INSN_P (X) || DEBUG_INSN_P (X))
869
870/* Predicate yielding nonzero iff X is a note insn.  */
871#define NOTE_P(X) (GET_CODE (X) == NOTE)
872
873/* Predicate yielding nonzero iff X is a barrier insn.  */
874#define BARRIER_P(X) (GET_CODE (X) == BARRIER)
875
876/* Predicate yielding nonzero iff X is data for a jump table.  */
877#define JUMP_TABLE_DATA_P(INSN) (GET_CODE (INSN) == JUMP_TABLE_DATA)
878
879/* Predicate yielding nonzero iff RTX is a subreg.  */
880#define SUBREG_P(RTX) (GET_CODE (RTX) == SUBREG)
881
882/* Predicate yielding true iff RTX is a symbol ref.  */
883#define SYMBOL_REF_P(RTX) (GET_CODE (RTX) == SYMBOL_REF)
884
885template <>
886template <>
887inline bool
888is_a_helper <rtx_insn *>::test (rtx rt)
889{
890  return (INSN_P (rt)
891	  || NOTE_P (rt)
892	  || JUMP_TABLE_DATA_P (rt)
893	  || BARRIER_P (rt)
894	  || LABEL_P (rt));
895}
896
897template <>
898template <>
899inline bool
900is_a_helper <const rtx_insn *>::test (const_rtx rt)
901{
902  return (INSN_P (rt)
903	  || NOTE_P (rt)
904	  || JUMP_TABLE_DATA_P (rt)
905	  || BARRIER_P (rt)
906	  || LABEL_P (rt));
907}
908
909template <>
910template <>
911inline bool
912is_a_helper <rtx_debug_insn *>::test (rtx rt)
913{
914  return DEBUG_INSN_P (rt);
915}
916
917template <>
918template <>
919inline bool
920is_a_helper <rtx_nonjump_insn *>::test (rtx rt)
921{
922  return NONJUMP_INSN_P (rt);
923}
924
925template <>
926template <>
927inline bool
928is_a_helper <rtx_jump_insn *>::test (rtx rt)
929{
930  return JUMP_P (rt);
931}
932
933template <>
934template <>
935inline bool
936is_a_helper <rtx_jump_insn *>::test (rtx_insn *insn)
937{
938  return JUMP_P (insn);
939}
940
941template <>
942template <>
943inline bool
944is_a_helper <rtx_call_insn *>::test (rtx rt)
945{
946  return CALL_P (rt);
947}
948
949template <>
950template <>
951inline bool
952is_a_helper <rtx_call_insn *>::test (rtx_insn *insn)
953{
954  return CALL_P (insn);
955}
956
957template <>
958template <>
959inline bool
960is_a_helper <rtx_jump_table_data *>::test (rtx rt)
961{
962  return JUMP_TABLE_DATA_P (rt);
963}
964
965template <>
966template <>
967inline bool
968is_a_helper <rtx_jump_table_data *>::test (rtx_insn *insn)
969{
970  return JUMP_TABLE_DATA_P (insn);
971}
972
973template <>
974template <>
975inline bool
976is_a_helper <rtx_barrier *>::test (rtx rt)
977{
978  return BARRIER_P (rt);
979}
980
981template <>
982template <>
983inline bool
984is_a_helper <rtx_code_label *>::test (rtx rt)
985{
986  return LABEL_P (rt);
987}
988
989template <>
990template <>
991inline bool
992is_a_helper <rtx_code_label *>::test (rtx_insn *insn)
993{
994  return LABEL_P (insn);
995}
996
997template <>
998template <>
999inline bool
1000is_a_helper <rtx_note *>::test (rtx rt)
1001{
1002  return NOTE_P (rt);
1003}
1004
1005template <>
1006template <>
1007inline bool
1008is_a_helper <rtx_note *>::test (rtx_insn *insn)
1009{
1010  return NOTE_P (insn);
1011}
1012
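/* Illustrative sketch (not part of rtl.h): the is_a_helper specializations
   above are what let the generic machinery from is-a.h (is_a, as_a,
   dyn_cast, safe_as_a) operate on rtx values.  A typical checked downcast;
   the helper name `maybe_label' is hypothetical.  */

static rtx_code_label *
maybe_label (rtx x)
{
  /* dyn_cast yields NULL when x is not a CODE_LABEL, by way of
     is_a_helper <rtx_code_label *>::test above.  */
  return dyn_cast <rtx_code_label *> (x);
}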
1013/* Predicate yielding nonzero iff X is a return or simple_return.  */
1014#define ANY_RETURN_P(X) \
1015  (GET_CODE (X) == RETURN || GET_CODE (X) == SIMPLE_RETURN)
1016
1017/* 1 if X is a unary operator.  */
1018
1019#define UNARY_P(X)   \
1020  (GET_RTX_CLASS (GET_CODE (X)) == RTX_UNARY)
1021
1022/* 1 if X is a binary operator.  */
1023
1024#define BINARY_P(X)   \
1025  ((GET_RTX_CLASS (GET_CODE (X)) & RTX_BINARY_MASK) == RTX_BINARY_RESULT)
1026
1027/* 1 if X is an arithmetic operator.  */
1028
1029#define ARITHMETIC_P(X)   \
1030  ((GET_RTX_CLASS (GET_CODE (X)) & RTX_ARITHMETIC_MASK)  \
1031    == RTX_ARITHMETIC_RESULT)
1032
1033/* 1 if X is a commutative arithmetic operator.  */
1034
1035#define COMMUTATIVE_ARITH_P(X)   \
1036  (GET_RTX_CLASS (GET_CODE (X)) == RTX_COMM_ARITH)
1037
1038/* 1 if X is a commutative arithmetic operator or a comparison operator.
1039   These two are sometimes selected together because it is possible to
1040   swap the two operands.  */
1041
1042#define SWAPPABLE_OPERANDS_P(X)   \
1043  ((1 << GET_RTX_CLASS (GET_CODE (X)))  \
1044    & ((1 << RTX_COMM_ARITH) | (1 << RTX_COMM_COMPARE)  \
1045       | (1 << RTX_COMPARE)))
1046
1047/* 1 if X is a non-commutative operator.  */
1048
1049#define NON_COMMUTATIVE_P(X)   \
1050  ((GET_RTX_CLASS (GET_CODE (X)) & RTX_COMMUTATIVE_MASK)  \
1051    == RTX_NON_COMMUTATIVE_RESULT)
1052
1053/* 1 if X is a commutative operator on integers.  */
1054
1055#define COMMUTATIVE_P(X)   \
1056  ((GET_RTX_CLASS (GET_CODE (X)) & RTX_COMMUTATIVE_MASK)  \
1057    == RTX_COMMUTATIVE_RESULT)
1058
1059/* 1 if X is a relational operator.  */
1060
1061#define COMPARISON_P(X)   \
1062  ((GET_RTX_CLASS (GET_CODE (X)) & RTX_COMPARE_MASK) == RTX_COMPARE_RESULT)
1063
1064/* 1 if X is a constant value.  */
1065
1066#define CONSTANT_P(X)   \
1067  (GET_RTX_CLASS (GET_CODE (X)) == RTX_CONST_OBJ)
1068
1069/* 1 if X is a LABEL_REF.  */
1070#define LABEL_REF_P(X)  \
1071  (GET_CODE (X) == LABEL_REF)
1072
1073/* 1 if X can be used to represent an object.  */
1074#define OBJECT_P(X)  \
1075  ((GET_RTX_CLASS (GET_CODE (X)) & RTX_OBJ_MASK) == RTX_OBJ_RESULT)
1076
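/* Illustrative sketch (not part of rtl.h): a common use of these class
   predicates is putting a constant operand second before pattern matching.
   The helper name `canonicalize_commutative_operands' is hypothetical, and
   XEXP is only defined further down in this header.  For comparisons (also
   matched by SWAPPABLE_OPERANDS_P) the rtx code itself must be flipped as
   well, which this sketch omits.  */

static void
canonicalize_commutative_operands (rtx x)
{
  if (COMMUTATIVE_ARITH_P (x)
      && CONSTANT_P (XEXP (x, 0))
      && !CONSTANT_P (XEXP (x, 1)))
    std::swap (XEXP (x, 0), XEXP (x, 1));   /* reorder; code unchanged */
}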
1077/* General accessor macros for accessing the fields of an rtx. */
1078
1079#if defined ENABLE_RTL_CHECKING && (GCC_VERSION >= 2007)
1080/* The bit with a star outside the statement expr and an & inside is
1081   so that N can be evaluated only once.  */
1082#define RTL_CHECK1(RTX, N, C1) __extension__			\
1083(*({ __typeof (RTX) const _rtx = (RTX); const int _n = (N);	\
1084     const enum rtx_code _code = GET_CODE (_rtx);		\
1085     if (_n < 0 || _n >= GET_RTX_LENGTH (_code))		\
1086       rtl_check_failed_bounds (_rtx, _n, __FILE__, __LINE__,	\
1087				__FUNCTION__);			\
1088     if (GET_RTX_FORMAT (_code)[_n] != C1)			\
1089       rtl_check_failed_type1 (_rtx, _n, C1, __FILE__, __LINE__, \
1090			       __FUNCTION__);			\
1091     &_rtx->u.fld[_n]; }))
1092
1093#define RTL_CHECK2(RTX, N, C1, C2) __extension__		\
1094(*({ __typeof (RTX) const _rtx = (RTX); const int _n = (N);	\
1095     const enum rtx_code _code = GET_CODE (_rtx);		\
1096     if (_n < 0 || _n >= GET_RTX_LENGTH (_code))		\
1097       rtl_check_failed_bounds (_rtx, _n, __FILE__, __LINE__,	\
1098				__FUNCTION__);			\
1099     if (GET_RTX_FORMAT (_code)[_n] != C1			\
1100	 && GET_RTX_FORMAT (_code)[_n] != C2)			\
1101       rtl_check_failed_type2 (_rtx, _n, C1, C2, __FILE__, __LINE__, \
1102			       __FUNCTION__);			\
1103     &_rtx->u.fld[_n]; }))
1104
1105#define RTL_CHECKC1(RTX, N, C) __extension__			\
1106(*({ __typeof (RTX) const _rtx = (RTX); const int _n = (N);	\
1107     if (GET_CODE (_rtx) != (C))				\
1108       rtl_check_failed_code1 (_rtx, (C), __FILE__, __LINE__,	\
1109			       __FUNCTION__);			\
1110     &_rtx->u.fld[_n]; }))
1111
1112#define RTL_CHECKC2(RTX, N, C1, C2) __extension__		\
1113(*({ __typeof (RTX) const _rtx = (RTX); const int _n = (N);	\
1114     const enum rtx_code _code = GET_CODE (_rtx);		\
1115     if (_code != (C1) && _code != (C2))			\
1116       rtl_check_failed_code2 (_rtx, (C1), (C2), __FILE__, __LINE__, \
1117			       __FUNCTION__);			\
1118     &_rtx->u.fld[_n]; }))
1119
1120#define RTL_CHECKC3(RTX, N, C1, C2, C3) __extension__		\
1121(*({ __typeof (RTX) const _rtx = (RTX); const int _n = (N);	\
1122     const enum rtx_code _code = GET_CODE (_rtx);		\
1123     if (_code != (C1) && _code != (C2) && _code != (C3))	\
1124       rtl_check_failed_code3 (_rtx, (C1), (C2), (C3), __FILE__, \
1125			       __LINE__, __FUNCTION__);		\
1126     &_rtx->u.fld[_n]; }))
1127
1128#define RTVEC_ELT(RTVEC, I) __extension__			\
1129(*({ __typeof (RTVEC) const _rtvec = (RTVEC); const int _i = (I); \
1130     if (_i < 0 || _i >= GET_NUM_ELEM (_rtvec))		\
1131       rtvec_check_failed_bounds (_rtvec, _i, __FILE__, __LINE__, \
1132				  __FUNCTION__);		\
1133     &_rtvec->elem[_i]; }))
1134
1135#define XWINT(RTX, N) __extension__				\
1136(*({ __typeof (RTX) const _rtx = (RTX); const int _n = (N);	\
1137     const enum rtx_code _code = GET_CODE (_rtx);		\
1138     if (_n < 0 || _n >= GET_RTX_LENGTH (_code))		\
1139       rtl_check_failed_bounds (_rtx, _n, __FILE__, __LINE__,	\
1140				__FUNCTION__);			\
1141     if (GET_RTX_FORMAT (_code)[_n] != 'w')			\
1142       rtl_check_failed_type1 (_rtx, _n, 'w', __FILE__, __LINE__, \
1143			       __FUNCTION__);			\
1144     &_rtx->u.hwint[_n]; }))
1145
1146#define CWI_ELT(RTX, I) __extension__				\
1147(*({ __typeof (RTX) const _cwi = (RTX);			\
1148     int _max = CWI_GET_NUM_ELEM (_cwi);			\
1149     const int _i = (I);					\
1150     if (_i < 0 || _i >= _max)					\
1151       cwi_check_failed_bounds (_cwi, _i, __FILE__, __LINE__,	\
1152				__FUNCTION__);			\
1153     &_cwi->u.hwiv.elem[_i]; }))
1154
1155#define XCWINT(RTX, N, C) __extension__				\
1156(*({ __typeof (RTX) const _rtx = (RTX);			\
1157     if (GET_CODE (_rtx) != (C))				\
1158       rtl_check_failed_code1 (_rtx, (C), __FILE__, __LINE__,	\
1159			       __FUNCTION__);			\
1160     &_rtx->u.hwint[N]; }))
1161
1162#define XCMWINT(RTX, N, C, M) __extension__			\
1163(*({ __typeof (RTX) const _rtx = (RTX);			\
1164     if (GET_CODE (_rtx) != (C) || GET_MODE (_rtx) != (M))	\
1165       rtl_check_failed_code_mode (_rtx, (C), (M), false, __FILE__, \
1166				   __LINE__, __FUNCTION__);	\
1167     &_rtx->u.hwint[N]; }))
1168
1169#define XCNMPRV(RTX, C, M) __extension__			\
1170({ __typeof (RTX) const _rtx = (RTX);				\
1171   if (GET_CODE (_rtx) != (C) || GET_MODE (_rtx) == (M))	\
1172     rtl_check_failed_code_mode (_rtx, (C), (M), true, __FILE__, \
1173				 __LINE__, __FUNCTION__);	\
1174   &_rtx->u.rv; })
1175
1176#define XCNMPFV(RTX, C, M) __extension__			\
1177({ __typeof (RTX) const _rtx = (RTX);				\
1178   if (GET_CODE (_rtx) != (C) || GET_MODE (_rtx) == (M))	\
1179     rtl_check_failed_code_mode (_rtx, (C), (M), true, __FILE__, \
1180				 __LINE__, __FUNCTION__);	\
1181   &_rtx->u.fv; })
1182
1183#define REG_CHECK(RTX) __extension__				\
1184({ __typeof (RTX) const _rtx = (RTX);				\
1185   if (GET_CODE (_rtx) != REG)					\
1186     rtl_check_failed_code1 (_rtx, REG, __FILE__, __LINE__,	\
1187			     __FUNCTION__);			\
1188   &_rtx->u.reg; })
1189
1190#define BLOCK_SYMBOL_CHECK(RTX) __extension__			\
1191({ __typeof (RTX) const _symbol = (RTX);			\
1192   const unsigned int flags = SYMBOL_REF_FLAGS (_symbol);	\
1193   if ((flags & SYMBOL_FLAG_HAS_BLOCK_INFO) == 0)		\
1194     rtl_check_failed_block_symbol (__FILE__, __LINE__,	\
1195				    __FUNCTION__);		\
1196   &_symbol->u.block_sym; })
1197
1198#define HWIVEC_CHECK(RTX,C) __extension__			\
1199({ __typeof (RTX) const _symbol = (RTX);			\
1200   RTL_CHECKC1 (_symbol, 0, C);					\
1201   &_symbol->u.hwiv; })
1202
1203extern void rtl_check_failed_bounds (const_rtx, int, const char *, int,
1204				     const char *)
1205    ATTRIBUTE_NORETURN ATTRIBUTE_COLD;
1206extern void rtl_check_failed_type1 (const_rtx, int, int, const char *, int,
1207				    const char *)
1208    ATTRIBUTE_NORETURN ATTRIBUTE_COLD;
1209extern void rtl_check_failed_type2 (const_rtx, int, int, int, const char *,
1210				    int, const char *)
1211    ATTRIBUTE_NORETURN ATTRIBUTE_COLD;
1212extern void rtl_check_failed_code1 (const_rtx, enum rtx_code, const char *,
1213				    int, const char *)
1214    ATTRIBUTE_NORETURN ATTRIBUTE_COLD;
1215extern void rtl_check_failed_code2 (const_rtx, enum rtx_code, enum rtx_code,
1216				    const char *, int, const char *)
1217    ATTRIBUTE_NORETURN ATTRIBUTE_COLD;
1218extern void rtl_check_failed_code3 (const_rtx, enum rtx_code, enum rtx_code,
1219				    enum rtx_code, const char *, int,
1220				    const char *)
1221    ATTRIBUTE_NORETURN ATTRIBUTE_COLD;
1222extern void rtl_check_failed_code_mode (const_rtx, enum rtx_code, machine_mode,
1223					bool, const char *, int, const char *)
1224    ATTRIBUTE_NORETURN ATTRIBUTE_COLD;
1225extern void rtl_check_failed_block_symbol (const char *, int, const char *)
1226    ATTRIBUTE_NORETURN ATTRIBUTE_COLD;
1227extern void cwi_check_failed_bounds (const_rtx, int, const char *, int,
1228				     const char *)
1229    ATTRIBUTE_NORETURN ATTRIBUTE_COLD;
1230extern void rtvec_check_failed_bounds (const_rtvec, int, const char *, int,
1231				       const char *)
1232    ATTRIBUTE_NORETURN ATTRIBUTE_COLD;
1233
1234#else /* not ENABLE_RTL_CHECKING */
1235
1236#define RTL_CHECK1(RTX, N, C1)		((RTX)->u.fld[N])
1237#define RTL_CHECK2(RTX, N, C1, C2)	((RTX)->u.fld[N])
1238#define RTL_CHECKC1(RTX, N, C)		((RTX)->u.fld[N])
1239#define RTL_CHECKC2(RTX, N, C1, C2)	((RTX)->u.fld[N])
1240#define RTL_CHECKC3(RTX, N, C1, C2, C3)	((RTX)->u.fld[N])
1241#define RTVEC_ELT(RTVEC, I)		((RTVEC)->elem[I])
1242#define XWINT(RTX, N)			((RTX)->u.hwint[N])
1243#define CWI_ELT(RTX, I)			((RTX)->u.hwiv.elem[I])
1244#define XCWINT(RTX, N, C)		((RTX)->u.hwint[N])
1245#define XCMWINT(RTX, N, C, M)		((RTX)->u.hwint[N])
1246#define XCNMWINT(RTX, N, C, M)		((RTX)->u.hwint[N])
1247#define XCNMPRV(RTX, C, M)		(&(RTX)->u.rv)
1248#define XCNMPFV(RTX, C, M)		(&(RTX)->u.fv)
1249#define REG_CHECK(RTX)			(&(RTX)->u.reg)
1250#define BLOCK_SYMBOL_CHECK(RTX)		(&(RTX)->u.block_sym)
1251#define HWIVEC_CHECK(RTX,C)		(&(RTX)->u.hwiv)
1252
1253#endif
1254
1255/* General accessor macros for accessing the flags of an rtx. */
1256
1257/* Access an individual rtx flag, with no checking of any kind. */
1258#define RTX_FLAG(RTX, FLAG) ((RTX)->FLAG)
1259
1260#if defined ENABLE_RTL_FLAG_CHECKING && (GCC_VERSION >= 2007)
1261#define RTL_FLAG_CHECK1(NAME, RTX, C1) __extension__		\
1262({ __typeof (RTX) const _rtx = (RTX);				\
1263   if (GET_CODE (_rtx) != C1)					\
1264     rtl_check_failed_flag (NAME, _rtx, __FILE__, __LINE__,	\
1265			    __FUNCTION__);			\
1266   _rtx; })
1267
1268#define RTL_FLAG_CHECK2(NAME, RTX, C1, C2) __extension__	\
1269({ __typeof (RTX) const _rtx = (RTX);				\
1270   if (GET_CODE (_rtx) != C1 && GET_CODE (_rtx) != C2)		\
1271     rtl_check_failed_flag (NAME, _rtx, __FILE__, __LINE__,	\
1272			    __FUNCTION__);			\
1273   _rtx; })
1274
1275#define RTL_FLAG_CHECK3(NAME, RTX, C1, C2, C3) __extension__	\
1276({ __typeof (RTX) const _rtx = (RTX);				\
1277   if (GET_CODE (_rtx) != C1 && GET_CODE (_rtx) != C2		\
1278       && GET_CODE (_rtx) != C3)				\
1279     rtl_check_failed_flag (NAME, _rtx, __FILE__, __LINE__,	\
1280			    __FUNCTION__);			\
1281   _rtx; })
1282
1283#define RTL_FLAG_CHECK4(NAME, RTX, C1, C2, C3, C4) __extension__ \
1284({ __typeof (RTX) const _rtx = (RTX);				\
1285   if (GET_CODE (_rtx) != C1 && GET_CODE (_rtx) != C2		\
1286       && GET_CODE (_rtx) != C3 && GET_CODE (_rtx) != C4)	\
1287     rtl_check_failed_flag (NAME, _rtx, __FILE__, __LINE__,	\
1288			    __FUNCTION__);			\
1289   _rtx; })
1290
1291#define RTL_FLAG_CHECK5(NAME, RTX, C1, C2, C3, C4, C5) __extension__ \
1292({ __typeof (RTX) const _rtx = (RTX);				\
1293   if (GET_CODE (_rtx) != C1 && GET_CODE (_rtx) != C2		\
1294       && GET_CODE (_rtx) != C3 && GET_CODE (_rtx) != C4	\
1295       && GET_CODE (_rtx) != C5)				\
1296     rtl_check_failed_flag (NAME, _rtx, __FILE__, __LINE__,	\
1297			    __FUNCTION__);			\
1298   _rtx; })
1299
1300#define RTL_FLAG_CHECK6(NAME, RTX, C1, C2, C3, C4, C5, C6)	\
1301  __extension__							\
1302({ __typeof (RTX) const _rtx = (RTX);				\
1303   if (GET_CODE (_rtx) != C1 && GET_CODE (_rtx) != C2		\
1304       && GET_CODE (_rtx) != C3 && GET_CODE (_rtx) != C4	\
1305       && GET_CODE (_rtx) != C5 && GET_CODE (_rtx) != C6)	\
1306     rtl_check_failed_flag (NAME, _rtx, __FILE__, __LINE__,	\
1307			    __FUNCTION__);			\
1308   _rtx; })
1309
1310#define RTL_FLAG_CHECK7(NAME, RTX, C1, C2, C3, C4, C5, C6, C7)	\
1311  __extension__							\
1312({ __typeof (RTX) const _rtx = (RTX);				\
1313   if (GET_CODE (_rtx) != C1 && GET_CODE (_rtx) != C2		\
1314       && GET_CODE (_rtx) != C3 && GET_CODE (_rtx) != C4	\
1315       && GET_CODE (_rtx) != C5 && GET_CODE (_rtx) != C6	\
1316       && GET_CODE (_rtx) != C7)				\
1317     rtl_check_failed_flag (NAME, _rtx, __FILE__, __LINE__,	\
1318			    __FUNCTION__);			\
1319   _rtx; })
1320
1321#define RTL_INSN_CHAIN_FLAG_CHECK(NAME, RTX)			\
1322  __extension__							\
1323({ __typeof (RTX) const _rtx = (RTX);				\
1324   if (!INSN_CHAIN_CODE_P (GET_CODE (_rtx)))			\
1325     rtl_check_failed_flag (NAME, _rtx, __FILE__, __LINE__,	\
1326			    __FUNCTION__);			\
1327   _rtx; })
1328
1329extern void rtl_check_failed_flag (const char *, const_rtx, const char *,
1330				   int, const char *)
1331    ATTRIBUTE_NORETURN ATTRIBUTE_COLD
1332    ;
1333
1334#else /* not ENABLE_RTL_FLAG_CHECKING */
1335
1336#define RTL_FLAG_CHECK1(NAME, RTX, C1)				(RTX)
1337#define RTL_FLAG_CHECK2(NAME, RTX, C1, C2)			(RTX)
1338#define RTL_FLAG_CHECK3(NAME, RTX, C1, C2, C3)			(RTX)
1339#define RTL_FLAG_CHECK4(NAME, RTX, C1, C2, C3, C4)		(RTX)
1340#define RTL_FLAG_CHECK5(NAME, RTX, C1, C2, C3, C4, C5)		(RTX)
1341#define RTL_FLAG_CHECK6(NAME, RTX, C1, C2, C3, C4, C5, C6)	(RTX)
1342#define RTL_FLAG_CHECK7(NAME, RTX, C1, C2, C3, C4, C5, C6, C7)	(RTX)
1343#define RTL_INSN_CHAIN_FLAG_CHECK(NAME, RTX)			(RTX)
1344#endif
1345
1346#define XINT(RTX, N)	(RTL_CHECK2 (RTX, N, 'i', 'n').rt_int)
1347#define XUINT(RTX, N)	(RTL_CHECK2 (RTX, N, 'i', 'n').rt_uint)
1348#define XSTR(RTX, N)	(RTL_CHECK2 (RTX, N, 's', 'S').rt_str)
1349#define XEXP(RTX, N)	(RTL_CHECK2 (RTX, N, 'e', 'u').rt_rtx)
1350#define XVEC(RTX, N)	(RTL_CHECK2 (RTX, N, 'E', 'V').rt_rtvec)
1351#define XMODE(RTX, N)	(RTL_CHECK1 (RTX, N, 'M').rt_type)
1352#define XTREE(RTX, N)	(RTL_CHECK1 (RTX, N, 't').rt_tree)
1353#define XBBDEF(RTX, N)	(RTL_CHECK1 (RTX, N, 'B').rt_bb)
1354#define XTMPL(RTX, N)	(RTL_CHECK1 (RTX, N, 'T').rt_str)
1355#define XCFI(RTX, N)	(RTL_CHECK1 (RTX, N, 'C').rt_cfi)
1356
1357#define XVECEXP(RTX, N, M)	RTVEC_ELT (XVEC (RTX, N), M)
1358#define XVECLEN(RTX, N)		GET_NUM_ELEM (XVEC (RTX, N))
1359
1360/* These are like XINT, etc. except that they expect a '0' field instead
1361   of the normal type code.  */
1362
1363#define X0INT(RTX, N)	   (RTL_CHECK1 (RTX, N, '0').rt_int)
1364#define X0UINT(RTX, N)	   (RTL_CHECK1 (RTX, N, '0').rt_uint)
1365#define X0STR(RTX, N)	   (RTL_CHECK1 (RTX, N, '0').rt_str)
1366#define X0EXP(RTX, N)	   (RTL_CHECK1 (RTX, N, '0').rt_rtx)
1367#define X0VEC(RTX, N)	   (RTL_CHECK1 (RTX, N, '0').rt_rtvec)
1368#define X0MODE(RTX, N)	   (RTL_CHECK1 (RTX, N, '0').rt_type)
1369#define X0TREE(RTX, N)	   (RTL_CHECK1 (RTX, N, '0').rt_tree)
1370#define X0BBDEF(RTX, N)	   (RTL_CHECK1 (RTX, N, '0').rt_bb)
1371#define X0ADVFLAGS(RTX, N) (RTL_CHECK1 (RTX, N, '0').rt_addr_diff_vec_flags)
1372#define X0CSELIB(RTX, N)   (RTL_CHECK1 (RTX, N, '0').rt_cselib)
1373#define X0MEMATTR(RTX, N)  (RTL_CHECKC1 (RTX, N, MEM).rt_mem)
1374#define X0CONSTANT(RTX, N) (RTL_CHECK1 (RTX, N, '0').rt_constant)
1375
1376/* Access a '0' field with any type.  */
1377#define X0ANY(RTX, N)	   RTL_CHECK1 (RTX, N, '0')
1378
1379#define XCINT(RTX, N, C)      (RTL_CHECKC1 (RTX, N, C).rt_int)
1380#define XCUINT(RTX, N, C)     (RTL_CHECKC1 (RTX, N, C).rt_uint)
1381#define XCSUBREG(RTX, N, C)   (RTL_CHECKC1 (RTX, N, C).rt_subreg)
1382#define XCSTR(RTX, N, C)      (RTL_CHECKC1 (RTX, N, C).rt_str)
1383#define XCEXP(RTX, N, C)      (RTL_CHECKC1 (RTX, N, C).rt_rtx)
1384#define XCVEC(RTX, N, C)      (RTL_CHECKC1 (RTX, N, C).rt_rtvec)
1385#define XCMODE(RTX, N, C)     (RTL_CHECKC1 (RTX, N, C).rt_type)
1386#define XCTREE(RTX, N, C)     (RTL_CHECKC1 (RTX, N, C).rt_tree)
1387#define XCBBDEF(RTX, N, C)    (RTL_CHECKC1 (RTX, N, C).rt_bb)
1388#define XCCFI(RTX, N, C)      (RTL_CHECKC1 (RTX, N, C).rt_cfi)
1389#define XCCSELIB(RTX, N, C)   (RTL_CHECKC1 (RTX, N, C).rt_cselib)
1390
1391#define XCVECEXP(RTX, N, M, C)	RTVEC_ELT (XCVEC (RTX, N, C), M)
1392#define XCVECLEN(RTX, N, C)	GET_NUM_ELEM (XCVEC (RTX, N, C))
1393
1394#define XC2EXP(RTX, N, C1, C2)	    (RTL_CHECKC2 (RTX, N, C1, C2).rt_rtx)
1395#define XC3EXP(RTX, N, C1, C2, C3)  (RTL_CHECKC3 (RTX, N, C1, C2, C3).rt_rtx)
1396
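/* Illustrative sketch (not part of rtl.h): walking a PARALLEL with the
   accessors above.  Operand 0 of a PARALLEL is an rtvec, so XVECLEN and
   XVECEXP apply; the helper name `for_each_parallel_elt' is hypothetical.  */

static void
for_each_parallel_elt (rtx pat)
{
  if (GET_CODE (pat) != PARALLEL)
    return;
  for (int i = 0; i < XVECLEN (pat, 0); i++)
    {
      rtx elt = XVECEXP (pat, 0, i);   /* i-th element of the vector */
      if (GET_CODE (elt) == SET)
	{
	  rtx dest = XEXP (elt, 0);    /* destination of the SET */
	  rtx src = XEXP (elt, 1);     /* source of the SET */
	  (void) dest;
	  (void) src;
	}
    }
}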
1397
1398/* Methods of rtx_expr_list.  */
1399
1400inline rtx_expr_list *rtx_expr_list::next () const
1401{
1402  rtx tmp = XEXP (this, 1);
1403  return safe_as_a <rtx_expr_list *> (tmp);
1404}
1405
1406inline rtx rtx_expr_list::element () const
1407{
1408  return XEXP (this, 0);
1409}
1410
1411/* Methods of rtx_insn_list.  */
1412
1413inline rtx_insn_list *rtx_insn_list::next () const
1414{
1415  rtx tmp = XEXP (this, 1);
1416  return safe_as_a <rtx_insn_list *> (tmp);
1417}
1418
1419inline rtx_insn *rtx_insn_list::insn () const
1420{
1421  rtx tmp = XEXP (this, 0);
1422  return safe_as_a <rtx_insn *> (tmp);
1423}
1424
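/* Illustrative sketch (not part of rtl.h): rtx_insn_list is a singly
   linked INSN_LIST chain, so next () and insn () above support an ordinary
   list walk.  The helper name `count_listed_insns' is hypothetical.  */

static int
count_listed_insns (rtx_insn_list *list)
{
  int n = 0;
  for (rtx_insn_list *p = list; p != NULL; p = p->next ())
    if (p->insn () != NULL)
      n++;
  return n;
}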
1425/* Methods of rtx_sequence.  */
1426
1427inline int rtx_sequence::len () const
1428{
1429  return XVECLEN (this, 0);
1430}
1431
1432inline rtx rtx_sequence::element (int index) const
1433{
1434  return XVECEXP (this, 0, index);
1435}
1436
1437inline rtx_insn *rtx_sequence::insn (int index) const
1438{
1439  return as_a <rtx_insn *> (XVECEXP (this, 0, index));
1440}
1441
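/* Illustrative sketch (not part of rtl.h): a SEQUENCE bundles several
   insns into one pattern (delay slots are the classic case), and
   len ()/insn () above give indexed access.  INSN_UID is defined just
   below; the helper name `dump_sequence_uids' is hypothetical.  */

static void
dump_sequence_uids (const rtx_sequence *seq)
{
  for (int i = 0; i < seq->len (); i++)
    fprintf (stderr, " %d", INSN_UID (seq->insn (i)));
  fputc ('\n', stderr);
}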
1442/* ACCESS MACROS for particular fields of insns.  */
1443
1444/* Holds a unique number for each insn.
1445   These are not necessarily sequentially increasing.  */
1446inline int INSN_UID (const_rtx insn)
1447{
1448  return RTL_INSN_CHAIN_FLAG_CHECK ("INSN_UID",
1449				    (insn))->u2.insn_uid;
1450}
1451inline int& INSN_UID (rtx insn)
1452{
1453  return RTL_INSN_CHAIN_FLAG_CHECK ("INSN_UID",
1454				    (insn))->u2.insn_uid;
1455}
1456
1457/* Chain insns together in sequence.  */
1458
1459/* For now these are split in two: an rvalue form:
1460   PREV_INSN/NEXT_INSN
1461   and an lvalue form:
1462   SET_NEXT_INSN/SET_PREV_INSN.  */
1463
1464inline rtx_insn *PREV_INSN (const rtx_insn *insn)
1465{
1466  rtx prev = XEXP (insn, 0);
1467  return safe_as_a <rtx_insn *> (prev);
1468}
1469
1470inline rtx& SET_PREV_INSN (rtx_insn *insn)
1471{
1472  return XEXP (insn, 0);
1473}
1474
1475inline rtx_insn *NEXT_INSN (const rtx_insn *insn)
1476{
1477  rtx next = XEXP (insn, 1);
1478  return safe_as_a <rtx_insn *> (next);
1479}
1480
1481inline rtx& SET_NEXT_INSN (rtx_insn *insn)
1482{
1483  return XEXP (insn, 1);
1484}
1485
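/* Illustrative sketch (not part of rtl.h): the canonical walk over the
   insn chain with the rvalue accessors.  NEXT_INSN returns NULL at the end
   of the chain, so the loop terminates naturally; the helper name
   `walk_insn_chain' is hypothetical.  */

static void
walk_insn_chain (rtx_insn *first)
{
  for (rtx_insn *insn = first; insn != NULL; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      fprintf (stderr, "insn %d\n", INSN_UID (insn));
}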
1486inline basic_block BLOCK_FOR_INSN (const_rtx insn)
1487{
1488  return XBBDEF (insn, 2);
1489}
1490
1491inline basic_block& BLOCK_FOR_INSN (rtx insn)
1492{
1493  return XBBDEF (insn, 2);
      <-- 22: Returning null reference
1494}
1495
1496inline void set_block_for_insn (rtx_insn *insn, basic_block bb)
1497{
1498 BLOCK_FOR_INSN (insn) = bb;
1499}
1500
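/* Illustrative sketch (not part of rtl.h): BLOCK_FOR_INSN can yield a null
   basic_block for an insn that is not placed in the CFG, and the
   reference-returning overload above is where the analyzer reports
   "Returning null reference" on this translation unit's path.  Callers
   normally test the result; the helper name `insn_bb_index' is
   hypothetical.  */

static int
insn_bb_index (const rtx_insn *insn)
{
  basic_block bb = BLOCK_FOR_INSN (insn);   /* may be NULL */
  return bb != NULL ? bb->index : -1;
}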
1501/* The body of an insn.  */
1502inline rtx PATTERN (const_rtx insn)
1503{
1504  return XEXP (insn, 3);
1505}
1506
1507inline rtx& PATTERN (rtx insn)
1508{
1509  return XEXP (insn, 3);
1510}
1511
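/* Illustrative sketch (not part of rtl.h): PATTERN is the entry point for
   inspecting an insn's body, and a lone SET is the most common shape.  The
   helper name `single_set_like_p' is hypothetical; the real single_set in
   rtlanal.cc also digs through PARALLELs.  */

static bool
single_set_like_p (const rtx_insn *insn)
{
  return GET_CODE (PATTERN (insn)) == SET;
}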
1512inline unsigned int INSN_LOCATION (const rtx_insn *insn)
1513{
1514  return XUINT (insn, 4);
1515}
1516
1517inline unsigned int& INSN_LOCATION (rtx_insn *insn)
1518{
1519  return XUINT (insn, 4);
1520}
1521
1522inline bool INSN_HAS_LOCATION (const rtx_insn *insn)
1523{
1524  return LOCATION_LOCUS (INSN_LOCATION (insn)) != UNKNOWN_LOCATION;
1525}
1526
1527/* LOCATION of an RTX if relevant.  */
1528#define RTL_LOCATION(X) (INSN_P (X) ? \
1529			 INSN_LOCATION (as_a <rtx_insn *> (X)) \
1530			 : UNKNOWN_LOCATION)
1531
1532/* Code number of instruction, from when it was recognized.
1533   -1 means this instruction has not been recognized yet.  */
1534#define INSN_CODE(INSN) XINT (INSN, 5)
1535
1536inline rtvec rtx_jump_table_data::get_labels () const
1537{
1538  rtx pat = PATTERN (this);
1539  if (GET_CODE (pat) == ADDR_VEC)
1540    return XVEC (pat, 0);
1541  else
1542    return XVEC (pat, 1); /* presumably an ADDR_DIFF_VEC */
1543}
1544
1545/* Return the mode of the data in the table, which is always a scalar
1546   integer.  */
1547
1548inline scalar_int_mode
1549rtx_jump_table_data::get_data_mode () const
1550{
1551  return as_a <scalar_int_mode> (GET_MODE (PATTERN (this)));
1552}
1553
1554/* If LABEL is followed by a jump table, return the table, otherwise
1555 return null. */
1556
1557inline rtx_jump_table_data *
1558jump_table_for_label (const rtx_code_label *label)
1559{
1560 return safe_dyn_cast <rtx_jump_table_data *> (NEXT_INSN (label));
1561}
1562
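/* Illustrative sketch (not part of rtl.h): combining jump_table_for_label
   with get_labels () to visit every target of a dispatch table.  Each
   element of the rtvec is a LABEL_REF whose operand 0 is the code label;
   the helper name `for_each_table_label' is hypothetical.  */

static void
for_each_table_label (const rtx_code_label *label)
{
  if (rtx_jump_table_data *table = jump_table_for_label (label))
    {
      rtvec vec = table->get_labels ();
      for (int i = 0; i < GET_NUM_ELEM (vec); i++)
	{
	  rtx ref = RTVEC_ELT (vec, i);   /* a LABEL_REF */
	  rtx target = XEXP (ref, 0);     /* the CODE_LABEL itself */
	  (void) target;
	}
    }
}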
1563#define RTX_FRAME_RELATED_P(RTX)(__extension__ ({ __typeof ((RTX)) const _rtx = ((RTX)); if (
((enum rtx_code) (_rtx)->code) != DEBUG_INSN && ((
enum rtx_code) (_rtx)->code) != INSN && ((enum rtx_code
) (_rtx)->code) != CALL_INSN && ((enum rtx_code) (
_rtx)->code) != JUMP_INSN && ((enum rtx_code) (_rtx
)->code) != BARRIER && ((enum rtx_code) (_rtx)->
code) != SET) rtl_check_failed_flag ("RTX_FRAME_RELATED_P",_rtx
, "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/rtl.h"
, 1563, __FUNCTION__); _rtx; })->frame_related)
\
1564 (RTL_FLAG_CHECK6 ("RTX_FRAME_RELATED_P", (RTX), DEBUG_INSN, INSN, \__extension__ ({ __typeof ((RTX)) const _rtx = ((RTX)); if ((
(enum rtx_code) (_rtx)->code) != DEBUG_INSN && ((enum
rtx_code) (_rtx)->code) != INSN && ((enum rtx_code
) (_rtx)->code) != CALL_INSN && ((enum rtx_code) (
_rtx)->code) != JUMP_INSN && ((enum rtx_code) (_rtx
)->code) != BARRIER && ((enum rtx_code) (_rtx)->
code) != SET) rtl_check_failed_flag ("RTX_FRAME_RELATED_P",_rtx
, "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/rtl.h"
, 1565, __FUNCTION__); _rtx; })
1565 CALL_INSN, JUMP_INSN, BARRIER, SET)__extension__ ({ __typeof ((RTX)) const _rtx = ((RTX)); if ((
(enum rtx_code) (_rtx)->code) != DEBUG_INSN && ((enum
rtx_code) (_rtx)->code) != INSN && ((enum rtx_code
) (_rtx)->code) != CALL_INSN && ((enum rtx_code) (
_rtx)->code) != JUMP_INSN && ((enum rtx_code) (_rtx
)->code) != BARRIER && ((enum rtx_code) (_rtx)->
code) != SET) rtl_check_failed_flag ("RTX_FRAME_RELATED_P",_rtx
, "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/rtl.h"
, 1565, __FUNCTION__); _rtx; })
->frame_related)
1566
1567/* 1 if JUMP RTX is a crossing jump. */
1568#define CROSSING_JUMP_P(RTX)(__extension__ ({ __typeof ((RTX)) const _rtx = ((RTX)); if (
((enum rtx_code) (_rtx)->code) != JUMP_INSN) rtl_check_failed_flag
("CROSSING_JUMP_P", _rtx, "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/rtl.h"
, 1568, __FUNCTION__); _rtx; })->jump)
\
1569 (RTL_FLAG_CHECK1 ("CROSSING_JUMP_P", (RTX), JUMP_INSN)__extension__ ({ __typeof ((RTX)) const _rtx = ((RTX)); if ((
(enum rtx_code) (_rtx)->code) != JUMP_INSN) rtl_check_failed_flag
("CROSSING_JUMP_P", _rtx, "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/rtl.h"
, 1569, __FUNCTION__); _rtx; })
->jump)
1570
1571/* 1 if RTX is a call to a const function. Built from ECF_CONST and
1572 TREE_READONLY. */
1573#define RTL_CONST_CALL_P(RTX)(__extension__ ({ __typeof ((RTX)) const _rtx = ((RTX)); if (
((enum rtx_code) (_rtx)->code) != CALL_INSN) rtl_check_failed_flag
("RTL_CONST_CALL_P", _rtx, "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/rtl.h"
, 1573, __FUNCTION__); _rtx; })->unchanging)
\
1574 (RTL_FLAG_CHECK1 ("RTL_CONST_CALL_P", (RTX), CALL_INSN)__extension__ ({ __typeof ((RTX)) const _rtx = ((RTX)); if ((
(enum rtx_code) (_rtx)->code) != CALL_INSN) rtl_check_failed_flag
("RTL_CONST_CALL_P", _rtx, "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/rtl.h"
, 1574, __FUNCTION__); _rtx; })
->unchanging)
1575
1576/* 1 if RTX is a call to a pure function. Built from ECF_PURE and
1577 DECL_PURE_P. */
1578#define RTL_PURE_CALL_P(RTX)(__extension__ ({ __typeof ((RTX)) const _rtx = ((RTX)); if (
((enum rtx_code) (_rtx)->code) != CALL_INSN) rtl_check_failed_flag
("RTL_PURE_CALL_P", _rtx, "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/rtl.h"
, 1578, __FUNCTION__); _rtx; })->return_val)
\
1579 (RTL_FLAG_CHECK1 ("RTL_PURE_CALL_P", (RTX), CALL_INSN)__extension__ ({ __typeof ((RTX)) const _rtx = ((RTX)); if ((
(enum rtx_code) (_rtx)->code) != CALL_INSN) rtl_check_failed_flag
("RTL_PURE_CALL_P", _rtx, "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/rtl.h"
, 1579, __FUNCTION__); _rtx; })
->return_val)
1580
1581/* 1 if RTX is a call to a const or pure function. */
1582#define RTL_CONST_OR_PURE_CALL_P(RTX)((__extension__ ({ __typeof ((RTX)) const _rtx = ((RTX)); if (
((enum rtx_code) (_rtx)->code) != CALL_INSN) rtl_check_failed_flag
("RTL_CONST_CALL_P", _rtx, "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/rtl.h"
, 1582, __FUNCTION__); _rtx; })->unchanging) || (__extension__
({ __typeof ((RTX)) const _rtx = ((RTX)); if (((enum rtx_code
) (_rtx)->code) != CALL_INSN) rtl_check_failed_flag ("RTL_PURE_CALL_P"
, _rtx, "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/rtl.h"
, 1582, __FUNCTION__); _rtx; })->return_val))
\
1583 (RTL_CONST_CALL_P (RTX)(__extension__ ({ __typeof ((RTX)) const _rtx = ((RTX)); if (
((enum rtx_code) (_rtx)->code) != CALL_INSN) rtl_check_failed_flag
("RTL_CONST_CALL_P", _rtx, "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/rtl.h"
, 1583, __FUNCTION__); _rtx; })->unchanging)
|| RTL_PURE_CALL_P (RTX)(__extension__ ({ __typeof ((RTX)) const _rtx = ((RTX)); if (
((enum rtx_code) (_rtx)->code) != CALL_INSN) rtl_check_failed_flag
("RTL_PURE_CALL_P", _rtx, "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/rtl.h"
, 1583, __FUNCTION__); _rtx; })->return_val)
)
1584
1585/* 1 if RTX is a call to a looping const or pure function. Built from
1586 ECF_LOOPING_CONST_OR_PURE and DECL_LOOPING_CONST_OR_PURE_P. */
1587#define RTL_LOOPING_CONST_OR_PURE_CALL_P(RTX) \
1588 (RTL_FLAG_CHECK1 ("CONST_OR_PURE_CALL_P", (RTX), CALL_INSN)->call)
1589
1590/* 1 if RTX is a call_insn for a sibling call. */
1591#define SIBLING_CALL_P(RTX) \
1592 (RTL_FLAG_CHECK1 ("SIBLING_CALL_P", (RTX), CALL_INSN)->jump)
1593
1594/* 1 if RTX is a jump_insn, call_insn, or insn that is an annulling branch. */
1595#define INSN_ANNULLED_BRANCH_P(RTX) \
1596 (RTL_FLAG_CHECK1 ("INSN_ANNULLED_BRANCH_P", (RTX), JUMP_INSN)->unchanging)
1597
1598/* 1 if RTX is an insn in a delay slot and is from the target of the branch.
1599 If the branch insn has INSN_ANNULLED_BRANCH_P set, this insn should only be
1600 executed if the branch is taken. For annulled branches with this bit
1601 clear, the insn should be executed only if the branch is not taken. */
1602#define INSN_FROM_TARGET_P(RTX) \
1603 (RTL_FLAG_CHECK3 ("INSN_FROM_TARGET_P", (RTX), INSN, JUMP_INSN, \
1604 CALL_INSN)->in_struct)
1605
1606/* In an ADDR_DIFF_VEC, the flags for RTX for use by branch shortening.
1607 See the comments for ADDR_DIFF_VEC in rtl.def. */
1608#define ADDR_DIFF_VEC_FLAGS(RTX) X0ADVFLAGS (RTX, 4)
1609
1610/* In a VALUE, the value cselib has assigned to RTX.
1611 This is a "struct cselib_val", see cselib.h. */
1612#define CSELIB_VAL_PTR(RTX) X0CSELIB (RTX, 0)
1613
1614/* Holds a list of notes on what this insn does to various REGs.
1615 It is a chain of EXPR_LIST rtx's, where the second operand is the
1616 chain pointer and the first operand is the REG being described.
1617 The mode field of the EXPR_LIST contains not a real machine mode
1618 but a value from enum reg_note. */
1619#define REG_NOTES(INSN) XEXP(INSN, 6)
1620
1621/* In an ENTRY_VALUE this is the DECL_INCOMING_RTL of the argument in
1622 question. */
1623#define ENTRY_VALUE_EXP(RTX) (RTL_CHECKC1 (RTX, 0, ENTRY_VALUE).rt_rtx)
1624
1625enum reg_note
1626{
1627#define DEF_REG_NOTE(NAME) NAME,
1628#include "reg-notes.def"
1629#undef DEF_REG_NOTE
1630 REG_NOTE_MAX
1631};
1632
1633/* Define macros to extract and insert the reg-note kind in an EXPR_LIST. */
1634#define REG_NOTE_KIND(LINK) ((enum reg_note) GET_MODE (LINK))
1635#define PUT_REG_NOTE_KIND(LINK, KIND) \
1636 PUT_MODE_RAW (LINK, (machine_mode) (KIND))
1637
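The two macros above rely on the trick described in the comment before line 1619: an EXPR_LIST note stores its reg-note kind in the slot that normally holds a machine mode. A standalone C++ sketch of the same round-trip (toy types, hypothetical names, not GCC code):

#include <cassert>

enum toy_reg_note { TOY_REG_DEAD, TOY_REG_EQUAL, TOY_REG_NOTE_MAX };

struct toy_expr_list
{
  int mode;   // normally holds a machine mode; reused for the note kind
};

// Mirrors PUT_REG_NOTE_KIND: stash the kind in the mode slot.
static void put_kind (toy_expr_list *link, toy_reg_note kind)
{ link->mode = (int) kind; }

// Mirrors REG_NOTE_KIND: read the kind back out.
static toy_reg_note get_kind (const toy_expr_list *link)
{ return (toy_reg_note) link->mode; }

int main ()
{
  toy_expr_list note;
  put_kind (&note, TOY_REG_EQUAL);
  assert (get_kind (&note) == TOY_REG_EQUAL);
  return 0;
}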
1638/* Names for REG_NOTE's in EXPR_LIST insn's. */
1639
1640extern const char * const reg_note_name[];
1641#define GET_REG_NOTE_NAME(MODE) (reg_note_name[(int) (MODE)])
1642
1643/* This field is only present on CALL_INSNs. It holds a chain of EXPR_LIST of
1644 USE, CLOBBER and SET expressions.
1645 USE expressions list the registers filled with arguments that
1646 are passed to the function.
1647 CLOBBER expressions document the registers explicitly clobbered
1648 by this CALL_INSN.
1649 SET expressions say that the return value of the call (the SET_DEST)
1650 is equivalent to a value available before the call (the SET_SRC).
1651 This kind of SET is used when the return value is predictable in
1652 advance. It is purely an optimisation hint; unlike USEs and CLOBBERs,
1653 it does not affect register liveness.
1654
1655 Pseudo registers cannot be mentioned in this list. */
1656#define CALL_INSN_FUNCTION_USAGE(INSN) XEXP(INSN, 7)
1657
1658/* The label-number of a code-label. The assembler label
1659 is made from `L' and the label-number printed in decimal.
1660 Label numbers are unique in a compilation. */
1661#define CODE_LABEL_NUMBER(INSN) XINT (INSN, 5)
1662
1663/* In a NOTE that is a line number, this is a string for the file name that the
1664 line is in. We use the same field to record block numbers temporarily in
1665 NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes. (We avoid lots of casts
1666 between ints and pointers if we use a different macro for the block number.)
1667 */
1668
1669/* Opaque data. */
1670#define NOTE_DATA(INSN) RTL_CHECKC1 (INSN, 3, NOTE)
1671#define NOTE_DELETED_LABEL_NAME(INSN) XCSTR (INSN, 3, NOTE)
1672#define SET_INSN_DELETED(INSN) set_insn_deleted (INSN);
1673#define NOTE_BLOCK(INSN) XCTREE (INSN, 3, NOTE)
1674#define NOTE_EH_HANDLER(INSN) XCINT (INSN, 3, NOTE)
1675#define NOTE_BASIC_BLOCK(INSN) XCBBDEF (INSN, 3, NOTE)
1676#define NOTE_VAR_LOCATION(INSN) XCEXP (INSN, 3, NOTE)
1677#define NOTE_MARKER_LOCATION(INSN) XCUINT (INSN, 3, NOTE)
1678#define NOTE_CFI(INSN) XCCFI (INSN, 3, NOTE)
1679#define NOTE_LABEL_NUMBER(INSN) XCINT (INSN, 3, NOTE)
1680
1681/* In a NOTE that is a line number, this is the line number.
1682 Other kinds of NOTEs are identified by negative numbers here. */
1683#define NOTE_KIND(INSN) XCINT (INSN, 4, NOTE)
1684
1685/* Nonzero if INSN is a note marking the beginning of a basic block. */
1686#define NOTE_INSN_BASIC_BLOCK_P(INSN) \
1687 (NOTE_P (INSN) && NOTE_KIND (INSN) == NOTE_INSN_BASIC_BLOCK)
1688
1689/* Nonzero if INSN is a debug nonbind marker note,
1690 for which NOTE_MARKER_LOCATION can be used. */
1691#define NOTE_MARKER_P(INSN) \
1692 (NOTE_P (INSN) && \
1693 (NOTE_KIND (INSN) == NOTE_INSN_BEGIN_STMT \
1694 || NOTE_KIND (INSN) == NOTE_INSN_INLINE_ENTRY))
1695
1696/* Variable declaration and the location of a variable. */
1697#define PAT_VAR_LOCATION_DECL(PAT) (XCTREE ((PAT), 0, VAR_LOCATION))
1698#define PAT_VAR_LOCATION_LOC(PAT) (XCEXP ((PAT), 1, VAR_LOCATION))
1699
1700/* Initialization status of the variable in the location. Status
1701 can be unknown, uninitialized or initialized. See enumeration
1702 type below. */
1703#define PAT_VAR_LOCATION_STATUS(PAT) \
1704 (RTL_FLAG_CHECK1 ("PAT_VAR_LOCATION_STATUS", PAT, VAR_LOCATION) \
1705 ->u2.var_location_status)
1706
1707/* Accessors for a NOTE_INSN_VAR_LOCATION. */
1708#define NOTE_VAR_LOCATION_DECL(NOTE) \
1709 PAT_VAR_LOCATION_DECL (NOTE_VAR_LOCATION (NOTE))
1710#define NOTE_VAR_LOCATION_LOC(NOTE) \
1711 PAT_VAR_LOCATION_LOC (NOTE_VAR_LOCATION (NOTE))
1712#define NOTE_VAR_LOCATION_STATUS(NOTE) \
1713 PAT_VAR_LOCATION_STATUS (NOTE_VAR_LOCATION (NOTE))
1714
1715/* Evaluate to TRUE if INSN is a debug insn that denotes a variable
1716 location/value tracking annotation. */
1717#define DEBUG_BIND_INSN_P(INSN) \
1718 (DEBUG_INSN_P (INSN) \
1719 && (GET_CODE (PATTERN (INSN)) \
1720 == VAR_LOCATION))
1721/* Evaluate to TRUE if INSN is a debug insn that denotes a program
1722 source location marker. */
1723#define DEBUG_MARKER_INSN_P(INSN) \
1724 (DEBUG_INSN_P (INSN) \
1725 && (GET_CODE (PATTERN (INSN)) \
1726 != VAR_LOCATION))
1727/* Evaluate to the marker kind. */
1728#define INSN_DEBUG_MARKER_KIND(INSN) \
1729 (GET_CODE (PATTERN (INSN)) == DEBUG_MARKER \
1730 ? (GET_MODE (PATTERN (INSN)) == VOIDmode \
1731 ? NOTE_INSN_BEGIN_STMT \
1732 : GET_MODE (PATTERN (INSN)) == BLKmode \
1733 ? NOTE_INSN_INLINE_ENTRY \
1734 : (enum insn_note)-1) \
1735 : (enum insn_note)-1)
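INSN_DEBUG_MARKER_KIND thus decodes the marker kind from the pattern's mode: VOIDmode means a begin-stmt marker and BLKmode an inline-entry marker, the mode-field abuse that the comment below acknowledges. A minimal standalone sketch of that mapping (toy enums, hypothetical names, not GCC code):

#include <cassert>

enum toy_mode { TOY_VOIDmode, TOY_BLKmode, TOY_SImode };
enum toy_note { TOY_NOTE_BEGIN_STMT, TOY_NOTE_INLINE_ENTRY };

// Decode the marker kind from the pattern's mode, returning -1 for an
// unrecognized mode, as INSN_DEBUG_MARKER_KIND does.
static int marker_kind (toy_mode pattern_mode)
{
  return pattern_mode == TOY_VOIDmode ? TOY_NOTE_BEGIN_STMT
         : pattern_mode == TOY_BLKmode ? TOY_NOTE_INLINE_ENTRY
         : -1;
}

int main ()
{
  assert (marker_kind (TOY_VOIDmode) == TOY_NOTE_BEGIN_STMT);
  assert (marker_kind (TOY_BLKmode) == TOY_NOTE_INLINE_ENTRY);
  assert (marker_kind (TOY_SImode) == -1);
  return 0;
}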
1736/* Create patterns for debug markers. These and the above abstract
1737 the representation, so that it's easier to get rid of the abuse of
1738 the mode to hold the marker kind. Other marker types are
1739 envisioned, so a single bit flag won't do; maybe separate RTL codes
1740 wouldn't be a problem. */
1741#define GEN_RTX_DEBUG_MARKER_BEGIN_STMT_PAT() \
1742 gen_rtx_DEBUG_MARKER (VOIDmode)
1743#define GEN_RTX_DEBUG_MARKER_INLINE_ENTRY_PAT() \
1744 gen_rtx_DEBUG_MARKER (BLKmode)
1745
1746/* The VAR_LOCATION rtx in a DEBUG_INSN. */
1747#define INSN_VAR_LOCATION(INSN) \
1748 (RTL_FLAG_CHECK1 ("INSN_VAR_LOCATION", PATTERN (INSN), VAR_LOCATION))
1749/* A pointer to the VAR_LOCATION rtx in a DEBUG_INSN. */
1750#define INSN_VAR_LOCATION_PTR(INSN) \
1751 (&PATTERN (INSN))
1752
1753/* Accessors for a tree-expanded var location debug insn. */
1754#define INSN_VAR_LOCATION_DECL(INSN) \
1755 PAT_VAR_LOCATION_DECL (INSN_VAR_LOCATION (INSN))
1756#define INSN_VAR_LOCATION_LOC(INSN) \
1757 PAT_VAR_LOCATION_LOC (INSN_VAR_LOCATION (INSN))
1758#define INSN_VAR_LOCATION_STATUS(INSN) \
1759 PAT_VAR_LOCATION_STATUS (INSN_VAR_LOCATION (INSN))
1760
1761/* Expand to the RTL that denotes an unknown variable location in a
1762 DEBUG_INSN. */
1763#define gen_rtx_UNKNOWN_VAR_LOC() (gen_rtx_CLOBBER (VOIDmode, const0_rtx))
1764
1765/* Determine whether X is such an unknown location. */
1766#define VAR_LOC_UNKNOWN_P(X) \
1767 (GET_CODE (X) == CLOBBER && XEXP ((X), 0) == const0_rtx)
1768
1769/* 1 if RTX is emitted after a call, but it should take effect before
1770 the call returns. */
1771#define NOTE_DURING_CALL_P(RTX) \
1772 (RTL_FLAG_CHECK1 ("NOTE_VAR_LOCATION_DURING_CALL_P", (RTX), NOTE)->call)
1773
1774/* DEBUG_EXPR_DECL corresponding to a DEBUG_EXPR RTX. */
1775#define DEBUG_EXPR_TREE_DECL(RTX) XCTREE (RTX, 0, DEBUG_EXPR)
1776
1777/* VAR_DECL/PARM_DECL DEBUG_IMPLICIT_PTR takes address of. */
1778#define DEBUG_IMPLICIT_PTR_DECL(RTX) XCTREE (RTX, 0, DEBUG_IMPLICIT_PTR)
1779
1780/* PARM_DECL DEBUG_PARAMETER_REF references. */
1781#define DEBUG_PARAMETER_REF_DECL(RTX) XCTREE (RTX, 0, DEBUG_PARAMETER_REF)
1782
1783/* Codes that appear in the NOTE_KIND field for kinds of notes
1784 that are not line numbers. These codes are all negative.
1785
1786 Notice that we do not try to use zero here for any of
1787 the special note codes because sometimes the source line
1788 actually can be zero! This happens (for example) when we
1789 are generating code for the per-translation-unit constructor
1790 and destructor routines for some C++ translation unit. */
1791
1792enum insn_note
1793{
1794#define DEF_INSN_NOTE(NAME) NAME,
1795#include "insn-notes.def"
1796#undef DEF_INSN_NOTE
1797
1798 NOTE_INSN_MAX
1799};
1800
1801/* Names for NOTE insn's other than line numbers. */
1802
1803extern const char * const note_insn_name[NOTE_INSN_MAX];
1804#define GET_NOTE_INSN_NAME(NOTE_CODE) \
1805 (note_insn_name[(NOTE_CODE)])
1806
1807/* The name of a label, in case it corresponds to an explicit label
1808 in the input source code. */
1809#define LABEL_NAME(RTX) XCSTR (RTX, 6, CODE_LABEL)
1810
1811/* In jump.cc, each label contains a count of the number
1812 of LABEL_REFs that point at it, so unused labels can be deleted. */
1813#define LABEL_NUSES(RTX) XCINT (RTX, 4, CODE_LABEL)
1814
1815/* Labels carry a two-bit field composed of the ->jump and ->call
1816 bits. This field indicates whether the label is an alternate
1817 entry point, and if so, what kind. */
1818enum label_kind
1819{
1820 LABEL_NORMAL = 0, /* ordinary label */
1821 LABEL_STATIC_ENTRY, /* alternate entry point, not exported */
1822 LABEL_GLOBAL_ENTRY, /* alternate entry point, exported */
1823 LABEL_WEAK_ENTRY /* alternate entry point, exported as weak symbol */
1824};
1825
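A standalone sketch of this two-bit encoding, using a toy struct in place of GCC's rtx (all names hypothetical); it mirrors the LABEL_KIND/SET_LABEL_KIND macros defined next:

#include <cassert>

enum label_kind_model { NORMAL, STATIC_ENTRY, GLOBAL_ENTRY, WEAK_ENTRY };

struct toy_label { unsigned jump : 1, call : 1; };

// Mirrors SET_LABEL_KIND: split the kind across the two flag bits.
static void set_kind (toy_label *l, unsigned kind)
{
  l->jump = (kind >> 1) & 1;   // high bit of the kind
  l->call = kind & 1;          // low bit of the kind
}

// Mirrors LABEL_KIND: reassemble the kind from the flag bits.
static label_kind_model get_kind (const toy_label *l)
{ return (label_kind_model) ((l->jump << 1) | l->call); }

int main ()
{
  toy_label l{};
  set_kind (&l, WEAK_ENTRY);
  assert (get_kind (&l) == WEAK_ENTRY);   // round-trips
  assert (get_kind (&l) != NORMAL);       // i.e. LABEL_ALT_ENTRY_P holds
  return 0;
}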
1826#if defined ENABLE_RTL_FLAG_CHECKING && (GCC_VERSION > 2007)
1827
1828/* Retrieve the kind of LABEL. */
1829#define LABEL_KIND(LABEL) __extension__ \
1830({ __typeof (LABEL) const _label = (LABEL); \
1831 if (! LABEL_P (_label)) \
1832 rtl_check_failed_flag ("LABEL_KIND", _label, __FILE__, __LINE__, \
1833 __FUNCTION__); \
1834 (enum label_kind) ((_label->jump << 1) | _label->call); })
1835
1836/* Set the kind of LABEL. */
1837#define SET_LABEL_KIND(LABEL, KIND) do { \
1838 __typeof (LABEL) const _label = (LABEL); \
1839 const unsigned int _kind = (KIND); \
1840 if (! LABEL_P (_label)) \
1841 rtl_check_failed_flag ("SET_LABEL_KIND", _label, __FILE__, __LINE__, \
1842 __FUNCTION__); \
1843 _label->jump = ((_kind >> 1) & 1); \
1844 _label->call = (_kind & 1); \
1845} while (0)
1846
1847#else
1848
1849/* Retrieve the kind of LABEL. */
1850#define LABEL_KIND(LABEL) \
1851 ((enum label_kind) (((LABEL)->jump << 1) | (LABEL)->call))
1852
1853/* Set the kind of LABEL. */
1854#define SET_LABEL_KIND(LABEL, KIND) do { \
1855 rtx const _label = (LABEL); \
1856 const unsigned int _kind = (KIND); \
1857 _label->jump = ((_kind >> 1) & 1); \
1858 _label->call = (_kind & 1); \
1859} while (0)
1860
1861#endif /* rtl flag checking */
1862
1863#define LABEL_ALT_ENTRY_P(LABEL) (LABEL_KIND (LABEL) != LABEL_NORMAL)
1864
1865/* In jump.cc, each JUMP_INSN can point to a label that it can jump to,
1866 so that if the JUMP_INSN is deleted, the label's LABEL_NUSES can
1867 be decremented and possibly the label can be deleted. */
1868#define JUMP_LABEL(INSN) XCEXP (INSN, 7, JUMP_INSN)
1869
1870inline rtx_insn *JUMP_LABEL_AS_INSN (const rtx_insn *insn)
1871{
1872 return safe_as_a <rtx_insn *> (JUMP_LABEL (insn));
1873}
1874
1875/* Methods of rtx_jump_insn. */
1876
1877inline rtx rtx_jump_insn::jump_label () const
1878{
1879 return JUMP_LABEL (this);
1880}
1881
1882inline rtx_code_label *rtx_jump_insn::jump_target () const
1883{
1884 return safe_as_a <rtx_code_label *> (JUMP_LABEL (this));
1885}
1886
1887inline void rtx_jump_insn::set_jump_target (rtx_code_label *target)
1888{
1889 JUMP_LABEL (this) = target;
1890}
1891
1892/* Once basic blocks are found, each CODE_LABEL starts a chain that
1893 goes through all the LABEL_REFs that jump to that label. The chain
1894 eventually winds up at the CODE_LABEL: it is circular. */
1895#define LABEL_REFS(LABEL) XCEXP (LABEL, 3, CODE_LABEL)
1896
1897/* Get the label that a LABEL_REF references. */
1898inline rtx_insn *
1899label_ref_label (const_rtx ref)
1900{
1901 return as_a<rtx_insn *> (XCEXP (ref, 0, LABEL_REF));
1902}
1903
1904/* Set the label that LABEL_REF ref refers to. */
1905
1906inline void
1907set_label_ref_label (rtx ref, rtx_insn *label)
1908{
1909 XCEXP (ref, 0, LABEL_REF) = label;
1910}
1911
1912/* For a REG rtx, REGNO extracts the register number. REGNO can only
1913 be used on RHS. Use SET_REGNO to change the value. */
1914#define REGNO(RTX) (rhs_regno(RTX))
1915#define SET_REGNO(RTX, N) (df_ref_change_reg_with_loc (RTX, N))
1916
1917/* Return the number of consecutive registers in a REG. This is always
1918 1 for pseudo registers and is determined by TARGET_HARD_REGNO_NREGS for
1919 hard registers. */
1920#define REG_NREGS(RTX) (REG_CHECK (RTX)->nregs)
1921
1922/* ORIGINAL_REGNO holds the number the register originally had; for a
1923 pseudo register turned into a hard reg this will hold the old pseudo
1924 register number. */
1925#define ORIGINAL_REGNO(RTX) \
1926 (RTL_FLAG_CHECK1 ("ORIGINAL_REGNO", (RTX), REG)->u2.original_regno)
1927
1928/* Force the REGNO macro to only be used on the lhs. */
1929inline unsigned int
1930rhs_regno (const_rtx x)
1931{
1932 return REG_CHECK (x)->regno;
1933}
1934
1935/* Return the final register in REG X plus one. */
1936inline unsigned int
1937END_REGNO (const_rtx x)
1938{
1939 return REGNO (x) + REG_NREGS (x);
1940}
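END_REGNO therefore treats a REG as the half-open regno range [REGNO, REGNO + REG_NREGS). A standalone sketch of that arithmetic and a typical overlap test built on it (toy struct, hypothetical names, not GCC code):

#include <cassert>

struct toy_reg { unsigned regno, nregs; };

// Mirrors END_REGNO: one past the last regno the value occupies.
static unsigned end_regno (const toy_reg &r) { return r.regno + r.nregs; }

// Typical use: do two register ranges overlap?
static bool ranges_overlap (const toy_reg &a, const toy_reg &b)
{ return a.regno < end_regno (b) && b.regno < end_regno (a); }

int main ()
{
  toy_reg pair{1, 2};   // a two-register value occupying regnos 1 and 2
  toy_reg single{3, 1};
  assert (end_regno (pair) == 3);
  assert (!ranges_overlap (pair, single));   // [1,3) vs [3,4)
  return 0;
}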
1941
1942/* Change the REGNO and REG_NREGS of REG X to the specified values,
1943 bypassing the df machinery. */
1944inline void
1945set_regno_raw (rtx x, unsigned int regno, unsigned int nregs)
1946{
1947 reg_info *reg = REG_CHECK (x);
1948 reg->regno = regno;
1949 reg->nregs = nregs;
1950}
1951
1952/* 1 if RTX is a reg or parallel that is the current function's return
1953 value. */
1954#define REG_FUNCTION_VALUE_P(RTX) \
1955 (RTL_FLAG_CHECK2 ("REG_FUNCTION_VALUE_P", (RTX), REG, PARALLEL)->return_val)
1956
1957/* 1 if RTX is a reg that corresponds to a variable declared by the user. */
1958#define REG_USERVAR_P(RTX) \
1959 (RTL_FLAG_CHECK1 ("REG_USERVAR_P", (RTX), REG)->volatil)
1960
1961/* 1 if RTX is a reg that holds a pointer value. */
1962#define REG_POINTER(RTX) \
1963 (RTL_FLAG_CHECK1 ("REG_POINTER", (RTX), REG)->frame_related)
1964
1965/* 1 if RTX is a mem that holds a pointer value. */
1966#define MEM_POINTER(RTX) \
1967 (RTL_FLAG_CHECK1 ("MEM_POINTER", (RTX), MEM)->frame_related)
1968
1969/* 1 if the given register REG corresponds to a hard register. */
1970#define HARD_REGISTER_P(REG) (HARD_REGISTER_NUM_P (REGNO (REG)))
1971
1972/* 1 if the given register number REG_NO corresponds to a hard register. */
1973#define HARD_REGISTER_NUM_P(REG_NO) ((REG_NO) < FIRST_PSEUDO_REGISTER)
1974
1975/* For a CONST_INT rtx, INTVAL extracts the integer. */
1976#define INTVAL(RTX) XCWINT (RTX, 0, CONST_INT)
1977#define UINTVAL(RTX) ((unsigned HOST_WIDE_INT) INTVAL (RTX))
1978
1979/* For a CONST_WIDE_INT, CONST_WIDE_INT_NUNITS is the number of
1980 elements actually needed to represent the constant.
1981 CONST_WIDE_INT_ELT gets one of the elements. 0 is the least
1982 significant HOST_WIDE_INT. */
1983#define CONST_WIDE_INT_VEC(RTX) HWIVEC_CHECK (RTX, CONST_WIDE_INT)
1984#define CONST_WIDE_INT_NUNITS(RTX) CWI_GET_NUM_ELEM (RTX)
1985#define CONST_WIDE_INT_ELT(RTX, N) CWI_ELT (RTX, N)
1986
1987/* For a CONST_POLY_INT, CONST_POLY_INT_COEFFS gives access to the
1988 individual coefficients, in the form of a trailing_wide_ints structure. */
1989#define CONST_POLY_INT_COEFFS(RTX) \
1990 (RTL_FLAG_CHECK1("CONST_POLY_INT_COEFFS", (RTX), \
1991 CONST_POLY_INT)->u.cpi.coeffs)
1992
1993/* For a CONST_DOUBLE:
1994#if TARGET_SUPPORTS_WIDE_INT == 0
1995 For a VOIDmode, there are two integers CONST_DOUBLE_LOW is the
1996 low-order word and ..._HIGH the high-order.
1997#endif
1998 For a float, there is a REAL_VALUE_TYPE structure, and
1999 CONST_DOUBLE_REAL_VALUE(r) is a pointer to it. */
2000#define CONST_DOUBLE_LOW(r) XCMWINT (r, 0, CONST_DOUBLE, VOIDmode)
2001#define CONST_DOUBLE_HIGH(r) XCMWINT (r, 1, CONST_DOUBLE, VOIDmode)
2002#define CONST_DOUBLE_REAL_VALUE(r) \
2003 ((const struct real_value *) XCNMPRV (r, CONST_DOUBLE, VOIDmode))
2004
2005#define CONST_FIXED_VALUE(r) \
2006 ((const struct fixed_value *) XCNMPFV (r, CONST_FIXED, VOIDmode))
2007#define CONST_FIXED_VALUE_HIGH(r) \
2008 ((HOST_WIDE_INT) (CONST_FIXED_VALUE (r)->data.high))
2009#define CONST_FIXED_VALUE_LOW(r) \
2010 ((HOST_WIDE_INT) (CONST_FIXED_VALUE (r)->data.low))
2011
2012/* For a CONST_VECTOR, return element #n. */
2013#define CONST_VECTOR_ELT(RTX, N) const_vector_elt (RTX, N)
2014
2015/* See rtl.texi for a description of these macros. */
2016#define CONST_VECTOR_NPATTERNS(RTX) \
2017 (RTL_FLAG_CHECK1 ("CONST_VECTOR_NPATTERNS", (RTX), CONST_VECTOR) \
2018 ->u2.const_vector.npatterns)
2019
2020#define CONST_VECTOR_NELTS_PER_PATTERN(RTX) \
2021 (RTL_FLAG_CHECK1 ("CONST_VECTOR_NELTS_PER_PATTERN", (RTX), CONST_VECTOR) \
2022 ->u2.const_vector.nelts_per_pattern)
2023
2024#define CONST_VECTOR_DUPLICATE_P(RTX) \
2025 (CONST_VECTOR_NELTS_PER_PATTERN (RTX) == 1)
2026
2027#define CONST_VECTOR_STEPPED_P(RTX) \
2028 (CONST_VECTOR_NELTS_PER_PATTERN (RTX) == 3)
2029
2030#define CONST_VECTOR_ENCODED_ELT(RTX, N) XCVECEXP (RTX, 0, N, CONST_VECTOR)
2031
2032/* Return the number of elements encoded directly in a CONST_VECTOR. */
2033
2034inline unsigned int
2035const_vector_encoded_nelts (const_rtx x)
2036{
2037 return CONST_VECTOR_NPATTERNS (x) * CONST_VECTOR_NELTS_PER_PATTERN (x);
2038}
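A standalone model of this encoding scheme (toy struct, hypothetical names, not GCC code): only npatterns * nelts_per_pattern elements are stored, and the per-pattern count distinguishes duplicated from stepped vectors, as in the CONST_VECTOR_DUPLICATE_P/CONST_VECTOR_STEPPED_P macros above:

#include <cassert>

struct toy_const_vector { unsigned npatterns, nelts_per_pattern; };

// Mirrors const_vector_encoded_nelts.
static unsigned encoded_nelts (const toy_const_vector &v)
{ return v.npatterns * v.nelts_per_pattern; }

// Mirrors CONST_VECTOR_DUPLICATE_P: each pattern is one repeated value.
static bool duplicate_p (const toy_const_vector &v)
{ return v.nelts_per_pattern == 1; }

// Mirrors CONST_VECTOR_STEPPED_P: base, base+step, then implied steps.
static bool stepped_p (const toy_const_vector &v)
{ return v.nelts_per_pattern == 3; }

int main ()
{
  toy_const_vector splat{1, 1};    // {x, x, x, ...}
  toy_const_vector series{1, 3};   // {a, a+s, a+2s, ...}
  assert (encoded_nelts (splat) == 1 && duplicate_p (splat));
  assert (encoded_nelts (series) == 3 && stepped_p (series));
  return 0;
}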
2039
2040/* For a CONST_VECTOR, return the number of elements in a vector. */
2041#define CONST_VECTOR_NUNITS(RTX) GET_MODE_NUNITS (GET_MODE (RTX))
2042
2043/* For a SUBREG rtx, SUBREG_REG extracts the value we want a subreg of.
2044 SUBREG_BYTE extracts the byte-number. */
2045
2046#define SUBREG_REG(RTX) XCEXP (RTX, 0, SUBREG)
2047#define SUBREG_BYTE(RTX) XCSUBREG (RTX, 1, SUBREG)
2048
2049/* in rtlanal.cc */
2050/* Return the right cost to give to an operation
2051 to make the cost of the corresponding register-to-register instruction
2052 N times that of a fast register-to-register instruction. */
2053#define COSTS_N_INSNS(N) ((N) * 4)
2054
2055/* Maximum cost of an rtl expression. This value has the special meaning
2056 not to use an rtx with this cost under any circumstances. */
2057#define MAX_COST INT_MAX
2058
2059/* Return true if CODE always has VOIDmode. */
2060
2061inline bool
2062always_void_p (enum rtx_code code)
2063{
2064 return code == SET;
2065}
2066
2067/* A structure to hold all available cost information about an rtl
2068 expression. */
2069struct full_rtx_costs
2070{
2071 int speed;
2072 int size;
2073};
2074
2075/* Initialize a full_rtx_costs structure C to the maximum cost. */
2076inline void
2077init_costs_to_max (struct full_rtx_costs *c)
2078{
2079 c->speed = MAX_COST;
2080 c->size = MAX_COST;
2081}
2082
2083/* Initialize a full_rtx_costs structure C to zero cost. */
2084inline void
2085init_costs_to_zero (struct full_rtx_costs *c)
2086{
2087 c->speed = 0;
2088 c->size = 0;
2089}
2090
2091/* Compare two full_rtx_costs structures A and B, returning true
2092 if A < B when optimizing for speed. */
2093inline bool
2094costs_lt_p (struct full_rtx_costs *a, struct full_rtx_costs *b,
2095 bool speed)
2096{
2097 if (speed)
2098 return (a->speed < b->speed
2099 || (a->speed == b->speed && a->size < b->size));
2100 else
2101 return (a->size < b->size
2102 || (a->size == b->size && a->speed < b->speed));
2103}
2104
2105/* Increase both members of the full_rtx_costs structure C by the
2106 cost of N insns. */
2107inline void
2108costs_add_n_insns (struct full_rtx_costs *c, int n)
2109{
2110 c->speed += COSTS_N_INSNS (n);
2111 c->size += COSTS_N_INSNS (n);
2112}
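The cost helpers above are self-contained enough to replicate outside GCC; a runnable copy of the lexicographic comparison and the COSTS_N_INSNS scaling (toy struct and names, hypothetical, not GCC code):

#include <cassert>

const int COST_PER_INSN = 4;   // mirrors COSTS_N_INSNS (N) == (N) * 4

struct costs { int speed, size; };

// Mirrors costs_lt_p: compare on the preferred axis first, break ties
// with the other axis.
static bool lt_p (const costs &a, const costs &b, bool speed)
{
  if (speed)
    return a.speed < b.speed || (a.speed == b.speed && a.size < b.size);
  return a.size < b.size || (a.size == b.size && a.speed < b.speed);
}

int main ()
{
  costs a{2 * COST_PER_INSN, 3 * COST_PER_INSN};
  costs b{2 * COST_PER_INSN, 4 * COST_PER_INSN};
  assert (lt_p (a, b, /*speed=*/true));    // speeds tie, a is smaller
  assert (lt_p (a, b, /*speed=*/false));   // a also wins on size
  return 0;
}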
2113
2114/* Describes the shape of a subreg:
2115
2116 inner_mode == the mode of the SUBREG_REG
2117 offset == the SUBREG_BYTE
2118 outer_mode == the mode of the SUBREG itself. */
2119class subreg_shape {
2120public:
2121 subreg_shape (machine_mode, poly_uint16, machine_mode);
2122 bool operator == (const subreg_shape &) const;
2123 bool operator != (const subreg_shape &) const;
2124 unsigned HOST_WIDE_INT unique_id () const;
2125
2126 machine_mode inner_mode;
2127 poly_uint16 offset;
2128 machine_mode outer_mode;
2129};
2130
2131inline
2132subreg_shape::subreg_shape (machine_mode inner_mode_in,
2133 poly_uint16 offset_in,
2134 machine_mode outer_mode_in)
2135 : inner_mode (inner_mode_in), offset (offset_in), outer_mode (outer_mode_in)
2136{}
2137
2138inline bool
2139subreg_shape::operator == (const subreg_shape &other) const
2140{
2141 return (inner_mode == other.inner_mode
2142 && known_eq (offset, other.offset)
2143 && outer_mode == other.outer_mode);
2144}
2145
2146inline bool
2147subreg_shape::operator != (const subreg_shape &other) const
2148{
2149 return !operator == (other);
2150}
2151
2152/* Return an integer that uniquely identifies this shape. Structures
2153 like rtx_def assume that a mode can fit in an 8-bit bitfield and no
2154 current mode is anywhere near being 65536 bytes in size, so the
2155 id comfortably fits in an int. */
2156
2157inline unsigned HOST_WIDE_INT
2158subreg_shape::unique_id () const
2159{
2160 { STATIC_ASSERT (MAX_MACHINE_MODE <= 256); }
2161 { STATIC_ASSERT (NUM_POLY_INT_COEFFS <= 3); }
2162 { STATIC_ASSERT (sizeof (offset.coeffs[0]) <= 2); }
2163 int res = (int) inner_mode + ((int) outer_mode << 8);
2164 for (int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
2165 res += (HOST_WIDE_INT) offset.coeffs[i] << ((1 + i) * 16);
2166 return res;
2167}
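A standalone sketch of the bit-packing unique_id performs: 8 bits for the inner mode, 8 for the outer mode, then 16 bits per offset coefficient. Plain integers stand in for machine_mode and poly_uint16, and a single coefficient is assumed, as when NUM_POLY_INT_COEFFS is 1 (hypothetical names, not GCC code):

#include <cassert>

static unsigned long long pack_shape (int inner_mode, int outer_mode,
                                      unsigned short offset)
{
  unsigned long long res =
    (unsigned) inner_mode + ((unsigned) outer_mode << 8);
  res += (unsigned long long) offset << 16;   // single-coefficient case
  return res;
}

int main ()
{
  assert (pack_shape (1, 2, 0) != pack_shape (2, 1, 0));  // modes distinguish
  assert (pack_shape (1, 2, 4) != pack_shape (1, 2, 8));  // offsets distinguish
  return 0;
}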
2168
2169/* Return the shape of a SUBREG rtx. */
2170
2171inline subreg_shape
2172shape_of_subreg (const_rtx x)
2173{
2174 return subreg_shape (GET_MODE (SUBREG_REG (x)),
2175 SUBREG_BYTE (x), GET_MODE (x));
2176}
2177
2178/* Information about an address. This structure is supposed to be able
2179 to represent all supported target addresses. Please extend it if it
2180 is not yet general enough. */
2181struct address_info {
2182 /* The mode of the value being addressed, or VOIDmode if this is
2183 a load-address operation with no known address mode. */
2184 machine_mode mode;
2185
2186 /* The address space. */
2187 addr_space_t as;
2188
2189 /* True if this is an RTX_AUTOINC address. */
2190 bool autoinc_p;
2191
2192 /* A pointer to the top-level address. */
2193 rtx *outer;
2194
2195 /* A pointer to the inner address, after all address mutations
2196 have been stripped from the top-level address. It can be one
2197 of the following:
2198
2199 - A {PRE,POST}_{INC,DEC} of *BASE. SEGMENT, INDEX and DISP are null.
2200
2201 - A {PRE,POST}_MODIFY of *BASE. In this case either INDEX or DISP
2202 points to the step value, depending on whether the step is variable
2203 or constant respectively. SEGMENT is null.
2204
2205 - A plain sum of the form SEGMENT + BASE + INDEX + DISP,
2206 with null fields evaluating to 0. */
2207 rtx *inner;
2208
2209 /* Components that make up *INNER. Each one may be null or nonnull.
2210 When nonnull, their meanings are as follows:
2211
2212 - *SEGMENT is the "segment" of memory to which the address refers.
2213 This value is entirely target-specific and is only called a "segment"
2214 because that's its most typical use. It contains exactly one UNSPEC,
2215 pointed to by SEGMENT_TERM. The contents of *SEGMENT do not need
2216 reloading.
2217
2218 - *BASE is a variable expression representing a base address.
2219 It contains exactly one REG, SUBREG or MEM, pointed to by BASE_TERM.
2220
2221 - *INDEX is a variable expression representing an index value.
2222 It may be a scaled expression, such as a MULT. It has exactly
2223 one REG, SUBREG or MEM, pointed to by INDEX_TERM.
2224
2225 - *DISP is a constant, possibly mutated. DISP_TERM points to the
2226 unmutated RTX_CONST_OBJ. */
2227 rtx *segment;
2228 rtx *base;
2229 rtx *index;
2230 rtx *disp;
2231
2232 rtx *segment_term;
2233 rtx *base_term;
2234 rtx *index_term;
2235 rtx *disp_term;
2236
2237 /* In a {PRE,POST}_MODIFY address, this points to a second copy
2238 of BASE_TERM, otherwise it is null. */
2239 rtx *base_term2;
2240
2241 /* ADDRESS if this structure describes an address operand, MEM if
2242 it describes a MEM address. */
2243 enum rtx_code addr_outer_code;
2244
2245 /* If BASE is nonnull, this is the code of the rtx that contains it. */
2246 enum rtx_code base_outer_code;
2247};
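A standalone sketch of the "null fields evaluating to 0" convention for the SEGMENT + BASE + INDEX + DISP sum (toy value types and hypothetical names; GCC's real structure stores rtx * pointers into the address, not values):

#include <cassert>

// Toy decomposed address; each component is null or points at its term.
struct toy_address
{
  const long long *segment;
  const long long *base;
  const long long *index;   // already scaled, e.g. reg * 4
  const long long *disp;
};

// Sum the components, with null fields contributing 0.
static long long effective_address (const toy_address &a)
{
  long long sum = 0;
  if (a.segment) sum += *a.segment;
  if (a.base)    sum += *a.base;
  if (a.index)   sum += *a.index;
  if (a.disp)    sum += *a.disp;
  return sum;
}

int main ()
{
  long long base = 0x1000, index = 4 * 8, disp = 16;
  toy_address a{nullptr, &base, &index, &disp};   // no segment
  assert (effective_address (a) == 0x1000 + 32 + 16);
  return 0;
}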
2248
2249/* This is used to bundle an rtx and a mode together so that the pair
2250 can be used with the wi:: routines. If we ever put modes into rtx
2251 integer constants, this should go away and then just pass an rtx in. */
2252typedef std::pair <rtx, machine_mode> rtx_mode_t;
2253
2254namespace wi
2255{
2256 template <>
2257 struct int_traits <rtx_mode_t>
2258 {
2259 static const enum precision_type precision_type = VAR_PRECISION;
2260 static const bool host_dependent_precision = false;
2261 /* This ought to be true, except for the special case that BImode
2262 is canonicalized to STORE_FLAG_VALUE, which might be 1. */
2263 static const bool is_sign_extended = false;
2264 static unsigned int get_precision (const rtx_mode_t &);
2265 static wi::storage_ref decompose (HOST_WIDE_INT *, unsigned int,
2266 const rtx_mode_t &);
2267 };
2268}
2269
2270inline unsigned int
2271wi::int_traits <rtx_mode_t>::get_precision (const rtx_mode_t &x)
2272{
2273 return GET_MODE_PRECISION (as_a <scalar_mode> (x.second));
2274}
2275
2276inline wi::storage_ref
2277wi::int_traits <rtx_mode_t>::decompose (HOST_WIDE_INT *,
2278 unsigned int precision,
2279 const rtx_mode_t &x)
2280{
2281 gcc_checking_assert (precision == get_precision (x));
2282 switch (GET_CODE (x.first))
2283 {
2284 case CONST_INT:
2285 if (precision < HOST_BITS_PER_WIDE_INT)
2286 /* Nonzero BImodes are stored as STORE_FLAG_VALUE, which on many
2287 targets is 1 rather than -1. */
2288 gcc_checking_assert (INTVAL (x.first)
2289 == sext_hwi (INTVAL (x.first), precision)
2290 || (x.second == BImode && INTVAL (x.first) == 1));
2291
2292 return wi::storage_ref (&INTVAL (x.first), 1, precision);
2293
2294 case CONST_WIDE_INT:
2295 return wi::storage_ref (&CONST_WIDE_INT_ELT (x.first, 0),
2296 CONST_WIDE_INT_NUNITS (x.first), precision);
2297
2298#if TARGET_SUPPORTS_WIDE_INT == 0
2299 case CONST_DOUBLE:
2300 return wi::storage_ref (&CONST_DOUBLE_LOW (x.first), 2, precision);
2301#endif
2302
2303 default:
2304 gcc_unreachable ();
2305 }
2306}
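The CONST_INT branch above asserts that the stored value is already sign-extended from its precision, with the BImode/STORE_FLAG_VALUE exception noted in the comment. A standalone re-implementation of that invariant check (local sext_hwi, hypothetical; assumes a 64-bit long long as the HOST_WIDE_INT stand-in):

#include <cassert>

// Sign-extend X from PREC bits to the full width of long long.
static long long sext_hwi (long long x, unsigned prec)
{
  unsigned shift = 8 * sizeof (long long) - prec;
  return (long long) ((unsigned long long) x << shift) >> shift;
}

int main ()
{
  assert (sext_hwi (0xFF, 8) == -1);   // canonical 8-bit -1
  assert (sext_hwi (-1, 8) == -1);     // already sign-extended
  // The BImode exception: a nonzero BImode CONST_INT may be stored as 1
  // even though the canonical 1-bit sign extension of 1 is -1.
  long long bimode_true = 1;
  assert (sext_hwi (bimode_true, 1) == -1 && bimode_true == 1);
  return 0;
}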
2307
2308namespace wi
2309{
2310 hwi_with_prec shwi (HOST_WIDE_INT, machine_mode mode);
2311 wide_int min_value (machine_mode, signop);
2312 wide_int max_value (machine_mode, signop);
2313}
2314
2315inline wi::hwi_with_prec
2316wi::shwi (HOST_WIDE_INT val, machine_mode mode)
2317{
2318 return shwi (val, GET_MODE_PRECISION (as_a <scalar_mode> (mode)));
2319}
2320
2321/* Produce the smallest number that is represented in MODE. The precision
2322 is taken from MODE and the sign from SGN. */
2323inline wide_int
2324wi::min_value (machine_mode mode, signop sgn)
2325{
2326 return min_value (GET_MODE_PRECISION (as_a <scalar_mode> (mode)), sgn);
2327}
2328
2329/* Produce the largest number that is represented in MODE. The precision
2330 is taken from MODE and the sign from SGN. */
2331inline wide_int
2332wi::max_value (machine_mode mode, signop sgn)
2333{
2334 return max_value (GET_MODE_PRECISION (as_a <scalar_mode> (mode)), sgn);
2335}
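For a scalar precision, these wrappers reduce to familiar two's-complement bounds. A standalone sketch of the underlying arithmetic (plain integers instead of wide_int; hypothetical helper names; this sketch stops at 64 bits):

#include <cassert>

static long long smin_for_precision (unsigned prec)
{ return -(1LL << (prec - 1)); }
static long long smax_for_precision (unsigned prec)
{ return (1LL << (prec - 1)) - 1; }
static unsigned long long umax_for_precision (unsigned prec)
{ return prec == 64 ? ~0ULL : (1ULL << prec) - 1; }

int main ()
{
  // A 16-bit (HImode-like) precision:
  assert (smin_for_precision (16) == -32768);
  assert (smax_for_precision (16) == 32767);
  assert (umax_for_precision (16) == 65535);
  return 0;
}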
2336
2337namespace wi
2338{
2339 typedef poly_int<NUM_POLY_INT_COEFFS,
2340 generic_wide_int <wide_int_ref_storage <false, false> > >
2341 rtx_to_poly_wide_ref;
2342 rtx_to_poly_wide_ref to_poly_wide (const_rtx, machine_mode);
2343}
2344
2345/* Return the value of a CONST_POLY_INT in its native precision. */
2346
2347inline wi::rtx_to_poly_wide_ref
2348const_poly_int_value (const_rtx x)
2349{
2350 poly_int<NUM_POLY_INT_COEFFS, WIDE_INT_REF_FOR (wide_int)> res;
2351 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
2352 res.coeffs[i] = CONST_POLY_INT_COEFFS (x)[i];
2353 return res;
2354}
2355
2356/* Return true if X is a scalar integer or a CONST_POLY_INT. The value
2357 can then be extracted using wi::to_poly_wide. */
2358
2359inline bool
2360poly_int_rtx_p (const_rtx x)
2361{
2362 return CONST_SCALAR_INT_P (x) || CONST_POLY_INT_P (x);
2363}
2364
2365/* Access X (which satisfies poly_int_rtx_p) as a poly_wide_int.
2366 MODE is the mode of X. */
2367
2368inline wi::rtx_to_poly_wide_ref
2369wi::to_poly_wide (const_rtx x, machine_mode mode)
2370{
2371 if (CONST_POLY_INT_P (x))
2372 return const_poly_int_value (x);
2373 return rtx_mode_t (const_cast<rtx> (x), mode);
2374}
2375
2376/* Return the value of X as a poly_int64. */
2377
2378inline poly_int64
2379rtx_to_poly_int64 (const_rtx x)
2380{
2381 if (CONST_POLY_INT_P (x))
2382 {
2383 poly_int64 res;
2384 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
2385 res.coeffs[i] = CONST_POLY_INT_COEFFS (x)[i].to_shwi ();
2386 return res;
2387 }
2388 return INTVAL (x);
2389}
2390
2391/* Return true if arbitrary value X is an integer constant that can
2392 be represented as a poly_int64. Store the value in *RES if so,
2393 otherwise leave it unmodified. */
2394
2395inline bool
2396poly_int_rtx_p (const_rtx x, poly_int64_pod *res)
2397{
2398 if (CONST_INT_P (x))
2399 {
2400 *res = INTVAL (x);
2401 return true;
2402 }
2403 if (CONST_POLY_INT_P (x))
2404 {
2405 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS1; ++i)
2406 if (!wi::fits_shwi_p (CONST_POLY_INT_COEFFS (x)[i]))
2407 return false;
2408 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
2409 res->coeffs[i] = CONST_POLY_INT_COEFFS (x)[i].to_shwi ();
2410 return true;
2411 }
2412 return false;
2413}
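Note the check-then-store discipline in the overload above: every coefficient is validated before *RES is written, so the output stays unmodified on failure, as the comment promises. A standalone sketch of the same pattern (toy two-coefficient "poly_int", hypothetical names, not GCC code):

#include <cassert>
#include <climits>

static bool fits_int (long long c) { return c >= INT_MIN && c <= INT_MAX; }

// Narrow two wide coefficients to int, touching *res only on success.
static bool to_int_coeffs (const long long (&coeffs)[2], int (*res)[2])
{
  for (long long c : coeffs)
    if (!fits_int (c))
      return false;            // reject before writing anything
  for (int i = 0; i < 2; ++i)
    (*res)[i] = (int) coeffs[i];
  return true;
}

int main ()
{
  int out[2] = {7, 7};
  long long bad[2] = {1, 1LL << 40};
  assert (!to_int_coeffs (bad, &out) && out[0] == 7);   // *res untouched
  long long good[2] = {3, 4};
  assert (to_int_coeffs (good, &out) && out[1] == 4);
  return 0;
}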
2414
2415extern void init_rtlanal (void);
2416extern int rtx_cost (rtx, machine_mode, enum rtx_code, int, bool);
2417extern int address_cost (rtx, machine_mode, addr_space_t, bool);
2418extern void get_full_rtx_cost (rtx, machine_mode, enum rtx_code, int,
2419 struct full_rtx_costs *);
2420extern bool native_encode_rtx (machine_mode, rtx, vec<target_unit> &,
2421 unsigned int, unsigned int);
2422extern rtx native_decode_rtx (machine_mode, const vec<target_unit> &,
2423 unsigned int);
2424extern rtx native_decode_vector_rtx (machine_mode, const vec<target_unit> &,
2425 unsigned int, unsigned int, unsigned int);
2426extern poly_uint64 subreg_lsb (const_rtx);
2427extern poly_uint64 subreg_size_lsb (poly_uint64, poly_uint64, poly_uint64);
2428extern poly_uint64 subreg_size_offset_from_lsb (poly_uint64, poly_uint64,
2429 poly_uint64);
2430extern bool read_modify_subreg_p (const_rtx);
2431
2432/* Given a subreg's OUTER_MODE, INNER_MODE, and SUBREG_BYTE, return the
2433 bit offset at which the subreg begins (counting from the least significant
2434 bit of the operand). */
2435
2436inline poly_uint64
2437subreg_lsb_1 (machine_mode outer_mode, machine_mode inner_mode,
2438 poly_uint64 subreg_byte)
2439{
2440 return subreg_size_lsb (GET_MODE_SIZE (outer_mode),
2441 GET_MODE_SIZE (inner_mode), subreg_byte);
2442}
2443
2444/* Return the subreg byte offset for a subreg whose outer mode is
2445 OUTER_MODE, whose inner mode is INNER_MODE, and where there are
2446 LSB_SHIFT *bits* between the lsb of the outer value and the lsb of
2447 the inner value. This is the inverse of subreg_lsb_1 (which converts
2448 byte offsets to bit shifts). */
2449
2450inline poly_uint64
2451subreg_offset_from_lsb (machine_mode outer_mode,
2452 machine_mode inner_mode,
2453 poly_uint64 lsb_shift)
2454{
2455 return subreg_size_offset_from_lsb (GET_MODE_SIZE (outer_mode),
2456 GET_MODE_SIZE (inner_mode), lsb_shift);
2457}
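/* A worked example, illustrative only: on a little-endian target the
   upper half of a DImode value viewed as SImode is the subreg at byte
   offset 4, so subreg_lsb_1 (SImode, DImode, 4) yields 32, and
   subreg_offset_from_lsb (SImode, DImode, 32) yields 4 again; the two
   functions are inverses of each other.  */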
2458
2459extern unsigned int subreg_regno_offset (unsigned int, machine_mode,
2460 poly_uint64, machine_mode);
2461extern bool subreg_offset_representable_p (unsigned int, machine_mode,
2462 poly_uint64, machine_mode);
2463extern unsigned int subreg_regno (const_rtx);
2464extern int simplify_subreg_regno (unsigned int, machine_mode,
2465 poly_uint64, machine_mode);
2466extern int lowpart_subreg_regno (unsigned int, machine_mode,
2467 machine_mode);
2468extern unsigned int subreg_nregs (const_rtx);
2469extern unsigned int subreg_nregs_with_regno (unsigned int, const_rtx);
2470extern unsigned HOST_WIDE_INT nonzero_bits (const_rtx, machine_mode);
2471extern unsigned int num_sign_bit_copies (const_rtx, machine_mode);
2472extern bool constant_pool_constant_p (rtx);
2473extern bool truncated_to_mode (machine_mode, const_rtx);
2474extern int low_bitmask_len (machine_mode, unsigned HOST_WIDE_INT);
2475extern void split_double (rtx, rtx *, rtx *);
2476extern rtx *strip_address_mutations (rtx *, enum rtx_code * = 0);
2477extern void decompose_address (struct address_info *, rtx *,
2478 machine_mode, addr_space_t, enum rtx_code);
2479extern void decompose_lea_address (struct address_info *, rtx *);
2480extern void decompose_mem_address (struct address_info *, rtx);
2481extern void update_address (struct address_info *);
2482extern HOST_WIDE_INT get_index_scale (const struct address_info *);
2483extern enum rtx_code get_index_code (const struct address_info *);
2484
2485/* 1 if RTX is a subreg containing a reg that is already known to be
2486 sign- or zero-extended from the mode of the subreg to the mode of
2487 the reg. SUBREG_PROMOTED_UNSIGNED_P gives the signedness of the
2488 extension.
2489
2490   When used as a LHS, it means that this extension must be done
2491 when assigning to SUBREG_REG. */
2492
2493#define SUBREG_PROMOTED_VAR_P(RTX) \
2494  (RTL_FLAG_CHECK1 ("SUBREG_PROMOTED", (RTX), SUBREG)->in_struct)
2495
2496/* Valid for subregs which are SUBREG_PROMOTED_VAR_P(). In that case
2497 this gives the necessary extensions:
2498   0  - signed (SRP_SIGNED)
2499   1  - normal unsigned (SRP_UNSIGNED)
2500   2  - value is both sign- and unsigned-extended for mode
2501	(SRP_SIGNED_AND_UNSIGNED).
2502   -1 - pointer unsigned, which most often can be handled like unsigned
2503	extension, except for generating instructions where we need to
2504	emit special code (ptr_extend insns) on some architectures
2505	(SRP_POINTER).  */
2506
2507const int SRP_POINTER = -1;
2508const int SRP_SIGNED = 0;
2509const int SRP_UNSIGNED = 1;
2510const int SRP_SIGNED_AND_UNSIGNED = 2;
2511
2512/* Sets promoted mode for SUBREG_PROMOTED_VAR_P(). */
2513#define SUBREG_PROMOTED_SET(RTX, VAL)				\
2514do {								\
2515  rtx const _rtx = RTL_FLAG_CHECK1 ("SUBREG_PROMOTED_SET",	\
2516				      (RTX), SUBREG);		\
2517  switch (VAL)							\
2518  {								\
2519    case SRP_POINTER:						\
2520      _rtx->volatil = 0;					\
2521      _rtx->unchanging = 0;					\
2522      break;							\
2523    case SRP_SIGNED:						\
2524      _rtx->volatil = 0;					\
2525      _rtx->unchanging = 1;					\
2526      break;							\
2527    case SRP_UNSIGNED:						\
2528      _rtx->volatil = 1;					\
2529      _rtx->unchanging = 0;					\
2530      break;							\
2531    case SRP_SIGNED_AND_UNSIGNED:				\
2532      _rtx->volatil = 1;					\
2533      _rtx->unchanging = 1;					\
2534      break;							\
2535  }								\
2536} while (0)
2537
2538/* Gets the value stored in promoted mode for SUBREG_PROMOTED_VAR_P(),
2539 including SRP_SIGNED_AND_UNSIGNED if promoted for
2540 both signed and unsigned. */
2541#define SUBREG_PROMOTED_GET(RTX)	\
2542  (2 * (RTL_FLAG_CHECK1 ("SUBREG_PROMOTED_GET", (RTX), SUBREG)->volatil) \
2543   + (RTX)->unchanging - 1)
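/* An illustrative note (not part of rtl.h): the two flag bits pack
   the four SRP_* values so that SUBREG_PROMOTED_GET recovers them as
   2 * volatil + unchanging - 1:

     volatil  unchanging   result
	0	  0	     -1   SRP_POINTER
	0	  1	      0   SRP_SIGNED
	1	  0	      1   SRP_UNSIGNED
	1	  1	      2   SRP_SIGNED_AND_UNSIGNED  */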
2544
2545/* Returns sign of promoted mode for SUBREG_PROMOTED_VAR_P(). */
2546#define SUBREG_PROMOTED_SIGN(RTX)	\
2547  ((RTL_FLAG_CHECK1 ("SUBREG_PROMOTED_SIGN", (RTX), SUBREG)->volatil) ? 1 \
2548   : (RTX)->unchanging - 1)
2549
2550/* Predicate to check if RTX of SUBREG_PROMOTED_VAR_P() is promoted
2551 for SIGNED type. */
2552#define SUBREG_PROMOTED_SIGNED_P(RTX)	\
2553  (RTL_FLAG_CHECK1 ("SUBREG_PROMOTED_SIGNED_P", (RTX), SUBREG)->unchanging)
2554
2555/* Predicate to check if RTX of SUBREG_PROMOTED_VAR_P() is promoted
2556 for UNSIGNED type. */
2557#define SUBREG_PROMOTED_UNSIGNED_P(RTX)	\
2558  (RTL_FLAG_CHECK1 ("SUBREG_PROMOTED_UNSIGNED_P", (RTX), SUBREG)->volatil)
2559
2560/* Checks if RTX of SUBREG_PROMOTED_VAR_P() is promoted for given SIGN. */
2561#define SUBREG_CHECK_PROMOTED_SIGN(RTX, SIGN)	\
2562((SIGN) == SRP_POINTER ? SUBREG_PROMOTED_GET (RTX) == SRP_POINTER	\
2563 : (SIGN) == SRP_SIGNED ? SUBREG_PROMOTED_SIGNED_P (RTX)		\
2564 : SUBREG_PROMOTED_UNSIGNED_P (RTX))
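/* A usage sketch with hypothetical locals (`sub' is assumed, not from
   rtl.h): expander code can ask whether a promoted subreg already
   provides the extension it needs, and if so reuse the inner register
   directly:

     if (SUBREG_PROMOTED_VAR_P (sub)
	 && SUBREG_CHECK_PROMOTED_SIGN (sub, SRP_UNSIGNED))
       return SUBREG_REG (sub);  */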
2565
2566/* True if the REG is the static chain register for some CALL_INSN. */
2567#define STATIC_CHAIN_REG_P(RTX)	\
2568  (RTL_FLAG_CHECK1 ("STATIC_CHAIN_REG_P", (RTX), REG)->jump)
2569
2570/* True if the subreg was generated by LRA for reload insns. Such
2571 subregs are valid only during LRA. */
2572#define LRA_SUBREG_P(RTX)	\
2573  (RTL_FLAG_CHECK1 ("LRA_SUBREG_P", (RTX), SUBREG)->jump)
2574
2575/* Access various components of an ASM_OPERANDS rtx. */
2576
2577#define ASM_OPERANDS_TEMPLATE(RTX) XCSTR (RTX, 0, ASM_OPERANDS)
2578#define ASM_OPERANDS_OUTPUT_CONSTRAINT(RTX) XCSTR (RTX, 1, ASM_OPERANDS)
2579#define ASM_OPERANDS_OUTPUT_IDX(RTX) XCINT (RTX, 2, ASM_OPERANDS)
2580#define ASM_OPERANDS_INPUT_VEC(RTX) XCVEC (RTX, 3, ASM_OPERANDS)
2581#define ASM_OPERANDS_INPUT_CONSTRAINT_VEC(RTX) XCVEC (RTX, 4, ASM_OPERANDS)
2582#define ASM_OPERANDS_INPUT(RTX, N) XCVECEXP (RTX, 3, N, ASM_OPERANDS)
2583#define ASM_OPERANDS_INPUT_LENGTH(RTX) XCVECLEN (RTX, 3, ASM_OPERANDS)
2584#define ASM_OPERANDS_INPUT_CONSTRAINT_EXP(RTX, N) \
2585  XCVECEXP (RTX, 4, N, ASM_OPERANDS)
2586#define ASM_OPERANDS_INPUT_CONSTRAINT(RTX, N) \
2587  XSTR (XCVECEXP (RTX, 4, N, ASM_OPERANDS), 0)
2588#define ASM_OPERANDS_INPUT_MODE(RTX, N)  \
2589  GET_MODE (XCVECEXP (RTX, 4, N, ASM_OPERANDS))
2590#define ASM_OPERANDS_LABEL_VEC(RTX) XCVEC (RTX, 5, ASM_OPERANDS)
2591#define ASM_OPERANDS_LABEL_LENGTH(RTX) XCVECLEN (RTX, 5, ASM_OPERANDS)
2592#define ASM_OPERANDS_LABEL(RTX, N) XCVECEXP (RTX, 5, N, ASM_OPERANDS)
2593#define ASM_OPERANDS_SOURCE_LOCATION(RTX) XCUINT (RTX, 6, ASM_OPERANDS)
2594#define ASM_INPUT_SOURCE_LOCATION(RTX) XCUINT (RTX, 1, ASM_INPUT)
2595
2596/* 1 if RTX is a mem that is statically allocated in read-only memory. */
2597#define MEM_READONLY_P(RTX) \
2598  (RTL_FLAG_CHECK1 ("MEM_READONLY_P", (RTX), MEM)->unchanging)
2599
2600/* 1 if RTX is a mem and we should keep the alias set for this mem
2601   unchanged when we access a component.  Set to 1, for example, when we
2602 are already in a non-addressable component of an aggregate. */
2603#define MEM_KEEP_ALIAS_SET_P(RTX)	\
2604  (RTL_FLAG_CHECK1 ("MEM_KEEP_ALIAS_SET_P", (RTX), MEM)->jump)
2605
2606/* 1 if RTX is a mem or asm_operand for a volatile reference. */
2607#define MEM_VOLATILE_P(RTX)	\
2608  (RTL_FLAG_CHECK3 ("MEM_VOLATILE_P", (RTX), MEM, ASM_OPERANDS,	\
2609		    ASM_INPUT)->volatil)
2610
2611/* 1 if RTX is a mem that cannot trap. */
2612#define MEM_NOTRAP_P(RTX) \
2613  (RTL_FLAG_CHECK1 ("MEM_NOTRAP_P", (RTX), MEM)->call)
2614
2615/* The memory attribute block. We provide access macros for each value
2616 in the block and provide defaults if none specified. */
2617#define MEM_ATTRS(RTX) X0MEMATTR (RTX, 1)
2618
2619/* The register attribute block. We provide access macros for each value
2620 in the block and provide defaults if none specified. */
2621#define REG_ATTRS(RTX) (REG_CHECK (RTX)->attrs)
2622
2623#ifndef GENERATOR_FILE
2624/* For a MEM rtx, the alias set. If 0, this MEM is not in any alias
2625 set, and may alias anything. Otherwise, the MEM can only alias
2626 MEMs in a conflicting alias set. This value is set in a
2627 language-dependent manner in the front-end, and should not be
2628 altered in the back-end. These set numbers are tested with
2629 alias_sets_conflict_p. */
2630#define MEM_ALIAS_SET(RTX) (get_mem_attrs (RTX)->alias)
2631
2632/* For a MEM rtx, the decl it is known to refer to, if it is known to
2633 refer to part of a DECL. It may also be a COMPONENT_REF. */
2634#define MEM_EXPR(RTX) (get_mem_attrs (RTX)->expr)
2635
2636/* For a MEM rtx, true if its MEM_OFFSET is known. */
2637#define MEM_OFFSET_KNOWN_P(RTX) (get_mem_attrs (RTX)->offset_known_p)
2638
2639/* For a MEM rtx, the offset from the start of MEM_EXPR. */
2640#define MEM_OFFSET(RTX) (get_mem_attrs (RTX)->offset)
2641
2642/* For a MEM rtx, the address space. */
2643#define MEM_ADDR_SPACE(RTX) (get_mem_attrs (RTX)->addrspace)
2644
2645/* For a MEM rtx, true if its MEM_SIZE is known. */
2646#define MEM_SIZE_KNOWN_P(RTX) (get_mem_attrs (RTX)->size_known_p)
2647
2648/* For a MEM rtx, the size in bytes of the MEM. */
2649#define MEM_SIZE(RTX) (get_mem_attrs (RTX)->size)
2650
2651/* For a MEM rtx, the alignment in bits. We can use the alignment of the
2652   mode as a default when STRICT_ALIGNMENT, but not otherwise.  */
2653#define MEM_ALIGN(RTX) (get_mem_attrs (RTX)->align)
2654#else
2655#define MEM_ADDR_SPACE(RTX) ADDR_SPACE_GENERIC
2656#endif
2657
2658/* For a REG rtx, the decl it is known to refer to, if it is known to
2659 refer to part of a DECL. */
2660#define REG_EXPR(RTX) (REG_ATTRS (RTX) == 0 ? 0 : REG_ATTRS (RTX)->decl)
2661
2662/* For a REG rtx, the offset from the start of REG_EXPR, if known, as a
2663   HOST_WIDE_INT.  */
2664#define REG_OFFSET(RTX) (REG_ATTRS (RTX) == 0 ? 0 : REG_ATTRS (RTX)->offset)
2665
2666/* Copy the attributes that apply to memory locations from RHS to LHS. */
2667#define MEM_COPY_ATTRIBUTES(LHS, RHS)				\
2668  (MEM_VOLATILE_P (LHS) = MEM_VOLATILE_P (RHS),		\
2669   MEM_NOTRAP_P (LHS) = MEM_NOTRAP_P (RHS),			\
2670   MEM_READONLY_P (LHS) = MEM_READONLY_P (RHS),		\
2671   MEM_KEEP_ALIAS_SET_P (LHS) = MEM_KEEP_ALIAS_SET_P (RHS),	\
2672   MEM_POINTER (LHS) = MEM_POINTER (RHS),			\
2673   MEM_ATTRS (LHS) = MEM_ATTRS (RHS))
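/* A usage sketch with hypothetical locals (`old_mem', `new_addr' are
   assumptions, not from rtl.h): after rebuilding a MEM around a new
   address, the flag bits and the attribute block are carried over in
   one step:

     rtx new_mem = gen_rtx_MEM (GET_MODE (old_mem), new_addr);
     MEM_COPY_ATTRIBUTES (new_mem, old_mem);  */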
2674
2675/* 1 if RTX is a label_ref for a nonlocal label. */
2676/* Likewise in an expr_list for a REG_LABEL_OPERAND or
2677 REG_LABEL_TARGET note. */
2678#define LABEL_REF_NONLOCAL_P(RTX) \
2679  (RTL_FLAG_CHECK1 ("LABEL_REF_NONLOCAL_P", (RTX), LABEL_REF)->volatil)
2680
2681/* 1 if RTX is a code_label that should always be considered to be needed. */
2682#define LABEL_PRESERVE_P(RTX)	\
2683  (RTL_FLAG_CHECK2 ("LABEL_PRESERVE_P", (RTX), CODE_LABEL, NOTE)->in_struct)
2684
2685/* During sched, 1 if RTX is an insn that must be scheduled together
2686 with the preceding insn. */
2687#define SCHED_GROUP_P(RTX)	\
2688  (RTL_FLAG_CHECK4 ("SCHED_GROUP_P", (RTX), DEBUG_INSN, INSN,	\
2689		     JUMP_INSN, CALL_INSN)->in_struct)
2690
2691/* For a SET rtx, SET_DEST is the place that is set
2692 and SET_SRC is the value it is set to. */
2693#define SET_DEST(RTX) XC2EXP (RTX, 0, SET, CLOBBER)
2694#define SET_SRC(RTX) XCEXP (RTX, 1, SET)
2695#define SET_IS_RETURN_P(RTX)	\
2696  (RTL_FLAG_CHECK1 ("SET_IS_RETURN_P", (RTX), SET)->jump)
2697
2698/* For a TRAP_IF rtx, TRAP_CONDITION is an expression. */
2699#define TRAP_CONDITION(RTX) XCEXP (RTX, 0, TRAP_IF)
2700#define TRAP_CODE(RTX) XCEXP (RTX, 1, TRAP_IF)
2701
2702/* For a COND_EXEC rtx, COND_EXEC_TEST is the condition to base
2703 conditionally executing the code on, COND_EXEC_CODE is the code
2704 to execute if the condition is true. */
2705#define COND_EXEC_TEST(RTX) XCEXP (RTX, 0, COND_EXEC)
2706#define COND_EXEC_CODE(RTX) XCEXP (RTX, 1, COND_EXEC)
2707
2708/* 1 if RTX is a symbol_ref that addresses this function's rtl
2709 constants pool. */
2710#define CONSTANT_POOL_ADDRESS_P(RTX)	\
2711  (RTL_FLAG_CHECK1 ("CONSTANT_POOL_ADDRESS_P", (RTX), SYMBOL_REF)->unchanging)
2712
2713/* 1 if RTX is a symbol_ref that addresses a value in the file's
2714 tree constant pool. This information is private to varasm.cc. */
2715#define TREE_CONSTANT_POOL_ADDRESS_P(RTX)	\
2716  (RTL_FLAG_CHECK1 ("TREE_CONSTANT_POOL_ADDRESS_P",	\
2717		     (RTX), SYMBOL_REF)->frame_related)
2718
2719/* Used if RTX is a symbol_ref, for machine-specific purposes. */
2720#define SYMBOL_REF_FLAG(RTX)	\
2721  (RTL_FLAG_CHECK1 ("SYMBOL_REF_FLAG", (RTX), SYMBOL_REF)->volatil)
2722
2723/* 1 if RTX is a symbol_ref that has been used as the library function in
2724 emit_library_call. */
2725#define SYMBOL_REF_USED(RTX)	\
2726  (RTL_FLAG_CHECK1 ("SYMBOL_REF_USED", (RTX), SYMBOL_REF)->used)
2727
2728/* 1 if RTX is a symbol_ref for a weak symbol. */
2729#define SYMBOL_REF_WEAK(RTX)	\
2730  (RTL_FLAG_CHECK1 ("SYMBOL_REF_WEAK", (RTX), SYMBOL_REF)->return_val)
2731
2732/* A pointer attached to the SYMBOL_REF; either SYMBOL_REF_DECL or
2733 SYMBOL_REF_CONSTANT. */
2734#define SYMBOL_REF_DATA(RTX) X0ANY ((RTX), 1)
2735
2736/* Set RTX's SYMBOL_REF_DECL to DECL. RTX must not be a constant
2737 pool symbol. */
2738#define SET_SYMBOL_REF_DECL(RTX, DECL) \
2739  (gcc_assert (!CONSTANT_POOL_ADDRESS_P (RTX)), X0TREE ((RTX), 1) = (DECL))
2740
2741/* The tree (decl or constant) associated with the symbol, or null. */
2742#define SYMBOL_REF_DECL(RTX) \
2743  (CONSTANT_POOL_ADDRESS_P (RTX) ? NULL : X0TREE ((RTX), 1))
2744
2745/* Set RTX's SYMBOL_REF_CONSTANT to C. RTX must be a constant pool symbol. */
2746#define SET_SYMBOL_REF_CONSTANT(RTX, C) \
2747  (gcc_assert (CONSTANT_POOL_ADDRESS_P (RTX)), X0CONSTANT ((RTX), 1) = (C))
2748
2749/* The rtx constant pool entry for a symbol, or null. */
2750#define SYMBOL_REF_CONSTANT(RTX) \
2751  (CONSTANT_POOL_ADDRESS_P (RTX) ? X0CONSTANT ((RTX), 1) : NULL)
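/* An illustrative note (`sym' is a hypothetical local, not from
   rtl.h): the decl and constant-pool accessors are mutually
   exclusive, keyed on CONSTANT_POOL_ADDRESS_P, so at most one of them
   is nonnull for a given symbol:

     if (SYMBOL_REF_DECL (sym))
       ;  // sym names a variable or function
     else if (SYMBOL_REF_CONSTANT (sym))
       ;  // sym addresses this function's constant pool  */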
2752
2753/* A set of flags on a symbol_ref that are, in some respects, redundant with
2754 information derivable from the tree decl associated with this symbol.
2755 Except that we build a *lot* of SYMBOL_REFs that aren't associated with a
2756 decl. In some cases this is a bug. But beyond that, it's nice to cache
2757 this information to avoid recomputing it. Finally, this allows space for
2758 the target to store more than one bit of information, as with
2759 SYMBOL_REF_FLAG. */
2760#define SYMBOL_REF_FLAGS(RTX)	\
2761  (RTL_FLAG_CHECK1 ("SYMBOL_REF_FLAGS", (RTX), SYMBOL_REF)	\
2762   ->u2.symbol_ref_flags)
2763
2764/* These flags are common enough to be defined for all targets. They
2765 are computed by the default version of targetm.encode_section_info. */
2766
2767/* Set if this symbol is a function. */
2768#define SYMBOL_FLAG_FUNCTION	(1 << 0)
2769#define SYMBOL_REF_FUNCTION_P(RTX)	\
2770  ((SYMBOL_REF_FLAGS (RTX) & SYMBOL_FLAG_FUNCTION) != 0)
2771/* Set if targetm.binds_local_p is true. */
2772#define SYMBOL_FLAG_LOCAL	(1 << 1)
2773#define SYMBOL_REF_LOCAL_P(RTX)	\
2774  ((SYMBOL_REF_FLAGS (RTX) & SYMBOL_FLAG_LOCAL) != 0)
2775/* Set if targetm.in_small_data_p is true. */
2776#define SYMBOL_FLAG_SMALL	(1 << 2)
2777#define SYMBOL_REF_SMALL_P(RTX)	\
2778  ((SYMBOL_REF_FLAGS (RTX) & SYMBOL_FLAG_SMALL) != 0)
2779/* The three-bit field at [5:3] is nonzero for TLS variables; use
2780 SYMBOL_REF_TLS_MODEL to extract the field as an enum tls_model. */
2781#define SYMBOL_FLAG_TLS_SHIFT	3
2782#define SYMBOL_REF_TLS_MODEL(RTX)	\
2783  ((enum tls_model) ((SYMBOL_REF_FLAGS (RTX) >> SYMBOL_FLAG_TLS_SHIFT) & 7))
2784/* Set if this symbol is not defined in this translation unit. */
2785#define SYMBOL_FLAG_EXTERNAL	(1 << 6)
2786#define SYMBOL_REF_EXTERNAL_P(RTX)	\
2787  ((SYMBOL_REF_FLAGS (RTX) & SYMBOL_FLAG_EXTERNAL) != 0)
2788/* Set if this symbol has a block_symbol structure associated with it. */
2789#define SYMBOL_FLAG_HAS_BLOCK_INFO	(1 << 7)
2790#define SYMBOL_REF_HAS_BLOCK_INFO_P(RTX)	\
2791  ((SYMBOL_REF_FLAGS (RTX) & SYMBOL_FLAG_HAS_BLOCK_INFO) != 0)
2792/* Set if this symbol is a section anchor. SYMBOL_REF_ANCHOR_P implies
2793 SYMBOL_REF_HAS_BLOCK_INFO_P. */
2794#define SYMBOL_FLAG_ANCHOR	(1 << 8)
2795#define SYMBOL_REF_ANCHOR_P(RTX)	\
2796  ((SYMBOL_REF_FLAGS (RTX) & SYMBOL_FLAG_ANCHOR) != 0)
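/* A recap of the layout defined above, for illustration: bit 0
   FUNCTION, bit 1 LOCAL, bit 2 SMALL, bits [5:3] the TLS model, bit 6
   EXTERNAL, bit 7 HAS_BLOCK_INFO, bit 8 ANCHOR.  For example, a flags
   value of (SYMBOL_FLAG_FUNCTION | SYMBOL_FLAG_LOCAL) describes a
   function that binds locally.  */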
2797
2798/* Subsequent bits are available for the target to use. */
2799#define SYMBOL_FLAG_MACH_DEP_SHIFT	9
2800#define SYMBOL_FLAG_MACH_DEP	(1 << SYMBOL_FLAG_MACH_DEP_SHIFT)
2801
2802/* If SYMBOL_REF_HAS_BLOCK_INFO_P (RTX), this is the object_block
2803 structure to which the symbol belongs, or NULL if it has not been
2804 assigned a block. */
2805#define SYMBOL_REF_BLOCK(RTX) (BLOCK_SYMBOL_CHECK (RTX)->block)
2806
2807/* If SYMBOL_REF_HAS_BLOCK_INFO_P (RTX), this is the offset of RTX from
2808 the first object in SYMBOL_REF_BLOCK (RTX). The value is negative if
2809 RTX has not yet been assigned to a block, or it has not been given an
2810 offset within that block. */
2811#define SYMBOL_REF_BLOCK_OFFSET(RTX) (BLOCK_SYMBOL_CHECK (RTX)->offset)
2812
2813/* True if RTX is flagged to be a scheduling barrier. */
2814#define PREFETCH_SCHEDULE_BARRIER_P(RTX)	\
2815  (RTL_FLAG_CHECK1 ("PREFETCH_SCHEDULE_BARRIER_P", (RTX), PREFETCH)->volatil)
2816
2817/* Indicate whether the machine has any sort of auto increment addressing.
2818 If not, we can avoid checking for REG_INC notes. */
2819
2820#if (defined (HAVE_PRE_INCREMENT) || defined (HAVE_PRE_DECREMENT) \
2821     || defined (HAVE_POST_INCREMENT) || defined (HAVE_POST_DECREMENT) \
2822     || defined (HAVE_PRE_MODIFY_DISP) || defined (HAVE_POST_MODIFY_DISP) \
2823     || defined (HAVE_PRE_MODIFY_REG) || defined (HAVE_POST_MODIFY_REG))
2824#define AUTO_INC_DEC 1
2825#else
2826#define AUTO_INC_DEC 0
2827#endif
2828
2829/* Define a macro to look for REG_INC notes,
2830 but save time on machines where they never exist. */
2831
2832#if AUTO_INC_DEC
2833#define FIND_REG_INC_NOTE(INSN, REG)			\
2834  ((REG) != NULL_RTX && REG_P ((REG))			\
2835   ? find_regno_note ((INSN), REG_INC, REGNO (REG))	\
2836   : find_reg_note ((INSN), REG_INC, (REG)))
2837#else
2838#define FIND_REG_INC_NOTE(INSN, REG) 0
2839#endif
2840
2841#ifndef HAVE_PRE_INCREMENT
2842#define HAVE_PRE_INCREMENT 0
2843#endif
2844
2845#ifndef HAVE_PRE_DECREMENT
2846#define HAVE_PRE_DECREMENT 0
2847#endif
2848
2849#ifndef HAVE_POST_INCREMENT
2850#define HAVE_POST_INCREMENT 0
2851#endif
2852
2853#ifndef HAVE_POST_DECREMENT
2854#define HAVE_POST_DECREMENT 0
2855#endif
2856
2857#ifndef HAVE_POST_MODIFY_DISP
2858#define HAVE_POST_MODIFY_DISP 0
2859#endif
2860
2861#ifndef HAVE_POST_MODIFY_REG
2862#define HAVE_POST_MODIFY_REG 0
2863#endif
2864
2865#ifndef HAVE_PRE_MODIFY_DISP
2866#define HAVE_PRE_MODIFY_DISP 0
2867#endif
2868
2869#ifndef HAVE_PRE_MODIFY_REG
2870#define HAVE_PRE_MODIFY_REG 0
2871#endif
2872
2873
2874/* Some architectures do not have complete pre/post increment/decrement
2875 instruction sets, or only move some modes efficiently. These macros
2876 allow us to tune autoincrement generation. */
2877
2878#ifndef USE_LOAD_POST_INCREMENT
2879#define USE_LOAD_POST_INCREMENT(MODE)   HAVE_POST_INCREMENT
2880#endif
2881
2882#ifndef USE_LOAD_POST_DECREMENT
2883#define USE_LOAD_POST_DECREMENT(MODE)   HAVE_POST_DECREMENT
2884#endif
2885
2886#ifndef USE_LOAD_PRE_INCREMENT
2887#define USE_LOAD_PRE_INCREMENT(MODE)    HAVE_PRE_INCREMENT
2888#endif
2889
2890#ifndef USE_LOAD_PRE_DECREMENT
2891#define USE_LOAD_PRE_DECREMENT(MODE)    HAVE_PRE_DECREMENT
2892#endif
2893
2894#ifndef USE_STORE_POST_INCREMENT
2895#define USE_STORE_POST_INCREMENT(MODE)  HAVE_POST_INCREMENT
2896#endif
2897
2898#ifndef USE_STORE_POST_DECREMENT
2899#define USE_STORE_POST_DECREMENT(MODE)  HAVE_POST_DECREMENT
2900#endif
2901
2902#ifndef USE_STORE_PRE_INCREMENT
2903#define USE_STORE_PRE_INCREMENT(MODE)   HAVE_PRE_INCREMENT
2904#endif
2905
2906#ifndef USE_STORE_PRE_DECREMENT
2907#define USE_STORE_PRE_DECREMENT(MODE)   HAVE_PRE_DECREMENT
2908#endif
2909
2910/* Nonzero when we are generating CONCATs. */
2911extern int generating_concat_p;
2912
2913/* Nonzero when we are expanding trees to RTL. */
2914extern int currently_expanding_to_rtl;
2915
2916/* Generally useful functions. */
2917
2918#ifndef GENERATOR_FILE
2919/* Return the cost of SET X. SPEED_P is true if optimizing for speed
2920 rather than size. */
2921
2922inline int
2923set_rtx_cost (rtx x, bool speed_p)
2924{
2925  return rtx_cost (x, VOIDmode, INSN, 4, speed_p);
2926}
2927
2928/* Like set_rtx_cost, but return both the speed and size costs in C. */
2929
2930inline void
2931get_full_set_rtx_cost (rtx x, struct full_rtx_costs *c)
2932{
2933  get_full_rtx_cost (x, VOIDmode, INSN, 4, c);
2934}
2935
2936/* Return the cost of moving X into a register, relative to the cost
2937 of a register move. SPEED_P is true if optimizing for speed rather
2938 than size. */
2939
2940inline int
2941set_src_cost (rtx x, machine_mode mode, bool speed_p)
2942{
2943 return rtx_cost (x, mode, SET, 1, speed_p);
2944}
2945
2946/* Like set_src_cost, but return both the speed and size costs in C. */
2947
2948inline void
2949get_full_set_src_cost (rtx x, machine_mode mode, struct full_rtx_costs *c)
2950{
2951 get_full_rtx_cost (x, mode, SET, 1, c);
2952}
2953#endif
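/* A usage sketch with hypothetical rtxes (`new_set', `old_set' are
   assumptions, not from rtl.h): a typical caller compares candidate
   SETs under the current optimization goal:

     bool speed_p = optimize_insn_for_speed_p ();
     if (set_rtx_cost (new_set, speed_p) < set_rtx_cost (old_set, speed_p))
       ;  // prefer new_set  */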
2954
2955/* A convenience macro to validate the arguments of a zero_extract
2956 expression. It determines whether SIZE lies inclusively within
2957   [1, RANGE], POS lies inclusively within [0, RANGE - 1]
2958 and the sum lies inclusively within [1, RANGE]. RANGE must be
2959 >= 1, but SIZE and POS may be negative. */
2960#define EXTRACT_ARGS_IN_RANGE(SIZE, POS, RANGE) \
2961  (IN_RANGE ((POS), 0, (unsigned HOST_WIDE_INT) (RANGE) - 1) \
2962   && IN_RANGE ((SIZE), 1, (unsigned HOST_WIDE_INT) (RANGE) \
2963		- (unsigned HOST_WIDE_INT)(POS)))
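/* A worked example, illustrative only: for a zero_extract on a 32-bit
   value, EXTRACT_ARGS_IN_RANGE (8, 24, 32) holds (bits 24..31 fit),
   while EXTRACT_ARGS_IN_RANGE (16, 24, 32) fails because
   POS + SIZE = 40 runs past the 32-bit range.  */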
2964
2965/* In explow.cc */
2966extern HOST_WIDE_INT trunc_int_for_mode (HOST_WIDE_INT, machine_mode);
2967extern poly_int64 trunc_int_for_mode (poly_int64, machine_mode);
2968extern rtx plus_constant (machine_mode, rtx, poly_int64, bool = false);
2969extern HOST_WIDE_INT get_stack_check_protect (void);
2970
2971/* In rtl.cc */
2972extern rtx rtx_alloc (RTX_CODE CXX_MEM_STAT_INFO);
2973inline rtx
2974rtx_init (rtx rt, RTX_CODE code)
2975{
2976  memset (rt, 0, RTX_HDR_SIZE);
2977  PUT_CODE (rt, code);
2978 return rt;
2979}
2980#define rtx_alloca(code) \
2981  rtx_init ((rtx) alloca (RTX_CODE_SIZE ((code))), (code))
2982extern rtx rtx_alloc_stat_v (RTX_CODE MEM_STAT_DECL, int);
2983#define rtx_alloc_v(c, SZ) rtx_alloc_stat_v (c MEM_STAT_INFO, SZ)
2984#define const_wide_int_alloc(NWORDS)				\
2985  rtx_alloc_v (CONST_WIDE_INT,					\
2986	       (sizeof (struct hwivec_def)			\
2987		+ ((NWORDS)-1) * sizeof (HOST_WIDE_INT)))
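/* A usage sketch with hypothetical operands (`mode', `op0', `op1' are
   assumptions, not from rtl.h): rtx_alloca builds a short-lived rtx
   on the stack, e.g. a scratch binary expression that never escapes
   the current function:

     rtx tmp = rtx_alloca (PLUS);
     PUT_MODE (tmp, mode);
     XEXP (tmp, 0) = op0;
     XEXP (tmp, 1) = op1;  */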
2988
2989extern rtvec rtvec_alloc (size_t);
2990extern rtvec shallow_copy_rtvec (rtvec);
2991extern bool shared_const_p (const_rtx);
2992extern rtx copy_rtx (rtx);
2993extern enum rtx_code classify_insn (rtx);
2994extern void dump_rtx_statistics (void);
2995
2996/* In emit-rtl.cc */
2997extern rtx copy_rtx_if_shared (rtx);
2998
2999/* In rtl.cc */
3000extern unsigned int rtx_size (const_rtx);
3001extern rtx shallow_copy_rtx (const_rtx CXX_MEM_STAT_INFO);
3002extern int rtx_equal_p (const_rtx, const_rtx);
3003extern bool rtvec_all_equal_p (const_rtvec);
3004extern bool rtvec_series_p (rtvec, int);
3005
3006/* Return true if X is a vector constant with a duplicated element value. */
3007
3008inline bool
3009const_vec_duplicate_p (const_rtx x)
3010{
3011  return (GET_CODE (x) == CONST_VECTOR
3012	  && CONST_VECTOR_NPATTERNS (x) == 1
3013	  && CONST_VECTOR_DUPLICATE_P (x));
3014}
3015
3016/* Return true if X is a vector constant with a duplicated element value.
3017 Store the duplicated element in *ELT if so. */
3018
3019template <typename T>
3020inline bool
3021const_vec_duplicate_p (T x, T *elt)
3022{
3023 if (const_vec_duplicate_p (x))
3024 {
3025      *elt = CONST_VECTOR_ENCODED_ELT (x, 0);
3026 return true;
3027 }
3028 return false;
3029}
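/* Editor's illustrative sketch, not part of rtl.h: using the template
   overload above to extract the repeated element of a constant
   duplicate such as {7, 7, 7, ...}.  The function name is made up for
   illustration.  */
static rtx
example_dup_element (rtx vec)
{
  rtx elt;
  if (const_vec_duplicate_p (vec, &elt))
    return elt;   /* e.g. the shared (const_int 7).  */
  return NULL_RTX;
}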
3030
3031/* Return true if X is a vector with a duplicated element value, either
3032 constant or nonconstant. Store the duplicated element in *ELT if so. */
3033
3034template <typename T>
3035inline bool
3036vec_duplicate_p (T x, T *elt)
3037{
3038 if (GET_CODE (x) == VEC_DUPLICATE
3039     && !VECTOR_MODE_P (GET_MODE (XEXP (x, 0))))
3040 {
3041      *elt = XEXP (x, 0);
3042 return true;
3043 }
3044 return const_vec_duplicate_p (x, elt);
3045}
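/* Editor's illustrative sketch, not part of rtl.h: vec_duplicate_p also
   matches the nonconstant form (vec_duplicate:M (reg:SI r)), falling
   back to const_vec_duplicate_p for CONST_VECTORs.  The function name
   is made up for illustration.  */
static bool
example_is_reg_duplicate (rtx x)
{
  rtx elt;
  return vec_duplicate_p (x, &elt) && REG_P (elt);
}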
3046
3047/* If X is a vector constant with a duplicated element value, return that
3048 element value, otherwise return X. */
3049
3050template <typename T>
3051inline T
3052unwrap_const_vec_duplicate (T x)
3053{
3054 if (const_vec_duplicate_p (x))
3055    x = CONST_VECTOR_ELT (x, 0);
3056 return x;
3057}
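/* Editor's illustrative sketch, not part of rtl.h: because
   unwrap_const_vec_duplicate returns X itself when X is not a constant
   duplicate, the same test works both for a scalar constant and for a
   vector of duplicated zeros.  The function name is made up for
   illustration.  */
static bool
example_is_const_zero_or_zero_dup (rtx x)
{
  rtx elt = unwrap_const_vec_duplicate (x);
  return CONST_INT_P (elt) && INTVAL (elt) == 0;
}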
3058
3059/* In emit-rtl.cc. */
3060extern wide_int const_vector_int_elt (const_rtx, unsigned int);
3061extern rtx const_vector_elt (const_rtx, unsigned int);
3062extern bool const_vec_series_p_1 (const_rtx, rtx *, rtx *);
3063
3064/* Return true if X is an integer constant vector that contains a linear
3065 series of the form:
3066
3067 { B, B + S, B + 2 * S, B + 3 * S, ... }
3068
3069 for a nonzero S. Store B and S in *BASE_OUT and *STEP_OUT on success. */
3070
3071inline bool
3072const_vec_series_p (const_rtx x, rtx *base_out, rtx *step_out)
3073{
3074 if (GET_CODE (x) == CONST_VECTOR
3075     && CONST_VECTOR_NPATTERNS (x) == 1
3076     && !CONST_VECTOR_DUPLICATE_P (x))
3077 return const_vec_series_p_1 (x, base_out, step_out);
3078 return false;
3079}
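/* Editor's illustrative sketch, not part of rtl.h: recognizing the
   "iota" constant {0, 1, 2, 3, ...} as a series with base 0 and step 1.
   CONST_INTs are shared objects, so pointer comparison against
   const0_rtx/const1_rtx is the usual idiom.  The function name is made
   up for illustration.  */
static bool
example_is_iota (rtx x)
{
  rtx base, step;
  return (const_vec_series_p (x, &base, &step)
          && base == const0_rtx
          && step == const1_rtx);
}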
3080
3081/* Return true if X is a vector that contains a linear series of the
3082 form:
3083
3084 { B, B + S, B + 2 * S, B + 3 * S, ... }
3085
3086 where B and S are constant or nonconstant. Store B and S in
3087 *BASE_OUT and *STEP_OUT on success. */
3088
3089inline bool
3090vec_series_p (const_rtx x, rtx *base_out, rtx *step_out)
3091{
3092 if (GET_CODE (x) == VEC_SERIES)
3093 {
3094      *base_out = XEXP (x, 0);
3095      *step_out = XEXP (x, 1);
3096 return true;
3097 }
3098 return const_vec_series_p (x, base_out, step_out);
3099}
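/* Editor's illustrative sketch, not part of rtl.h: vec_series_p also
   matches the nonconstant form (vec_series:M base step), e.g. a series
   whose base lives in a register while the step is constant.  The
   function name is made up for illustration.  */
static bool
example_series_reg_base (rtx x)
{
  rtx base, step;
  return (vec_series_p (x, &base, &step)
          && REG_P (base)
          && CONST_INT_P (step));
}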
3100
3101/* Return true if CONST_VECTORs X and Y, which are known to have the same mode,
3102 also have the same encoding. This means that they are equal whenever their
3103 operands are equal. */
3104
3105inline bool
3106same_vector_encodings_p (const_rtx x, const_rtx y)
3107{
3108 /* Don't be fussy about the encoding of constant-length vectors,
3109 since XVECEXP (X, 0) and XVECEXP (Y, 0) list all the elements anyway. */
3110 if (poly_uint64 (CONST_VECTOR_NUNITS (x)).is_constant ())
3111 return true;
3112
3113 return (CONST_VECTOR_NPATTERNS (x) == CONST_VECTOR_NPATTERNS (y)
3114     && (CONST_VECTOR_NELTS_PER_PATTERN (x)
3115         == CONST_VECTOR_NELTS_PER_PATTERN (y)));
3116}
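/* Editor's illustrative sketch, not part of rtl.h: once two
   CONST_VECTORs are known to share an encoding, comparing the encoded
   elements decides equality, which is the property the comment above
   describes.  This sketch assumes the variable-length case; for
   fixed-length vectors same_vector_encodings_p is trivially true and
   one would compare the XVECEXP elements instead.  The function name
   is made up for illustration.  */
static bool
example_const_vector_equal (rtx x, rtx y)
{
  if (!same_vector_encodings_p (x, y))
    return false;
  unsigned int n = (CONST_VECTOR_NPATTERNS (x)
                    * CONST_VECTOR_NELTS_PER_PATTERN (x));
  for (unsigned int i = 0; i < n; ++i)
    if (!rtx_equal_p (CONST_VECTOR_ENCODED_ELT (x, i),
                      CONST_VECTOR_ENCODED_ELT (y, i)))
      return false;
  return true;
}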
3117
3118/* Return the unpromoted (outer) mode of SUBREG_PROMOTED_VAR_P subreg X. */
3119
3120inline scalar_int_mode
3121subreg_unpromoted_mode (rtx x)
3122{
3123 gcc_checking_assert (SUBREG_PROMOTED_VAR_P (x));
3124 return as_a <scalar_int_mode> (GET_MODE (x));
3125}
3126
3127/* Return the promoted (inner) mode of SUBREG_PROMOTED_VAR_P subreg X. */
3128
3129inline scalar_int_mode
3130subreg_promoted_mode (rtx x)
3131{
3132 gcc_checking_assert (SUBREG_PROMOTED_VAR_P (x));
3133 return as_a <scalar_int_mode> (GET_MODE (SUBREG_REG (x)));
3134}
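/* Editor's illustrative sketch, not part of rtl.h: for a promoted
   subreg such as (subreg:SI (reg:DI r) 0) with SUBREG_PROMOTED_VAR_P
   set, the two accessors above return SImode (outer) and DImode (inner)
   respectively, so the unpromoted mode is never wider than the promoted
   one.  The function name is made up for illustration.  */
static void
example_promoted_subreg_modes (rtx x)
{
  scalar_int_mode outer = subreg_unpromoted_mode (x);
  scalar_int_mode inner = subreg_promoted_mode (x);
  gcc_assert (GET_MODE_SIZE (outer) <= GET_MODE_SIZE (inner));
}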
3135
3136/* In emit-rtl.cc */
3137extern rtvec gen_rtvec_v (int, rtx *);
3138extern rtvec gen_rtvec_v (int, rtx_insn **);
3139extern rtx gen_reg_rtx (machine_mode);
3140extern rtx gen_rtx_REG_offset (rtx, machine_mode, unsigned int, poly_int64);
3141extern rtx gen_reg_rtx_offset (rtx, machine_mode, int);
3142extern rtx gen_reg_rtx_and_attrs (rtx);
3143extern rtx_code_label *gen_label_rtx (void);
3144extern rtx gen_lowpart_common (machine_mode, rtx);
3145
3146/* In cse.cc */