File: | build/gcc/reload1.cc |
Warning: | line 3565, column 2: Value stored to 'plus_cst_src' is never read |
1 | /* Reload pseudo regs into hard regs for insns that require hard regs. |
2 | Copyright (C) 1987-2023 Free Software Foundation, Inc. |
3 | |
4 | This file is part of GCC. |
5 | |
6 | GCC is free software; you can redistribute it and/or modify it under |
7 | the terms of the GNU General Public License as published by the Free |
8 | Software Foundation; either version 3, or (at your option) any later |
9 | version. |
10 | |
11 | GCC is distributed in the hope that it will be useful, but WITHOUT ANY |
12 | WARRANTY; without even the implied warranty of MERCHANTABILITY or |
13 | FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License |
14 | for more details. |
15 | |
16 | You should have received a copy of the GNU General Public License |
17 | along with GCC; see the file COPYING3. If not see |
18 | <http://www.gnu.org/licenses/>. */ |
19 | |
20 | #include "config.h" |
21 | #include "system.h" |
22 | #include "coretypes.h" |
23 | #include "backend.h" |
24 | #include "target.h" |
25 | #include "rtl.h" |
26 | #include "tree.h" |
27 | #include "predict.h" |
28 | #include "df.h" |
29 | #include "memmodel.h" |
30 | #include "tm_p.h" |
31 | #include "optabs.h" |
32 | #include "regs.h" |
33 | #include "ira.h" |
34 | #include "recog.h" |
35 | |
36 | #include "rtl-error.h" |
37 | #include "expr.h" |
38 | #include "addresses.h" |
39 | #include "cfgrtl.h" |
40 | #include "cfgbuild.h" |
41 | #include "reload.h" |
42 | #include "except.h" |
43 | #include "dumpfile.h" |
44 | #include "rtl-iter.h" |
45 | #include "function-abi.h" |
46 | |
47 | /* This file contains the reload pass of the compiler, which is |
48 | run after register allocation has been done. It checks that |
49 | each insn is valid (operands required to be in registers really |
50 | are in registers of the proper class) and fixes up invalid ones |
51 | by copying values temporarily into registers for the insns |
52 | that need them. |
53 | |
54 | The results of register allocation are described by the vector |
55 | reg_renumber; the insns still contain pseudo regs, but reg_renumber |
56 | can be used to find which hard reg, if any, a pseudo reg is in. |
57 | |
58 | The technique we always use is to free up a few hard regs that are |
59 | called ``reload regs'', and for each place where a pseudo reg |
60 | must be in a hard reg, copy it temporarily into one of the reload regs. |
61 | |
62 | Reload regs are allocated locally for every instruction that needs |
63 | reloads. When there are pseudos which are allocated to a register that |
64 | has been chosen as a reload reg, such pseudos must be ``spilled''. |
65 | This means that they go to other hard regs, or to stack slots if no other |
66 | available hard regs can be found. Spilling can invalidate more |
67 | insns, creating additional reload needs, so we must keep checking |
68 | until the process stabilizes. |
69 | |
70 | For machines with different classes of registers, we must keep track |
71 | of the register class needed for each reload, and make sure that |
72 | we allocate enough reload registers of each class. |
73 | |
74 | The file reload.cc contains the code that checks one insn for |
75 | validity and reports the reloads that it needs. This file |
76 | is in charge of scanning the entire rtl code, accumulating the |
77 | reload needs, spilling, assigning reload registers to use for |
78 | fixing up each insn, and generating the new insns to copy values |
79 | into the reload registers. */ |
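The comment above compresses the whole pass into a fixed-point iteration. Below is a minimal self-contained sketch of that shape, assuming illustrative stub names (scan_and_spill and emit_reload_moves are not functions from this file; the real driver is reload () further down):

    #include <stdbool.h>

    /* Stub standing in for "scan all insns, record reload needs, spill
       displaced pseudos"; returns true if any new spill happened.  */
    static bool scan_and_spill (void) { return false; }
    /* Stub for "emit the move insns that fix up each insn".  */
    static void emit_reload_moves (void) {}

    static void reload_driver_sketch (void)
    {
      /* Spilling can invalidate more insns, so iterate to a fixed point.  */
      while (scan_and_spill ())
        ;
      emit_reload_moves ();
    }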
80 | |
81 | struct target_reload default_target_reload; |
82 | #if SWITCHABLE_TARGET |
83 | struct target_reload *this_target_reload = &default_target_reload; |
84 | #endif |
85 | |
86 | #define spill_indirect_levels \ |
87 | (this_target_reload->x_spill_indirect_levels) |
88 | |
89 | /* During reload_as_needed, element N contains a REG rtx for the hard reg |
90 | into which reg N has been reloaded (perhaps for a previous insn). */ |
91 | static rtx *reg_last_reload_reg; |
92 | |
93 | /* Elt N nonzero if reg_last_reload_reg[N] has been set in this insn |
94 | for an output reload that stores into reg N. */ |
95 | static regset_head reg_has_output_reload; |
96 | |
97 | /* Indicates which hard regs are reload-registers for an output reload |
98 | in the current insn. */ |
99 | static HARD_REG_SET reg_is_output_reload; |
100 | |
101 | /* Widest mode in which each pseudo reg is referred to (via subreg). */ |
102 | static machine_mode *reg_max_ref_mode; |
103 | |
104 | /* Vector to remember old contents of reg_renumber before spilling. */ |
105 | static short *reg_old_renumber; |
106 | |
107 | /* During reload_as_needed, element N contains the last pseudo regno reloaded |
108 | into hard register N. If that pseudo reg occupied more than one register, |
109 | reg_reloaded_contents points to that pseudo for each spill register in |
110 | use; all of these must remain set for an inheritance to occur. */ |
111 | static int reg_reloaded_contents[FIRST_PSEUDO_REGISTER]; |
112 | |
113 | /* During reload_as_needed, element N contains the insn for which |
114 | hard register N was last used. Its contents are significant only |
115 | when reg_reloaded_valid is set for this register. */ |
116 | static rtx_insn *reg_reloaded_insn[FIRST_PSEUDO_REGISTER]; |
117 | |
118 | /* Indicate if reg_reloaded_insn / reg_reloaded_contents is valid. */ |
119 | static HARD_REG_SET reg_reloaded_valid; |
120 | /* Indicate if the register was dead at the end of the reload. |
121 | This is only valid if reg_reloaded_contents is set and valid. */ |
122 | static HARD_REG_SET reg_reloaded_dead; |
123 | |
124 | /* Number of spill-regs so far; number of valid elements of spill_regs. */ |
125 | static int n_spills; |
126 | |
127 | /* In parallel with spill_regs, contains REG rtx's for those regs. |
128 | Holds the last rtx used for any given reg, or 0 if it has never |
129 | been used for spilling yet. This rtx is reused, provided it has |
130 | the proper mode. */ |
131 | static rtx spill_reg_rtx[FIRST_PSEUDO_REGISTER]; |
132 | |
133 | /* In parallel with spill_regs, contains nonzero for a spill reg |
134 | that was stored after the last time it was used. |
135 | The precise value is the insn generated to do the store. */ |
136 | static rtx_insn *spill_reg_store[FIRST_PSEUDO_REGISTER]; |
137 | |
138 | /* This is the register that was stored with spill_reg_store. This is a |
139 | copy of reload_out / reload_out_reg when the value was stored; if |
140 | reload_out is a MEM, spill_reg_stored_to will be set to reload_out_reg. */ |
141 | static rtx spill_reg_stored_to[FIRST_PSEUDO_REGISTER]; |
142 | |
143 | /* This table is the inverse mapping of spill_regs: |
144 | indexed by hard reg number, |
145 | it contains the position of that reg in spill_regs, |
146 | or -1 for something that is not in spill_regs. |
147 | |
148 | ?!? This is no longer accurate. */ |
149 | static short spill_reg_order[FIRST_PSEUDO_REGISTER]; |
150 | |
151 | /* This reg set indicates registers that can't be used as spill registers for |
152 | the currently processed insn. These are the hard registers which are live |
153 | during the insn, but not allocated to pseudos, as well as fixed |
154 | registers. */ |
155 | static HARD_REG_SET bad_spill_regs; |
156 | |
157 | /* These are the hard registers that can't be used as spill register for any |
158 | insn. This includes registers used for user variables and registers that |
159 | we can't eliminate. A register that appears in this set also can't be used |
160 | to retry register allocation. */ |
161 | static HARD_REG_SET bad_spill_regs_global; |
162 | |
163 | /* Describes order of use of registers for reloading |
164 | of spilled pseudo-registers. `n_spills' is the number of |
165 | elements that are actually valid; new ones are added at the end. |
166 | |
167 | Both spill_regs and spill_reg_order are used on two occasions: |
168 | once during find_reload_regs, where they keep track of the spill registers |
169 | for a single insn, but also during reload_as_needed where they show all |
170 | the registers ever used by reload. For the latter case, the information |
171 | is calculated during finish_spills. */ |
172 | static short spill_regs[FIRST_PSEUDO_REGISTER]; |
173 | |
174 | /* This vector of reg sets indicates, for each pseudo, which hard registers |
175 | may not be used for retrying global allocation because the register was |
176 | formerly spilled from one of them. If we allowed reallocating a pseudo to |
177 | a register that it was already allocated to, reload might not |
178 | terminate. */ |
179 | static HARD_REG_SET *pseudo_previous_regs; |
180 | |
181 | /* This vector of reg sets indicates, for each pseudo, which hard |
182 | registers may not be used for retrying global allocation because they |
183 | are used as spill registers during one of the insns in which the |
184 | pseudo is live. */ |
185 | static HARD_REG_SET *pseudo_forbidden_regs; |
186 | |
187 | /* All hard regs that have been used as spill registers for any insn are |
188 | marked in this set. */ |
189 | static HARD_REG_SET used_spill_regs; |
190 | |
191 | /* Index of last register assigned as a spill register. We allocate in |
192 | a round-robin fashion. */ |
193 | static int last_spill_reg; |
194 | |
195 | /* Record the stack slot for each spilled hard register. */ |
196 | static rtx spill_stack_slot[FIRST_PSEUDO_REGISTER]; |
197 | |
198 | /* Width allocated so far for that stack slot. */ |
199 | static poly_uint64_pod spill_stack_slot_width[FIRST_PSEUDO_REGISTER]; |
200 | |
201 | /* Record which pseudos needed to be spilled. */ |
202 | static regset_head spilled_pseudos; |
203 | |
204 | /* Record which pseudos changed their allocation in finish_spills. */ |
205 | static regset_head changed_allocation_pseudos; |
206 | |
207 | /* Used for communication between order_regs_for_reload and count_pseudo. |
208 | Used to avoid counting one pseudo twice. */ |
209 | static regset_head pseudos_counted; |
210 | |
211 | /* First uid used by insns created by reload in this function. |
212 | Used in find_equiv_reg. */ |
213 | int reload_first_uid; |
214 | |
215 | /* Flag set by local-alloc or global-alloc if anything is live in |
216 | a call-clobbered reg across calls. */ |
217 | int caller_save_needed; |
218 | |
219 | /* Set to 1 while reload_as_needed is operating. |
220 | Required by some machines to handle any generated moves differently. */ |
221 | int reload_in_progress = 0; |
222 | |
223 | /* This obstack is used for allocation of rtl during register elimination. |
224 | The allocated storage can be freed once find_reloads has processed the |
225 | insn. */ |
226 | static struct obstack reload_obstack; |
227 | |
228 | /* Points to the beginning of the reload_obstack. All insn_chain structures |
229 | are allocated first. */ |
230 | static char *reload_startobj; |
231 | |
232 | /* The point after all insn_chain structures. Used to quickly deallocate |
233 | memory allocated in copy_reloads during calculate_needs_all_insns. */ |
234 | static char *reload_firstobj; |
235 | |
236 | /* This points before all local rtl generated by register elimination. |
237 | Used to quickly free all memory after processing one insn. */ |
238 | static char *reload_insn_firstobj; |
239 | |
240 | /* List of insn_chain instructions, one for every insn that reload needs to |
241 | examine. */ |
242 | class insn_chain *reload_insn_chain; |
243 | |
244 | /* TRUE if we potentially left dead insns in the insn stream and want to |
245 | run DCE immediately after reload, FALSE otherwise. */ |
246 | static bool need_dce; |
247 | |
248 | /* List of all insns needing reloads. */ |
249 | static class insn_chain *insns_need_reload; |
250 | |
251 | /* This structure is used to record information about register eliminations. |
252 | Each array entry describes one possible way of eliminating a register |
253 | in favor of another. If there is more than one way of eliminating a |
254 | particular register, the most preferred should be specified first. */ |
255 | |
256 | struct elim_table |
257 | { |
258 | int from; /* Register number to be eliminated. */ |
259 | int to; /* Register number used as replacement. */ |
260 | poly_int64_pod initial_offset; /* Initial difference between values. */ |
261 | int can_eliminate; /* Nonzero if this elimination can be done. */ |
262 | int can_eliminate_previous; /* Value returned by TARGET_CAN_ELIMINATE |
263 | target hook in previous scan over insns |
264 | made by reload. */ |
265 | poly_int64_pod offset; /* Current offset between the two regs. */ |
266 | poly_int64_pod previous_offset; /* Offset at end of previous insn. */ |
267 | int ref_outside_mem; /* "to" has been referenced outside a MEM. */ |
268 | rtx from_rtx; /* REG rtx for the register to be eliminated. |
269 | We cannot simply compare the number since |
270 | we might then spuriously replace a hard |
271 | register corresponding to a pseudo |
272 | assigned to the reg to be eliminated. */ |
273 | rtx to_rtx; /* REG rtx for the replacement. */ |
274 | }; |
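To make the offset fields concrete, here is a hedged sketch of the arithmetic an elimination performs (the numbers and the helper are illustrative, not taken from any target):

    /* An entry means "replace (reg from) by (reg to) + offset".  With
       offset == 16, the address (plus (reg from) (const_int 8)) becomes
       (plus (reg to) (const_int 24)).  */
    struct elim_sketch { int from, to; long offset; };

    static long eliminated_displacement (const struct elim_sketch *ep,
                                         long disp)
    {
      return disp + ep->offset;  /* disp is now relative to TO */
    }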
275 | |
276 | static struct elim_table *reg_eliminate = 0; |
277 | |
278 | /* This is an intermediate structure to initialize the table. It has |
279 | exactly the members provided by ELIMINABLE_REGS. */ |
280 | static const struct elim_table_1 |
281 | { |
282 | const int from; |
283 | const int to; |
284 | } reg_eliminate_1[] = |
285 | |
286 | ELIMINABLE_REGS; |
287 | |
288 | #define NUM_ELIMINABLE_REGS ARRAY_SIZE (reg_eliminate_1) |
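(For reference, in the i386 configuration this report was generated from, ELIMINABLE_REGS expanded to {{16, 7}, {16, 6}, {19, 7}, {19, 6}} — the argument pointer (16) and soft frame pointer (19) each eliminable to the stack pointer (7) or hard frame pointer (6) — and FIRST_PSEUDO_REGISTER, used as the array bound throughout this file, expanded to 76.)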
289 | |
290 | /* Record the number of pending eliminations that have an offset not equal |
291 | to their initial offset. If nonzero, we use a new copy of each |
292 | replacement result in any insns encountered. */ |
293 | int num_not_at_initial_offset; |
294 | |
295 | /* Count the number of registers that we may be able to eliminate. */ |
296 | static int num_eliminable; |
297 | /* And the number of registers that are equivalent to a constant that |
298 | can be eliminated to frame_pointer / arg_pointer + constant. */ |
299 | static int num_eliminable_invariants; |
300 | |
301 | /* For each label, we record the offset of each elimination. If we reach |
302 | a label by more than one path and an offset differs, we cannot do the |
303 | elimination. This information is indexed by the difference of the |
304 | number of the label and the first label number. We can't offset the |
305 | pointer itself as this can cause problems on machines with segmented |
306 | memory. The first table is an array of flags that records whether we |
307 | have yet encountered a label and the second table is an array of arrays, |
308 | one entry in the latter array for each elimination. */ |
309 | |
310 | static int first_label_num; |
311 | static char *offsets_known_at; |
312 | static poly_int64_pod (*offsets_at)[NUM_ELIMINABLE_REGS]; |
313 | |
314 | vec<reg_equivs_t, va_gc> *reg_equivs; |
315 | |
316 | /* Stack of addresses where an rtx has been changed. We can undo the |
317 | changes by popping items off the stack and restoring the original |
318 | value at each location. |
319 | |
320 | We use this simplistic undo capability rather than copy_rtx as copy_rtx |
321 | will not make a deep copy of a normally sharable rtx, such as |
322 | (const (plus (symbol_ref) (const_int))). If such an expression appears |
323 | as R1 in gen_reload_chain_without_interm_reg_p, then a shared |
324 | rtx expression would be changed. See PR 42431. */ |
325 | |
326 | typedef rtx *rtx_p; |
327 | static vec<rtx_p> substitute_stack; |
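A minimal sketch of the push/restore discipline this stack implements, with plain ints standing in for rtx locations (the real push site is substitute () and the restore loop is in gen_reload_chain_without_interm_reg_p ()):

    #define UNDO_MAX 64
    static int *undo_locs[UNDO_MAX];  /* stands in for vec<rtx_p> */
    static int undo_depth;

    static void change_with_undo (int *loc, int replacement)
    {
      undo_locs[undo_depth++] = loc;  /* push the address, not the value */
      *loc = replacement;
    }

    static void undo_all (int original)
    {
      /* In the reload use, every recorded location is restored to the
         same original expression, so one value suffices here too.  */
      while (undo_depth > 0)
        *undo_locs[--undo_depth] = original;
    }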
328 | |
329 | /* Number of labels in the current function. */ |
330 | |
331 | static int num_labels; |
332 | |
333 | static void replace_pseudos_in (rtx *, machine_mode, rtx); |
334 | static void maybe_fix_stack_asms (void); |
335 | static void copy_reloads (class insn_chain *); |
336 | static void calculate_needs_all_insns (int); |
337 | static int find_reg (class insn_chain *, int); |
338 | static void find_reload_regs (class insn_chain *); |
339 | static void select_reload_regs (void); |
340 | static void delete_caller_save_insns (void); |
341 | |
342 | static void spill_failure (rtx_insn *, enum reg_class); |
343 | static void count_spilled_pseudo (int, int, int); |
344 | static void delete_dead_insn (rtx_insn *); |
345 | static void alter_reg (int, int, bool); |
346 | static void set_label_offsets (rtx, rtx_insn *, int); |
347 | static void check_eliminable_occurrences (rtx); |
348 | static void elimination_effects (rtx, machine_mode); |
349 | static rtx eliminate_regs_1 (rtx, machine_mode, rtx, bool, bool); |
350 | static int eliminate_regs_in_insn (rtx_insn *, int); |
351 | static void update_eliminable_offsets (void); |
352 | static void mark_not_eliminable (rtx, const_rtx, void *); |
353 | static void set_initial_elim_offsets (void); |
354 | static bool verify_initial_elim_offsets (void); |
355 | static void set_initial_label_offsets (void); |
356 | static void set_offsets_for_label (rtx_insn *); |
357 | static void init_eliminable_invariants (rtx_insn *, bool); |
358 | static void init_elim_table (void); |
359 | static void free_reg_equiv (void); |
360 | static void update_eliminables (HARD_REG_SET *); |
361 | static bool update_eliminables_and_spill (void); |
362 | static void elimination_costs_in_insn (rtx_insn *); |
363 | static void spill_hard_reg (unsigned int, int); |
364 | static int finish_spills (int); |
365 | static void scan_paradoxical_subregs (rtx); |
366 | static void count_pseudo (int); |
367 | static void order_regs_for_reload (class insn_chain *); |
368 | static void reload_as_needed (int); |
369 | static void forget_old_reloads_1 (rtx, const_rtx, void *); |
370 | static void forget_marked_reloads (regset); |
371 | static int reload_reg_class_lower (const void *, const void *); |
372 | static void mark_reload_reg_in_use (unsigned int, int, enum reload_type, |
373 | machine_mode); |
374 | static void clear_reload_reg_in_use (unsigned int, int, enum reload_type, |
375 | machine_mode); |
376 | static int reload_reg_free_p (unsigned int, int, enum reload_type); |
377 | static int reload_reg_free_for_value_p (int, int, int, enum reload_type, |
378 | rtx, rtx, int, int); |
379 | static int free_for_value_p (int, machine_mode, int, enum reload_type, |
380 | rtx, rtx, int, int); |
381 | static int allocate_reload_reg (class insn_chain *, int, int); |
382 | static int conflicts_with_override (rtx); |
383 | static void failed_reload (rtx_insn *, int); |
384 | static int set_reload_reg (int, int); |
385 | static void choose_reload_regs_init (class insn_chain *, rtx *); |
386 | static void choose_reload_regs (class insn_chain *); |
387 | static void emit_input_reload_insns (class insn_chain *, struct reload *, |
388 | rtx, int); |
389 | static void emit_output_reload_insns (class insn_chain *, struct reload *, |
390 | int); |
391 | static void do_input_reload (class insn_chain *, struct reload *, int); |
392 | static void do_output_reload (class insn_chain *, struct reload *, int); |
393 | static void emit_reload_insns (class insn_chain *); |
394 | static void delete_output_reload (rtx_insn *, int, int, rtx); |
395 | static void delete_address_reloads (rtx_insn *, rtx_insn *); |
396 | static void delete_address_reloads_1 (rtx_insn *, rtx, rtx_insn *); |
397 | static void inc_for_reload (rtx, rtx, rtx, poly_int64); |
398 | static void substitute (rtx *, const_rtx, rtx); |
399 | static bool gen_reload_chain_without_interm_reg_p (int, int); |
400 | static int reloads_conflict (int, int); |
401 | static rtx_insn *gen_reload (rtx, rtx, int, enum reload_type); |
402 | static rtx_insn *emit_insn_if_valid_for_reload (rtx); |
403 | |
404 | /* Initialize the reload pass. This is called at the beginning of compilation |
405 | and may be called again if the target is reinitialized. */ |
406 | |
407 | void |
408 | init_reload (void) |
409 | { |
410 | int i; |
411 | |
412 | /* Often (MEM (REG n)) is still valid even if (REG n) is put on the stack. |
413 | Set spill_indirect_levels to the number of levels such addressing is |
414 | permitted, zero if it is not permitted at all. */ |
415 | |
416 | rtx tem |
417 | = gen_rtx_MEM (Pmode, |
418 | gen_rtx_PLUS (Pmode, |
419 | gen_rtx_REG (Pmode, |
420 | LAST_VIRTUAL_REGISTER + 1), |
421 | gen_int_mode (4, Pmode))); |
422 | spill_indirect_levels = 0; |
423 |  |
424 | while (memory_address_p (QImode, tem)) |
425 | { |
426 | spill_indirect_levels++; |
427 | tem = gen_rtx_MEM (Pmode, tem); |
428 | } |
429 |  |
430 | /* See if indirect addressing is valid for (MEM (SYMBOL_REF ...)). */ |
431 |  |
432 | tem = gen_rtx_MEM (Pmode, gen_rtx_SYMBOL_REF (Pmode, "foo")); |
433 | indirect_symref_ok = memory_address_p (QImode, tem); |
434 |  |
435 | /* See if reg+reg is a valid (and offsettable) address. */ |
436 |  |
437 | for (i = 0; i < FIRST_PSEUDO_REGISTER; i++) |
438 | { |
439 | tem = gen_rtx_PLUS (Pmode, |
440 | gen_rtx_REG (Pmode, HARD_FRAME_POINTER_REGNUM), |
441 | gen_rtx_REG (Pmode, i)); |
442 |  |
443 | /* This way, we make sure that reg+reg is an offsettable address. */ |
444 | tem = plus_constant (Pmode, tem, 4); |
445 |  |
446 | for (int mode = 0; mode < MAX_MACHINE_MODE; mode++) |
447 | if (!double_reg_address_ok[mode] |
448 | && memory_address_p ((enum machine_mode) mode, tem)) |
449 | double_reg_address_ok[mode] = 1; |
450 | } |
451 |  |
452 | /* Initialize obstack for our rtl allocation. */ |
453 | if (reload_startobj == NULL) |
454 | { |
455 | gcc_obstack_init (&reload_obstack); |
456 | reload_startobj = XOBNEWVAR (&reload_obstack, char, 0); |
457 | } |
458 |  |
459 | INIT_REG_SET (&spilled_pseudos); |
460 | INIT_REG_SET (&changed_allocation_pseudos); |
461 | INIT_REG_SET (&pseudos_counted); |
462 | } |
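Concretely (a hedged reading of the loop above): each iteration wraps the address in one more MEM, so spill_indirect_levels counts how deep the chain

    (mem (plus (reg) (const_int 4)))           ; level 0: a plain stack slot
    (mem (mem (plus (reg) (const_int 4))))     ; level 1: slot used as address

may go while still forming a valid address — i.e. how freely reload may use a spilled pseudo's stack slot directly inside another address.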
463 | |
464 | /* List of insn chains that are currently unused. */ |
465 | static class insn_chain *unused_insn_chains = 0; |
466 | |
467 | /* Allocate an empty insn_chain structure. */ |
468 | class insn_chain * |
469 | new_insn_chain (void) |
470 | { |
471 | class insn_chain *c; |
472 | |
473 | if (unused_insn_chains == 0) |
474 | { |
475 | c = XOBNEW (&reload_obstack, class insn_chain); |
476 | INIT_REG_SET (&c->live_throughout); |
477 | INIT_REG_SET (&c->dead_or_set); |
478 | } |
479 | else |
480 | { |
481 | c = unused_insn_chains; |
482 | unused_insn_chains = c->next; |
483 | } |
484 | c->is_caller_save_insn = 0; |
485 | c->need_operand_change = 0; |
486 | c->need_reload = 0; |
487 | c->need_elim = 0; |
488 | return c; |
489 | } |
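A hedged usage sketch for the allocator above (the insn and next fields exist on class insn_chain in reload.h; the front-insertion threading shown is a simplification, not the real builder's tail-pointer linking):

    class insn_chain *c = new_insn_chain ();
    c->insn = insn;                /* the rtx_insn * being described */
    c->next = reload_insn_chain;   /* illustrative front insertion */
    reload_insn_chain = c;

Note that chains are recycled through unused_insn_chains rather than freed, since they are carved out of reload_obstack.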
490 | |
491 | /* Small utility function to set all regs in hard reg set TO which are |
492 | allocated to pseudos in regset FROM. */ |
493 | |
494 | void |
495 | compute_use_by_pseudos (HARD_REG_SET *to, regset from) |
496 | { |
497 | unsigned int regno; |
498 | reg_set_iterator rsi; |
499 | |
500 | EXECUTE_IF_SET_IN_REG_SET (from, FIRST_PSEUDO_REGISTER, regno, rsi) |
501 | { |
502 | int r = reg_renumber[regno]; |
503 | |
504 | if (r < 0) |
505 | { |
506 | /* reload_combine uses the information from DF_LIVE_IN, |
507 | which might still contain registers that have not |
508 | actually been allocated since they have an |
509 | equivalence. */ |
510 | gcc_assert (ira_conflicts_p || reload_completed); |
511 | } |
512 | else |
513 | add_to_hard_reg_set (to, PSEUDO_REGNO_MODE (regno), r); |
514 | } |
515 | } |
516 | |
517 | /* Replace all pseudos found in LOC with their corresponding |
518 | equivalences. */ |
519 | |
520 | static void |
521 | replace_pseudos_in (rtx *loc, machine_mode mem_mode, rtx usage) |
522 | { |
523 | rtx x = *loc; |
524 | enum rtx_code code; |
525 | const char *fmt; |
526 | int i, j; |
527 | |
528 | if (! x) |
529 | return; |
530 | |
531 | code = GET_CODE (x); |
532 | if (code == REG) |
533 | { |
534 | unsigned int regno = REGNO (x); |
535 |  |
536 | if (regno < FIRST_PSEUDO_REGISTER) |
537 | return; |
538 |  |
539 | x = eliminate_regs_1 (x, mem_mode, usage, true, false); |
540 | if (x != *loc) |
541 | { |
542 | *loc = x; |
543 | replace_pseudos_in (loc, mem_mode, usage); |
544 | return; |
545 | } |
546 |  |
547 | if (reg_equiv_constant (regno)) |
548 | *loc = reg_equiv_constant (regno); |
549 | else if (reg_equiv_invariant (regno)) |
550 | *loc = reg_equiv_invariant (regno); |
551 | else if (reg_equiv_mem (regno)) |
552 | *loc = reg_equiv_mem (regno); |
553 | else if (reg_equiv_address (regno)) |
554 | *loc = gen_rtx_MEM (GET_MODE (x), reg_equiv_address (regno)); |
555 | else |
556 | { |
557 | gcc_assert (!REG_P (regno_reg_rtx[regno]) |
558 | || REGNO (regno_reg_rtx[regno]) != regno); |
559 | *loc = regno_reg_rtx[regno]; |
560 | } |
561 |  |
562 | return; |
563 | } |
564 | else if (code == MEM) |
565 | { |
566 | replace_pseudos_in (& XEXP (x, 0), GET_MODE (x), usage); |
567 | return; |
568 | } |
569 |  |
570 | /* Process each of our operands recursively. */ |
571 | fmt = GET_RTX_FORMAT (code); |
572 | for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++) |
573 | if (*fmt == 'e') |
574 | replace_pseudos_in (&XEXP (x, i), mem_mode, usage); |
575 | else if (*fmt == 'E') |
576 | for (j = 0; j < XVECLEN (x, i); j++) |
577 | replace_pseudos_in (& XVECEXP (x, i, j), mem_mode, usage); |
578 | } |
579 | |
580 | /* Determine if the current function has an exception receiver block |
581 | that reaches the exit block via non-exceptional edges. */ |
582 | |
583 | static bool |
584 | has_nonexceptional_receiver (void) |
585 | { |
586 | edge e; |
587 | edge_iterator ei; |
588 | basic_block *tos, *worklist, bb; |
589 | |
590 | /* If we're not optimizing, then just err on the safe side. */ |
591 | if (!optimize) |
592 | return true; |
593 | |
594 | /* First determine which blocks can reach exit via normal paths. */ |
595 | tos = worklist = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun) + 1); |
596 | |
597 | FOR_EACH_BB_FN (bb, cfun) |
598 | bb->flags &= ~BB_REACHABLE; |
599 | |
600 | /* Place the exit block on our worklist. */ |
601 | EXIT_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_REACHABLE; |
602 | *tos++ = EXIT_BLOCK_PTR_FOR_FN (cfun); |
603 | |
604 | /* Iterate: find everything reachable from what we've already seen. */ |
605 | while (tos != worklist) |
606 | { |
607 | bb = *--tos; |
608 | |
609 | FOR_EACH_EDGE (e, ei, bb->preds) |
610 | if (!(e->flags & EDGE_ABNORMAL)) |
611 | { |
612 | basic_block src = e->src; |
613 | |
614 | if (!(src->flags & BB_REACHABLE)) |
615 | { |
616 | src->flags |= BB_REACHABLE; |
617 | *tos++ = src; |
618 | } |
619 | } |
620 | } |
621 | free (worklist); |
622 | |
623 | /* Now see if there's a reachable block with an exceptional incoming |
624 | edge. */ |
625 | FOR_EACH_BB_FN (bb, cfun) |
626 | if (bb->flags & BB_REACHABLE && bb_has_abnormal_pred (bb)) |
627 | return true; |
628 | |
629 | /* No exceptional block reached exit unexceptionally. */ |
630 | return false; |
631 | } |
632 | |
633 | /* Grow (or allocate) the REG_EQUIVS array from its current size (which may be |
634 | zero elements) to MAX_REG_NUM elements. |
635 | |
636 | Initialize all new fields to NULL and update REG_EQUIVS_SIZE. */ |
637 | void |
638 | grow_reg_equivs (void) |
639 | { |
640 | int old_size = vec_safe_length (reg_equivs); |
641 | int max_regno = max_reg_num (); |
642 | int i; |
643 | reg_equivs_t ze; |
644 | |
645 | memset (&ze, 0, sizeof (reg_equivs_t)); |
646 | vec_safe_reserve (reg_equivs, max_regno); |
647 | for (i = old_size; i < max_regno; i++) |
648 | reg_equivs->quick_insert (i, ze); |
649 | } |
650 | |
651 | |
652 | /* Global variables used by reload and its subroutines. */ |
653 | |
654 | /* The current basic block while in calculate_elim_costs_all_insns. */ |
655 | static basic_block elim_bb; |
656 | |
657 | /* Set during calculate_needs if an insn needs register elimination. */ |
658 | static int something_needs_elimination; |
659 | /* Set during calculate_needs if an insn needs an operand changed. */ |
660 | static int something_needs_operands_changed; |
661 | /* Set by alter_regs if we spilled a register to the stack. */ |
662 | static bool something_was_spilled; |
663 | |
664 | /* Nonzero means we couldn't get enough spill regs. */ |
665 | static int failure; |
666 | |
667 | /* Temporary array of pseudo-register number. */ |
668 | static int *temp_pseudo_reg_arr; |
669 | |
670 | /* If a pseudo has no hard reg, delete the insns that made the equivalence. |
671 | If such an insn didn't set the register (i.e., it copied the register to |
672 | memory), just delete that insn instead of the equivalencing insn plus |
673 | anything now dead. If we call delete_dead_insn on that insn, we may |
674 | delete the insn that actually sets the register if the register dies |
675 | there and that is incorrect. */ |
676 | static void |
677 | remove_init_insns () |
678 | { |
679 | for (int i = FIRST_PSEUDO_REGISTER; i < max_regno; i++) |
680 | { |
681 | if (reg_renumber[i] < 0 && reg_equiv_init (i) != 0) |
682 | { |
683 | rtx list; |
684 | for (list = reg_equiv_init (i); list; list = XEXP (list, 1)) |
685 | { |
686 | rtx_insn *equiv_insn = as_a <rtx_insn *> (XEXP (list, 0)); |
687 |  |
688 | /* If we already deleted the insn or if it may trap, we can't |
689 | delete it. The latter case shouldn't happen, but can |
690 | if an insn has a variable address, gets a REG_EH_REGION |
691 | note added to it, and then gets converted into a load |
692 | from a constant address. */ |
693 | if (NOTE_P (equiv_insn) |
694 | || can_throw_internal (equiv_insn)) |
695 | ; |
696 | else if (reg_set_p (regno_reg_rtx[i], PATTERN (equiv_insn))) |
697 | delete_dead_insn (equiv_insn); |
698 | else |
699 | SET_INSN_DELETED (equiv_insn); |
700 | } |
701 | } |
702 | } |
703 | } |
704 | |
705 | /* Return true if remove_init_insns will delete INSN. */ |
706 | static bool |
707 | will_delete_init_insn_p (rtx_insn *insn) |
708 | { |
709 | rtx set = single_set (insn); |
710 | if (!set || !REG_P (SET_DEST (set))) |
711 | return false; |
712 | unsigned regno = REGNO (SET_DEST (set)); |
713 |  |
714 | if (can_throw_internal (insn)) |
715 | return false; |
716 |  |
717 | if (regno < FIRST_PSEUDO_REGISTER || reg_renumber[regno] >= 0) |
718 | return false; |
719 |  |
720 | for (rtx list = reg_equiv_init (regno); list; list = XEXP (list, 1)) |
721 | { |
722 | rtx equiv_insn = XEXP (list, 0); |
723 | if (equiv_insn == insn) |
724 | return true; |
725 | } |
726 | return false; |
727 | } |
728 | |
729 | /* Main entry point for the reload pass. |
730 | |
731 | FIRST is the first insn of the function being compiled. |
732 | |
733 | GLOBAL nonzero means we were called from global_alloc |
734 | and should attempt to reallocate any pseudoregs that we |
735 | displace from hard regs we will use for reloads. |
736 | If GLOBAL is zero, we do not have enough information to do that, |
737 | so any pseudo reg that is spilled must go to the stack. |
738 | |
739 | Return value is TRUE if reload likely left dead insns in the |
740 | stream and a DCE pass should be run to eliminate them. Else the |
741 | return value is FALSE. */ |
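For orientation, the expected call site lives in the IRA pass driver and looks roughly like the following (a hedged sketch, not a quotation from ira.cc):

    /* Run reload after register allocation; schedule DCE if it says it
       may have left dead insns in the stream.  */
    bool need_dce = reload (get_insns (), ira_conflicts_p);
    if (need_dce)
      run_fast_dce ();   /* the real condition also checks optimize */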
742 | |
743 | bool |
744 | reload (rtx_insn *first, int global) |
745 | { |
746 | int i, n; |
747 | rtx_insn *insn; |
748 | struct elim_table *ep; |
749 | basic_block bb; |
750 | bool inserted; |
751 | |
752 | /* Make sure even insns with volatile mem refs are recognizable. */ |
753 | init_recog (); |
754 | |
755 | failure = 0; |
756 | |
757 | reload_firstobj = XOBNEWVAR (&reload_obstack, char, 0); |
758 | |
759 | /* Make sure that the last insn in the chain |
760 | is not something that needs reloading. */ |
761 | emit_note (NOTE_INSN_DELETED); |
762 | |
763 | /* Enable find_equiv_reg to distinguish insns made by reload. */ |
764 | reload_first_uid = get_max_uid (); |
765 | |
766 | /* Initialize the secondary memory table. */ |
767 | clear_secondary_mem (); |
768 | |
769 | /* We don't have a stack slot for any spill reg yet. */ |
770 | memset (spill_stack_slot, 0, sizeof spill_stack_slot); |
771 | memset (spill_stack_slot_width, 0, sizeof spill_stack_slot_width); |
772 | |
773 | /* Initialize the save area information for caller-save, in case some |
774 | are needed. */ |
775 | init_save_areas (); |
776 | |
777 | /* Compute which hard registers are now in use |
778 | as homes for pseudo registers. |
779 | This is done here rather than (eg) in global_alloc |
780 | because this point is reached even if not optimizing. */ |
781 | for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++) |
782 | mark_home_live (i); |
783 | |
784 | /* A function that has a nonlocal label that can reach the exit |
785 | block via non-exceptional paths must save all call-saved |
786 | registers. */ |
787 | if (cfun->has_nonlocal_label |
788 | && has_nonexceptional_receiver ()) |
789 | crtl->saves_all_registers = 1; |
790 |  |
791 | if (crtl->saves_all_registers) |
792 | for (i = 0; i < FIRST_PSEUDO_REGISTER; i++) |
793 | if (! crtl->abi->clobbers_full_reg_p (i) |
794 | && ! fixed_regs[i] |
795 | && ! LOCAL_REGNO (i)) |
796 | df_set_regs_ever_live (i, true); |
797 | |
798 | /* Find all the pseudo registers that didn't get hard regs |
799 | but do have known equivalent constants or memory slots. |
800 | These include parameters (known equivalent to parameter slots) |
801 | and cse'd or loop-moved constant memory addresses. |
802 | |
803 | Record constant equivalents in reg_equiv_constant |
804 | so they will be substituted by find_reloads. |
805 | Record memory equivalents in reg_mem_equiv so they can |
806 | be substituted eventually by altering the REG-rtx's. */ |
807 | |
808 | grow_reg_equivs (); |
809 | reg_old_renumber = XCNEWVEC (short, max_regno); |
810 | memcpy (reg_old_renumber, reg_renumber, max_regno * sizeof (short)); |
811 | pseudo_forbidden_regs = XNEWVEC (HARD_REG_SET, max_regno); |
812 | pseudo_previous_regs = XCNEWVEC (HARD_REG_SET, max_regno); |
813 | |
814 | CLEAR_HARD_REG_SET (bad_spill_regs_global); |
815 | |
816 | init_eliminable_invariants (first, true); |
817 | init_elim_table (); |
818 | |
819 | /* Alter each pseudo-reg rtx to contain its hard reg number. Assign |
820 | stack slots to the pseudos that lack hard regs or equivalents. |
821 | Do not touch virtual registers. */ |
822 | |
823 | temp_pseudo_reg_arr = XNEWVEC (int, max_regno - LAST_VIRTUAL_REGISTER - 1); |
824 | for (n = 0, i = LAST_VIRTUAL_REGISTER + 1; i < max_regno; i++) |
825 | temp_pseudo_reg_arr[n++] = i; |
826 | |
827 | if (ira_conflicts_p) |
828 | /* Ask IRA to order pseudo-registers for better stack slot |
829 | sharing. */ |
830 | ira_sort_regnos_for_alter_reg (temp_pseudo_reg_arr, n, reg_max_ref_mode); |
831 | |
832 | for (i = 0; i < n; i++) |
833 | alter_reg (temp_pseudo_reg_arr[i], -1, false); |
834 | |
835 | /* If we have some registers we think can be eliminated, scan all insns to |
836 | see if there is an insn that sets one of these registers to something |
837 | other than itself plus a constant. If so, the register cannot be |
838 | eliminated. Doing this scan here eliminates an extra pass through the |
839 | main reload loop in the most common case where register elimination |
840 | cannot be done. */ |
841 | for (insn = first; insn && num_eliminable; insn = NEXT_INSN (insn)) |
842 | if (INSN_P (insn)) |
843 | note_pattern_stores (PATTERN (insn), mark_not_eliminable, NULL); |
844 | |
845 | maybe_fix_stack_asms (); |
846 | |
847 | insns_need_reload = 0; |
848 | something_needs_elimination = 0; |
849 | |
850 | /* Initialize to -1, which means take the first spill register. */ |
851 | last_spill_reg = -1; |
852 | |
853 | /* Spill any hard regs that we know we can't eliminate. */ |
854 | CLEAR_HARD_REG_SET (used_spill_regs); |
855 | /* There can be multiple ways to eliminate a register; |
856 | they should be listed adjacently. |
857 | Elimination for any register fails only if all possible ways fail. */ |
858 | for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ) |
859 | { |
860 | int from = ep->from; |
861 | int can_eliminate = 0; |
862 | do |
863 | { |
864 | can_eliminate |= ep->can_eliminate; |
865 | ep++; |
866 | } |
867 | while (ep < ®_eliminate[NUM_ELIMINABLE_REGS(sizeof (reg_eliminate_1) / sizeof ((reg_eliminate_1)[0]))] && ep->from == from); |
868 | if (! can_eliminate) |
869 | spill_hard_reg (from, 1); |
870 | } |
871 | |
872 | if (!HARD_FRAME_POINTER_IS_FRAME_POINTER && frame_pointer_needed) |
873 | spill_hard_reg (HARD_FRAME_POINTER_REGNUM, 1); |
874 | |
875 | finish_spills (global); |
876 | |
877 | /* From now on, we may need to generate moves differently. We may also |
878 | allow modifications of insns which cause them to not be recognized. |
879 | Any such modifications will be cleaned up during reload itself. */ |
880 | reload_in_progress = 1; |
881 | |
882 | /* This loop scans the entire function each go-round |
883 | and repeats until one repetition spills no additional hard regs. */ |
884 | for (;;) |
885 | { |
886 | int something_changed; |
887 | poly_int64 starting_frame_size; |
888 | |
889 | starting_frame_size = get_frame_size (); |
890 | something_was_spilled = false; |
891 | |
892 | set_initial_elim_offsets (); |
893 | set_initial_label_offsets (); |
894 | |
895 | /* For each pseudo register that has an equivalent location defined, |
896 | try to eliminate any eliminable registers (such as the frame pointer) |
897 | assuming initial offsets for the replacement register, which |
898 | is the normal case. |
899 | |
900 | If the resulting location is directly addressable, substitute |
901 | the MEM we just got directly for the old REG. |
902 | |
903 | If it is not addressable but is a constant or the sum of a hard reg |
904 | and constant, it is probably not addressable because the constant is |
905 | out of range, in that case record the address; we will generate |
906 | hairy code to compute the address in a register each time it is |
907 | needed. Similarly if it is a hard register, but one that is not |
908 | valid as an address register. |
909 | |
910 | If the location is not addressable, but does not have one of the |
911 | above forms, assign a stack slot. We have to do this to avoid the |
912 | potential of producing lots of reloads if, e.g., a location involves |
913 | a pseudo that didn't get a hard register and has an equivalent memory |
914 | location that also involves a pseudo that didn't get a hard register. |
915 | |
916 | Perhaps at some point we will improve reload_when_needed handling |
917 | so this problem goes away. But that's very hairy. */ |
918 | |
919 | for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++) |
920 | if (reg_renumber[i] < 0 && reg_equiv_memory_loc (i)) |
921 | { |
922 | rtx x = eliminate_regs (reg_equiv_memory_loc (i), VOIDmode, |
923 | NULL_RTX); |
924 |  |
925 | if (strict_memory_address_addr_space_p |
926 | (GET_MODE (regno_reg_rtx[i]), XEXP (x, 0), |
927 | MEM_ADDR_SPACE (x))) |
928 | reg_equiv_mem (i) = x, reg_equiv_address (i) = 0; |
929 | else if (CONSTANT_P (XEXP (x, 0)) |
930 | || (REG_P (XEXP (x, 0)) |
931 | && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER) |
932 | || (GET_CODE (XEXP (x, 0)) == PLUS |
933 | && REG_P (XEXP (XEXP (x, 0), 0)) |
934 | && (REGNO (XEXP (XEXP (x, 0), 0)) |
935 | < FIRST_PSEUDO_REGISTER) |
936 | && CONSTANT_P (XEXP (XEXP (x, 0), 1)))) |
937 | reg_equiv_address (i) = XEXP (x, 0), reg_equiv_mem (i) = 0; |
938 | else |
939 | { |
940 | /* Make a new stack slot. Then indicate that something |
941 | changed so we go back and recompute offsets for |
942 | eliminable registers because the allocation of memory |
943 | below might change some offset. reg_equiv_{mem,address} |
944 | will be set up for this pseudo on the next pass around |
945 | the loop. */ |
946 | reg_equiv_memory_loc (i) = 0; |
947 | reg_equiv_init (i) = 0; |
948 | alter_reg (i, -1, true); |
949 | } |
950 | } |
951 | |
952 | if (caller_save_needed) |
953 | setup_save_areas (); |
954 | |
955 | if (maybe_ne (starting_frame_size, 0) && crtl->stack_alignment_needed) |
956 | { |
957 | /* If we have a stack frame, we must align it now. The |
958 | stack size may be a part of the offset computation for |
959 | register elimination. So if this changes the stack size, |
960 | then repeat the elimination bookkeeping. We don't |
961 | realign when there is no stack, as that will cause a |
962 | stack frame when none is needed should |
963 | TARGET_STARTING_FRAME_OFFSET not be already aligned to |
964 | STACK_BOUNDARY. */ |
965 | assign_stack_local (BLKmode, 0, crtl->stack_alignment_needed); |
966 | } |
967 | /* If we allocated another stack slot, redo elimination bookkeeping. */ |
968 | if (something_was_spilled |
969 | || maybe_ne (starting_frame_size, get_frame_size ())) |
970 | { |
971 | if (update_eliminables_and_spill ()) |
972 | finish_spills (0); |
973 | continue; |
974 | } |
975 | |
976 | if (caller_save_needed) |
977 | { |
978 | save_call_clobbered_regs (); |
979 | /* That might have allocated new insn_chain structures. */ |
980 | reload_firstobj = XOBNEWVAR (&reload_obstack, char, 0); |
981 | } |
982 | |
983 | calculate_needs_all_insns (global); |
984 | |
985 | if (! ira_conflicts_p) |
986 | /* Don't do it for IRA. We need this info because we don't |
987 | change live_throughout and dead_or_set for chains when IRA |
988 | is used. */ |
989 | CLEAR_REG_SET (&spilled_pseudos); |
990 | |
991 | something_changed = 0; |
992 | |
993 | /* If we allocated any new memory locations, make another pass |
994 | since it might have changed elimination offsets. */ |
995 | if (something_was_spilled |
996 | || maybe_ne (starting_frame_size, get_frame_size ())) |
997 | something_changed = 1; |
998 | |
999 | /* Even if the frame size remained the same, we might still have |
1000 | changed elimination offsets, e.g. if find_reloads called |
1001 | force_const_mem requiring the back end to allocate a constant |
1002 | pool base register that needs to be saved on the stack. */ |
1003 | else if (!verify_initial_elim_offsets ()) |
1004 | something_changed = 1; |
1005 | |
1006 | if (update_eliminables_and_spill ()) |
1007 | { |
1008 | finish_spills (0); |
1009 | something_changed = 1; |
1010 | } |
1011 | else |
1012 | { |
1013 | select_reload_regs (); |
1014 | if (failure) |
1015 | goto failed; |
1016 | if (insns_need_reload) |
1017 | something_changed |= finish_spills (global); |
1018 | } |
1019 | |
1020 | if (! something_changed) |
1021 | break; |
1022 | |
1023 | if (caller_save_needed) |
1024 | delete_caller_save_insns (); |
1025 | |
1026 | obstack_free (&reload_obstack, reload_firstobj); |
1027 | } |
1028 | |
1029 | /* If global-alloc was run, notify it of any register eliminations we have |
1030 | done. */ |
1031 | if (global) |
1032 | for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++) |
1033 | if (ep->can_eliminate) |
1034 | mark_elimination (ep->from, ep->to); |
1035 | |
1036 | remove_init_insns (); |
1037 | |
1038 | /* Use the reload registers where necessary |
1039 | by generating move instructions to move the must-be-register |
1040 | values into or out of the reload registers. */ |
1041 | |
1042 | if (insns_need_reload != 0 || something_needs_elimination |
1043 | || something_needs_operands_changed) |
1044 | { |
1045 | poly_int64 old_frame_size = get_frame_size (); |
1046 | |
1047 | reload_as_needed (global); |
1048 | |
1049 | gcc_assert (known_eq (old_frame_size, get_frame_size ())); |
1050 | |
1051 | gcc_assert (verify_initial_elim_offsets ()); |
1052 | } |
1053 | |
1054 | /* If we were able to eliminate the frame pointer, show that it is no |
1055 | longer live at the start of any basic block. If it is live by |
1056 | virtue of being in a pseudo, that pseudo will be marked live |
1057 | and hence the frame pointer will be known to be live via that |
1058 | pseudo. */ |
1059 | |
1060 | if (! frame_pointer_needed) |
1061 | FOR_EACH_BB_FN (bb, cfun) |
1062 | bitmap_clear_bit (df_get_live_in (bb), HARD_FRAME_POINTER_REGNUM); |
1063 | |
1064 | /* Come here (with failure set nonzero) if we can't get enough spill |
1065 | regs. */ |
1066 | failed: |
1067 | |
1068 | CLEAR_REG_SET (&changed_allocation_pseudos); |
1069 | CLEAR_REG_SET (&spilled_pseudos); |
1070 | reload_in_progress = 0; |
1071 | |
1072 | /* Now eliminate all pseudo regs by modifying them into |
1073 | their equivalent memory references. |
1074 | The REG-rtx's for the pseudos are modified in place, |
1075 | so all insns that used to refer to them now refer to memory. |
1076 | |
1077 | For a reg that has a reg_equiv_address, all those insns |
1078 | were changed by reloading so that no insns refer to it any longer; |
1079 | but the DECL_RTL of a variable decl may refer to it, |
1080 | and if so this causes the debugging info to mention the variable. */ |
1081 | |
1082 | for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++) |
1083 | { |
1084 | rtx addr = 0; |
1085 | |
1086 | if (reg_equiv_mem (i)) |
1087 | addr = XEXP (reg_equiv_mem (i), 0); |
1088 | |
1089 | if (reg_equiv_address (i)) |
1090 | addr = reg_equiv_address (i); |
1091 | |
1092 | if (addr) |
1093 | { |
1094 | if (reg_renumber[i] < 0) |
1095 | { |
1096 | rtx reg = regno_reg_rtx[i]; |
1097 | |
1098 | REG_USERVAR_P (reg) = 0; |
1099 | PUT_CODE (reg, MEM); |
1100 | XEXP (reg, 0) = addr; |
1101 | if (reg_equiv_memory_loc (i)) |
1102 | MEM_COPY_ATTRIBUTES (reg, reg_equiv_memory_loc (i)); |
1103 | else |
1104 | MEM_ATTRS (reg) = 0; |
1105 | MEM_NOTRAP_P (reg) = 1; |
1106 | } |
1107 | else if (reg_equiv_mem (i)) |
1108 | XEXP (reg_equiv_mem (i), 0) = addr; |
1109 | } |
1110 | |
1111 | /* We don't want complex addressing modes in debug insns |
1112 | if simpler ones will do, so delegitimize equivalences |
1113 | in debug insns. */ |
1114 | if (MAY_HAVE_DEBUG_BIND_INSNS && reg_renumber[i] < 0) |
1115 | { |
1116 | rtx reg = regno_reg_rtx[i]; |
1117 | rtx equiv = 0; |
1118 | df_ref use, next; |
1119 | |
1120 | if (reg_equiv_constant (i)) |
1121 | equiv = reg_equiv_constant (i); |
1122 | else if (reg_equiv_invariant (i)) |
1123 | equiv = reg_equiv_invariant (i); |
1124 | else if (reg && MEM_P (reg)) |
1125 | equiv = targetm.delegitimize_address (reg); |
1126 | else if (reg && REG_P (reg) && (int)REGNO (reg) != i) |
1127 | equiv = reg; |
1128 | |
1129 | if (equiv == reg) |
1130 | continue; |
1131 | |
1132 | for (use = DF_REG_USE_CHAIN (i); use; use = next) |
1133 | { |
1134 | insn = DF_REF_INSN (use); |
1135 | |
1136 | /* Make sure the next ref is for a different instruction, |
1137 | so that we're not affected by the rescan. */ |
1138 | next = DF_REF_NEXT_REG (use); |
1139 | while (next && DF_REF_INSN (next) == insn) |
1140 | next = DF_REF_NEXT_REG (next); |
1141 | |
1142 | if (DEBUG_BIND_INSN_P (insn)) |
1143 | { |
1144 | if (!equiv) |
1145 | { |
1146 | INSN_VAR_LOCATION_LOC (insn) = gen_rtx_UNKNOWN_VAR_LOC (); |
1147 | df_insn_rescan_debug_internal (insn); |
1148 | } |
1149 | else |
1150 | INSN_VAR_LOCATION_LOC (insn) |
1151 | = simplify_replace_rtx (INSN_VAR_LOCATION_LOC (insn), |
1152 | reg, equiv); |
1153 | } |
1154 | } |
1155 | } |
1156 | } |
1157 | |
1158 | /* We must set reload_completed now since the cleanup_subreg_operands call |
1159 | below will re-recognize each insn and reload may have generated insns |
1160 | which are only valid during and after reload. */ |
1161 | reload_completed = 1; |
1162 | |
1163 | /* Make a pass over all the insns and delete all USEs which we inserted |
1164 | only to tag a REG_EQUAL note on them. Remove all REG_DEAD and REG_UNUSED |
1165 | notes. Delete all CLOBBER insns, except those that refer to the return |
1166 | value and the special mem:BLK CLOBBERs added to prevent the scheduler |
1167 | from misarranging variable-array code, and simplify (subreg (reg)) |
1168 | operands. Strip and regenerate REG_INC notes that may have been moved |
1169 | around. */ |
1170 | |
1171 | for (insn = first; insn; insn = NEXT_INSN (insn)) |
1172 | if (INSN_P (insn)) |
1173 | { |
1174 | rtx *pnote; |
1175 | |
1176 | if (CALL_P (insn)) |
1177 | replace_pseudos_in (& CALL_INSN_FUNCTION_USAGE (insn), |
1178 | VOIDmode, CALL_INSN_FUNCTION_USAGE (insn)); |
1179 | |
1180 | if ((GET_CODE (PATTERN (insn)) == USE |
1181 | /* We mark with QImode USEs introduced by reload itself. */ |
1182 | && (GET_MODE (insn) == QImode |
1183 | || find_reg_note (insn, REG_EQUAL, NULL_RTX))) |
1184 | || (GET_CODE (PATTERN (insn)) == CLOBBER |
1185 | && (!MEM_P (XEXP (PATTERN (insn), 0)) |
1186 | || GET_MODE (XEXP (PATTERN (insn), 0)) != BLKmode |
1187 | || (GET_CODE (XEXP (XEXP (PATTERN (insn), 0), 0)) != SCRATCH |
1188 | && XEXP (XEXP (PATTERN (insn), 0), 0) |
1189 | != stack_pointer_rtx)) |
1190 | && (!REG_P (XEXP (PATTERN (insn), 0)) |
1191 | || ! REG_FUNCTION_VALUE_P (XEXP (PATTERN (insn), 0))))) |
1192 | { |
1193 | delete_insn (insn); |
1194 | continue; |
1195 | } |
1196 | |
1197 | /* Some CLOBBERs may survive until here and still reference unassigned |
1198 | pseudos with const equivalent, which may in turn cause ICE in later |
1199 | passes if the reference remains in place. */ |
1200 | if (GET_CODE (PATTERN (insn)) == CLOBBER) |
1201 | replace_pseudos_in (& XEXP (PATTERN (insn), 0), |
1202 | VOIDmode, PATTERN (insn)); |
1203 | |
1204 | /* Discard obvious no-ops, even without -O. This optimization |
1205 | is fast and doesn't interfere with debugging. */ |
1206 | if (NONJUMP_INSN_P (insn) |
1207 | && GET_CODE (PATTERN (insn)) == SET |
1208 | && REG_P (SET_SRC (PATTERN (insn))) |
1209 | && REG_P (SET_DEST (PATTERN (insn))) |
1210 | && (REGNO (SET_SRC (PATTERN (insn))) |
1211 | == REGNO (SET_DEST (PATTERN (insn))))) |
1212 | { |
1213 | delete_insn (insn); |
1214 | continue; |
1215 | } |
1216 | |
1217 | pnote = &REG_NOTES (insn); |
1218 | while (*pnote != 0) |
1219 | { |
1220 | if (REG_NOTE_KIND (*pnote) == REG_DEAD |
1221 | || REG_NOTE_KIND (*pnote) == REG_UNUSED |
1222 | || REG_NOTE_KIND (*pnote) == REG_INC) |
1223 | *pnote = XEXP (*pnote, 1); |
1224 | else |
1225 | pnote = &XEXP (*pnote, 1); |
1226 | } |
1227 | |
1228 | if (AUTO_INC_DEC) |
1229 | add_auto_inc_notes (insn, PATTERN (insn)); |
1230 | |
1231 | /* Simplify (subreg (reg)) if it appears as an operand. */ |
1232 | cleanup_subreg_operands (insn); |
1233 | |
1234 | /* Clean up invalid ASMs so that they don't confuse later passes. |
1235 | See PR 21299. */ |
1236 | if (asm_noperands (PATTERN (insn)) >= 0) |
1237 | { |
1238 | extract_insn (insn); |
1239 | if (!constrain_operands (1, get_enabled_alternatives (insn))) |
1240 | { |
1241 | error_for_asm (insn, |
1242 | "%<asm%> operand has impossible constraints"); |
1243 | delete_insn (insn); |
1244 | continue; |
1245 | } |
1246 | } |
1247 | } |
1248 | |
1249 | free (temp_pseudo_reg_arr); |
1250 | |
1251 | /* Indicate that we no longer have known memory locations or constants. */ |
1252 | free_reg_equiv (); |
1253 | |
1254 | free (reg_max_ref_mode); |
1255 | free (reg_old_renumber); |
1256 | free (pseudo_previous_regs); |
1257 | free (pseudo_forbidden_regs); |
1258 | |
1259 | CLEAR_HARD_REG_SET (used_spill_regs); |
1260 | for (i = 0; i < n_spills; i++) |
1261 | SET_HARD_REG_BIT (used_spill_regs, spill_regs[i]); |
1262 | |
1263 | /* Free all the insn_chain structures at once. */ |
1264 | obstack_free (&reload_obstack, reload_startobj); |
1265 | unused_insn_chains = 0; |
1266 | |
1267 | inserted = fixup_abnormal_edges (); |
1268 | |
1269 | /* We've possibly turned single trapping insn into multiple ones. */ |
1270 | if (cfun->can_throw_non_call_exceptions) |
1271 | { |
1272 | auto_sbitmap blocks (last_basic_block_for_fn (cfun)); |
1273 | bitmap_ones (blocks); |
1274 | find_many_sub_basic_blocks (blocks); |
1275 | } |
1276 | |
1277 | if (inserted) |
1278 | commit_edge_insertions (); |
1279 | |
1280 | /* Replacing pseudos with their memory equivalents might have |
1281 | created shared rtx. Subsequent passes would get confused |
1282 | by this, so unshare everything here. */ |
1283 | unshare_all_rtl_again (first); |
1284 | |
1285 | #ifdef STACK_BOUNDARY |
1286 | /* init_emit has set the alignment of the hard frame pointer |
1287 | to STACK_BOUNDARY. It is very likely no longer valid if |
1288 | the hard frame pointer was used for register allocation. */ |
1289 | if (!frame_pointer_needed) |
1290 | REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = BITS_PER_UNIT; |
1291 | #endif |
1292 | |
1293 | substitute_stack.release (); |
1294 | |
1295 | gcc_assert (bitmap_empty_p (&spilled_pseudos)); |
1296 | |
1297 | reload_completed = !failure; |
1298 | |
1299 | return need_dce; |
1300 | } |
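
[Editorial note] The main loop above leans on an obstack "checkpoint" idiom: XOBNEWVAR (&reload_obstack, char, 0) grabs a zero-length object whose address marks the current high-water line, and a later obstack_free back to that marker releases every insn_chain and reload structure allocated since, all at once. A minimal self-contained sketch of the same idiom with plain glibc obstacks (variable names here are ours, not GCC's):

#include <obstack.h>
#include <stdlib.h>

#define obstack_chunk_alloc malloc
#define obstack_chunk_free free

int
main (void)
{
  struct obstack ob;
  obstack_init (&ob);

  /* Checkpoint: a zero-length allocation marks the current position.  */
  char *mark = (char *) obstack_alloc (&ob, 0);

  /* Allocate freely past the mark...  */
  int *scratch = (int *) obstack_alloc (&ob, 100 * sizeof (int));
  scratch[0] = 42;

  /* ...then roll back, freeing everything allocated after the mark.  */
  obstack_free (&ob, mark);

  obstack_free (&ob, NULL);   /* Release the whole obstack.  */
  return 0;
}
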
1301 | |
1302 | /* Yet another special case. Unfortunately, reg-stack forces people to |
1303 | write incorrect clobbers in asm statements. These clobbers must not |
1304 | cause the register to appear in bad_spill_regs, otherwise we'll call |
1305 | fatal_insn later. We clear the corresponding regnos in the live |
1306 | register sets to avoid this. |
1307 | The whole thing is rather sick, I'm afraid. */ |
1308 | |
1309 | static void |
1310 | maybe_fix_stack_asms (void) |
1311 | { |
1312 | #ifdef STACK_REGS |
1313 | const char *constraints[MAX_RECOG_OPERANDS]; |
1314 | machine_mode operand_mode[MAX_RECOG_OPERANDS]; |
1315 | class insn_chain *chain; |
1316 | |
1317 | for (chain = reload_insn_chain; chain != 0; chain = chain->next) |
1318 | { |
1319 | int i, noperands; |
1320 | HARD_REG_SET clobbered, allowed; |
1321 | rtx pat; |
1322 | |
1323 | if (! INSN_P (chain->insn) |
1324 | || (noperands = asm_noperands (PATTERN (chain->insn))) < 0) |
1325 | continue; |
1326 | pat = PATTERN (chain->insn); |
1327 | if (GET_CODE (pat) != PARALLEL) |
1328 | continue; |
1329 | |
1330 | CLEAR_HARD_REG_SET (clobbered); |
1331 | CLEAR_HARD_REG_SET (allowed); |
1332 | |
1333 | /* First, make a mask of all stack regs that are clobbered. */ |
1334 | for (i = 0; i < XVECLEN (pat, 0); i++) |
1335 | { |
1336 | rtx t = XVECEXP (pat, 0, i); |
1337 | if (GET_CODE (t) == CLOBBER && STACK_REG_P (XEXP (t, 0))) |
1338 | SET_HARD_REG_BIT (clobbered, REGNO (XEXP (t, 0))); |
1339 | } |
1340 | |
1341 | /* Get the operand values and constraints out of the insn. */ |
1342 | decode_asm_operands (pat, recog_data.operand, recog_data.operand_loc, |
1343 | constraints, operand_mode, NULL); |
1344 | |
1345 | /* For every operand, see what registers are allowed. */ |
1346 | for (i = 0; i < noperands; i++) |
1347 | { |
1348 | const char *p = constraints[i]; |
1349 | /* For every alternative, we compute the class of registers allowed |
1350 | for reloading in CLS, and merge its contents into the reg set |
1351 | ALLOWED. */ |
1352 | int cls = (int) NO_REGS; |
1353 | |
1354 | for (;;) |
1355 | { |
1356 | char c = *p; |
1357 | |
1358 | if (c == '\0' || c == ',' || c == '#') |
1359 | { |
1360 | /* End of one alternative - mark the regs in the current |
1361 | class, and reset the class. */ |
1362 | allowed |= reg_class_contents[cls]; |
1363 | cls = NO_REGS; |
1364 | p++; |
1365 | if (c == '#') |
1366 | do { |
1367 | c = *p++; |
1368 | } while (c != '\0' && c != ','); |
1369 | if (c == '\0') |
1370 | break; |
1371 | continue; |
1372 | } |
1373 | |
1374 | switch (c) |
1375 | { |
1376 | case 'g': |
1377 | cls = (int) reg_class_subunion[cls][(int) GENERAL_REGS]; |
1378 | break; |
1379 | |
1380 | default: |
1381 | enum constraint_num cn = lookup_constraint (p); |
1382 | if (insn_extra_address_constraint (cn)) |
1383 | cls = (int) reg_class_subunion[cls] |
1384 | [(int) base_reg_class (VOIDmode, ADDR_SPACE_GENERIC, |
1385 | ADDRESS, SCRATCH)]; |
1386 | else |
1387 | cls = (int) reg_class_subunion[cls] |
1388 | [reg_class_for_constraint (cn)]; |
1389 | break; |
1390 | } |
1391 | p += CONSTRAINT_LEN (c, p); |
1392 | } |
1393 | } |
1394 | /* Those of the registers which are clobbered, but allowed by the |
1395 | constraints, must be usable as reload registers. So clear them |
1396 | out of the life information. */ |
1397 | allowed &= clobbered; |
1398 | for (i = 0; i < FIRST_PSEUDO_REGISTER; i++) |
1399 | if (TEST_HARD_REG_BIT (allowed, i)) |
1400 | { |
1401 | CLEAR_REGNO_REG_SET (&chain->live_throughout, i); |
1402 | CLEAR_REGNO_REG_SET (&chain->dead_or_set, i); |
1403 | } |
1404 | } |
1405 | |
1406 | #endif |
1407 | } |
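
[Editorial note] The constraint walk above treats ',' as an alternative separator and '#' as "ignore the rest of this alternative", advancing by CONSTRAINT_LEN per constraint. A toy sketch of just that walking logic (not GCC code: it prints each alternative's letters instead of accumulating register classes, and advances one char at a time):

#include <stdio.h>

static void
walk_constraints (const char *p)
{
  for (;;)
    {
      char c = *p;
      if (c == '\0' || c == ',' || c == '#')
        {
          putchar ('\n');          /* End of one alternative.  */
          p++;
          if (c == '#')            /* Skip the disabled remainder.  */
            do
              c = *p++;
            while (c != '\0' && c != ',');
          if (c == '\0')
            break;
          continue;
        }
      putchar (c);                 /* One constraint character.  */
      p++;                         /* GCC uses CONSTRAINT_LEN here.  */
    }
}

int
main (void)
{
  walk_constraints ("r,m#q,g");    /* Prints r, m, g; 'q' is skipped.  */
  return 0;
}
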
1408 | |
1409 | /* Copy the global variables n_reloads and rld into the corresponding elts |
1410 | of CHAIN. */ |
1411 | static void |
1412 | copy_reloads (class insn_chain *chain) |
1413 | { |
1414 | chain->n_reloads = n_reloads; |
1415 | chain->rld = XOBNEWVEC (&reload_obstack, struct reload, n_reloads); |
1416 | memcpy (chain->rld, rld, n_reloads * sizeof (struct reload)); |
1417 | reload_insn_firstobj = XOBNEWVAR (&reload_obstack, char, 0); |
1418 | } |
1419 | |
1420 | /* Walk the chain of insns, and determine for each whether it needs reloads |
1421 | and/or eliminations. Build the corresponding insns_need_reload list, and |
1422 | set something_needs_elimination as appropriate. */ |
1423 | static void |
1424 | calculate_needs_all_insns (int global) |
1425 | { |
1426 | class insn_chain **pprev_reload = &insns_need_reload; |
1427 | class insn_chain *chain, *next = 0; |
1428 | |
1429 | something_needs_elimination = 0; |
1430 | |
1431 | reload_insn_firstobj = XOBNEWVAR (&reload_obstack, char, 0); |
1432 | for (chain = reload_insn_chain; chain != 0; chain = next) |
1433 | { |
1434 | rtx_insn *insn = chain->insn; |
1435 | |
1436 | next = chain->next; |
1437 | |
1438 | /* Clear out the shortcuts. */ |
1439 | chain->n_reloads = 0; |
1440 | chain->need_elim = 0; |
1441 | chain->need_reload = 0; |
1442 | chain->need_operand_change = 0; |
1443 | |
1444 | /* If this is a label, a JUMP_INSN, or has REG_NOTES (which might |
1445 | include REG_LABEL_OPERAND and REG_LABEL_TARGET), we need to see |
1446 | what effects this has on the known offsets at labels. */ |
1447 | |
1448 | if (LABEL_P (insn) || JUMP_P (insn) || JUMP_TABLE_DATA_P (insn) |
1449 | || (INSN_P (insn) && REG_NOTES (insn) != 0)) |
1450 | set_label_offsets (insn, insn, 0); |
1451 | |
1452 | if (INSN_P (insn)) |
1453 | { |
1454 | rtx old_body = PATTERN (insn); |
1455 | int old_code = INSN_CODE (insn); |
1456 | rtx old_notes = REG_NOTES (insn); |
1457 | int did_elimination = 0; |
1458 | int operands_changed = 0; |
1459 | |
1460 | /* Skip insns that only set an equivalence. */ |
1461 | if (will_delete_init_insn_p (insn)) |
1462 | continue; |
1463 | |
1464 | /* If needed, eliminate any eliminable registers. */ |
1465 | if (num_eliminable || num_eliminable_invariants) |
1466 | did_elimination = eliminate_regs_in_insn (insn, 0); |
1467 | |
1468 | /* Analyze the instruction. */ |
1469 | operands_changed = find_reloads (insn, 0, spill_indirect_levels, |
1470 | global, spill_reg_order); |
1471 | |
1472 | /* If a no-op set needs more than one reload, this is likely |
1473 | to be something that needs input address reloads. We |
1474 | can't get rid of this cleanly later, and it is of no use |
1475 | anyway, so discard it now. |
1476 | We only do this when expensive_optimizations is enabled, |
1477 | since this complements reload inheritance / output |
1478 | reload deletion, and it can make debugging harder. */ |
1479 | if (flag_expensive_optimizations && n_reloads > 1) |
1480 | { |
1481 | rtx set = single_set (insn); |
1482 | if (set |
1483 | && |
1484 | ((SET_SRC (set) == SET_DEST (set) |
1485 | && REG_P (SET_SRC (set)) |
1486 | && REGNO (SET_SRC (set)) >= FIRST_PSEUDO_REGISTER) |
1487 | || (REG_P (SET_SRC (set)) && REG_P (SET_DEST (set)) |
1488 | && reg_renumber[REGNO (SET_SRC (set))] < 0 |
1489 | && reg_renumber[REGNO (SET_DEST (set))] < 0 |
1490 | && reg_equiv_memory_loc (REGNO (SET_SRC (set))) != NULL |
1491 | && reg_equiv_memory_loc (REGNO (SET_DEST (set))) != NULL |
1492 | && rtx_equal_p (reg_equiv_memory_loc (REGNO (SET_SRC (set))), |
1493 | reg_equiv_memory_loc (REGNO (SET_DEST (set)))))) |
1494 | { |
1495 | if (ira_conflicts_p) |
1496 | /* Inform IRA about the insn deletion. */ |
1497 | ira_mark_memory_move_deletion (REGNO (SET_DEST (set)), |
1498 | REGNO (SET_SRC (set))); |
1499 | delete_insn (insn); |
1500 | /* Delete it from the reload chain. */ |
1501 | if (chain->prev) |
1502 | chain->prev->next = next; |
1503 | else |
1504 | reload_insn_chain = next; |
1505 | if (next) |
1506 | next->prev = chain->prev; |
1507 | chain->next = unused_insn_chains; |
1508 | unused_insn_chains = chain; |
1509 | continue; |
1510 | } |
1511 | } |
1512 | if (num_eliminable) |
1513 | update_eliminable_offsets (); |
1514 | |
1515 | /* Remember for later shortcuts which insns had any reloads or |
1516 | register eliminations. */ |
1517 | chain->need_elim = did_elimination; |
1518 | chain->need_reload = n_reloads > 0; |
1519 | chain->need_operand_change = operands_changed; |
1520 | |
1521 | /* Discard any register replacements done. */ |
1522 | if (did_elimination) |
1523 | { |
1524 | obstack_free (&reload_obstack, reload_insn_firstobj); |
1525 | PATTERN (insn) = old_body; |
1526 | INSN_CODE (insn) = old_code; |
1527 | REG_NOTES (insn) = old_notes; |
1528 | something_needs_elimination = 1; |
1529 | } |
1530 | |
1531 | something_needs_operands_changed |= operands_changed; |
1532 | |
1533 | if (n_reloads != 0) |
1534 | { |
1535 | copy_reloads (chain); |
1536 | *pprev_reload = chain; |
1537 | pprev_reload = &chain->next_need_reload; |
1538 | } |
1539 | } |
1540 | } |
1541 | *pprev_reload = 0; |
1542 | } |
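
[Editorial note] The deletion path above (the "Delete it from the reload chain" block) splices a node out of the doubly linked reload_insn_chain and pushes it onto the singly linked unused_insn_chains free list instead of freeing it, since the obstack owns the storage. A generic sketch of that unlink-and-recycle step, with invented types and names:

struct chain
{
  struct chain *prev, *next;
};

static struct chain *list_head;    /* Stands in for reload_insn_chain.  */
static struct chain *free_list;    /* Stands in for unused_insn_chains.  */

static void
unlink_and_recycle (struct chain *c)
{
  /* Splice C out of the doubly linked list.  */
  if (c->prev)
    c->prev->next = c->next;
  else
    list_head = c->next;
  if (c->next)
    c->next->prev = c->prev;

  /* Push it onto the free list; only NEXT is meaningful there.  */
  c->next = free_list;
  free_list = c;
}
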
1543 | |
1544 | /* This function is called from the register allocator to set up estimates |
1545 | for the cost of eliminating pseudos which have REG_EQUIV equivalences to |
1546 | an invariant. The structure is similar to calculate_needs_all_insns. */ |
1547 | |
1548 | void |
1549 | calculate_elim_costs_all_insns (void) |
1550 | { |
1551 | int *reg_equiv_init_cost; |
1552 | basic_block bb; |
1553 | int i; |
1554 | |
1555 | reg_equiv_init_cost = XCNEWVEC (int, max_regno); |
1556 | init_elim_table (); |
1557 | init_eliminable_invariants (get_insns (), false); |
1558 | |
1559 | set_initial_elim_offsets (); |
1560 | set_initial_label_offsets (); |
1561 | |
1562 | FOR_EACH_BB_FN (bb, cfun) |
1563 | { |
1564 | rtx_insn *insn; |
1565 | elim_bb = bb; |
1566 | |
1567 | FOR_BB_INSNS (bb, insn) |
1568 | { |
1569 | /* If this is a label, a JUMP_INSN, or has REG_NOTES (which might |
1570 | include REG_LABEL_OPERAND and REG_LABEL_TARGET), we need to see |
1571 | what effects this has on the known offsets at labels. */ |
1572 | |
1573 | if (LABEL_P (insn) || JUMP_P (insn) || JUMP_TABLE_DATA_P (insn) |
1574 | || (INSN_P (insn) && REG_NOTES (insn) != 0)) |
1575 | set_label_offsets (insn, insn, 0); |
1576 | |
1577 | if (INSN_P (insn)) |
1578 | { |
1579 | rtx set = single_set (insn); |
1580 | |
1581 | /* Skip insns that only set an equivalence. */ |
1582 | if (set && REG_P (SET_DEST (set)) |
1583 | && reg_renumber[REGNO (SET_DEST (set))] < 0 |
1584 | && (reg_equiv_constant (REGNO (SET_DEST (set))) |
1585 | || reg_equiv_invariant (REGNO (SET_DEST (set))))) |
1586 | { |
1587 | unsigned regno = REGNO (SET_DEST (set)); |
1588 | rtx_insn_list *init = reg_equiv_init (regno); |
1589 | if (init) |
1590 | { |
1591 | rtx t = eliminate_regs_1 (SET_SRC (set), VOIDmode, insn, |
1592 | false, true); |
1593 | machine_mode mode = GET_MODE (SET_DEST (set)); |
1594 | int cost = set_src_cost (t, mode, |
1595 | optimize_bb_for_speed_p (bb)); |
1596 | int freq = REG_FREQ_FROM_BB (bb); |
1597 | |
1598 | reg_equiv_init_cost[regno] = cost * freq; |
1599 | continue; |
1600 | } |
1601 | } |
1602 | /* If needed, eliminate any eliminable registers. */ |
1603 | if (num_eliminable || num_eliminable_invariants) |
1604 | elimination_costs_in_insn (insn); |
1605 | |
1606 | if (num_eliminable) |
1607 | update_eliminable_offsets (); |
1608 | } |
1609 | } |
1610 | } |
1611 | for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++) |
1612 | { |
1613 | if (reg_equiv_invariant (i)) |
1614 | { |
1615 | if (reg_equiv_init (i)) |
1616 | { |
1617 | int cost = reg_equiv_init_cost[i]; |
1618 | if (dump_file) |
1619 | fprintf (dump_file, |
1620 | "Reg %d has equivalence, initial gains %d\n", i, cost); |
1621 | if (cost != 0) |
1622 | ira_adjust_equiv_reg_cost (i, cost); |
1623 | } |
1624 | else |
1625 | { |
1626 | if (dump_file) |
1627 | fprintf (dump_file, |
1628 | "Reg %d had equivalence, but can't be eliminated\n", |
1629 | i); |
1630 | ira_adjust_equiv_reg_cost (i, 0); |
1631 | } |
1632 | } |
1633 | } |
1634 | |
1635 | free (reg_equiv_init_cost); |
1636 | free (offsets_known_at); |
1637 | free (offsets_at); |
1638 | offsets_at = NULL; |
1639 | offsets_known_at = NULL; |
1640 | } |
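
[Editorial note] A worked example of the estimate above, with invented numbers: if eliminating registers in the REG_EQUIV source of pseudo 90 leaves an expression whose set_src_cost is 4, and the defining block has REG_FREQ_FROM_BB equal to 500, then reg_equiv_init_cost[90] = 4 * 500 = 2000, and that is the "initial gains" figure reported to IRA through ira_adjust_equiv_reg_cost.
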
1641 | |
1642 | /* Comparison function for qsort to decide which of two reloads |
1643 | should be handled first. *P1 and *P2 are the reload numbers. */ |
1644 | |
1645 | static int |
1646 | reload_reg_class_lower (const void *r1p, const void *r2p) |
1647 | { |
1648 | int r1 = *(const short *) r1p, r2 = *(const short *) r2p; |
1649 | int t; |
1650 | |
1651 | /* Consider required reloads before optional ones. */ |
1652 | t = rld[r1].optional - rld[r2].optional; |
1653 | if (t != 0) |
1654 | return t; |
1655 | |
1656 | /* Count all solitary classes before non-solitary ones. */ |
1657 | t = ((reg_class_size[(int) rld[r2].rclass] == 1) |
1658 | - (reg_class_size[(int) rld[r1].rclass] == 1)); |
1659 | if (t != 0) |
1660 | return t; |
1661 | |
1662 | /* Aside from solitaires, consider all multi-reg groups first. */ |
1663 | t = rld[r2].nregs - rld[r1].nregs; |
1664 | if (t != 0) |
1665 | return t; |
1666 | |
1667 | /* Consider reloads in order of increasing reg-class number. */ |
1668 | t = (int) rld[r1].rclass - (int) rld[r2].rclass; |
1669 | if (t != 0) |
1670 | return t; |
1671 | |
1672 | /* If reloads are equally urgent, sort by reload number, |
1673 | so that the results of qsort leave nothing to chance. */ |
1674 | return r1 - r2; |
1675 | } |
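
[Editorial note] The final return r1 - r2 matters because qsort is not required to be stable, so a comparator that returns 0 for distinct elements leaves their relative order implementation-defined. A self-contained sketch of the same determinism trick (the key array is an invented stand-in for the urgency criteria above):

#include <stdio.h>
#include <stdlib.h>

static int key[4] = { 2, 1, 2, 1 };    /* Invented sort keys.  */

static int
cmp_key_then_index (const void *a, const void *b)
{
  short x = *(const short *) a, y = *(const short *) b;
  int t = key[x] - key[y];             /* Primary criterion.  */
  if (t != 0)
    return t;
  return x - y;                        /* Tie-break: leave nothing to chance.  */
}

int
main (void)
{
  short order[4] = { 0, 1, 2, 3 };
  qsort (order, 4, sizeof (short), cmp_key_then_index);
  for (int i = 0; i < 4; i++)
    printf ("%d ", order[i]);          /* Deterministically: 1 3 0 2  */
  return 0;
}
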
1676 | |
1677 | /* The cost of spilling each hard reg. */ |
1678 | static int spill_cost[FIRST_PSEUDO_REGISTER]; |
1679 | |
1680 | /* When spilling multiple hard registers, we use SPILL_COST for the first |
1681 | spilled hard reg and SPILL_ADD_COST for subsequent regs. SPILL_ADD_COST |
1682 | is charged only against the first hard reg of a multi-reg pseudo. */ |
1683 | static int spill_add_cost[FIRST_PSEUDO_REGISTER]; |
1684 | |
1685 | /* Map of hard regno to pseudo regno currently occupying the hard |
1686 | reg. */ |
1687 | static int hard_regno_to_pseudo_regno[FIRST_PSEUDO_REGISTER]; |
1688 | |
1689 | /* Update the spill cost arrays, considering that pseudo REG is live. */ |
1690 | |
1691 | static void |
1692 | count_pseudo (int reg) |
1693 | { |
1694 | int freq = REG_FREQ (reg); |
1695 | int r = reg_renumber[reg]; |
1696 | int nregs; |
1697 | |
1698 | /* Ignore spilled pseudo-registers which can be here only if IRA is used. */ |
1699 | if (ira_conflicts_p && r < 0) |
1700 | return; |
1701 | |
1702 | if (REGNO_REG_SET_P (&pseudos_counted, reg) |
1703 | || REGNO_REG_SET_P (&spilled_pseudos, reg)) |
1704 | return; |
1705 | |
1706 | SET_REGNO_REG_SET (&pseudos_counted, reg); |
1707 | |
1708 | gcc_assert (r >= 0); |
1709 | |
1710 | spill_add_cost[r] += freq; |
1711 | nregs = hard_regno_nregs (r, PSEUDO_REGNO_MODE (reg)); |
1712 | while (nregs-- > 0) |
1713 | { |
1714 | hard_regno_to_pseudo_regno[r + nregs] = reg; |
1715 | spill_cost[r + nregs] += freq; |
1716 | } |
1717 | } |
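
[Editorial note] A worked example with invented numbers: suppose pseudo 100 has REG_FREQ 30 and occupies the hard-reg pair 4/5. count_pseudo then adds 30 to spill_add_cost[4] once, adds 30 to each of spill_cost[4] and spill_cost[5], and sets hard_regno_to_pseudo_regno[4] and [5] to 100. When find_reg later prices a two-register group starting at reg 4, it sums spill_cost[4] + spill_add_cost[5] = 30 + 0, so the pseudo's frequency is charged once per candidate group rather than once per hard reg it covers.
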
1718 | |
1719 | /* Calculate the SPILL_COST and SPILL_ADD_COST arrays and determine the |
1720 | contents of BAD_SPILL_REGS for the insn described by CHAIN. */ |
1721 | |
1722 | static void |
1723 | order_regs_for_reload (class insn_chain *chain) |
1724 | { |
1725 | unsigned i; |
1726 | HARD_REG_SET used_by_pseudos; |
1727 | HARD_REG_SET used_by_pseudos2; |
1728 | reg_set_iterator rsi; |
1729 | |
1730 | bad_spill_regs = fixed_reg_set; |
1731 | |
1732 | memset (spill_cost, 0, sizeof spill_cost); |
1733 | memset (spill_add_cost, 0, sizeof spill_add_cost); |
1734 | for (i = 0; i < FIRST_PSEUDO_REGISTER; i++) |
1735 | hard_regno_to_pseudo_regno[i] = -1; |
1736 | |
1737 | /* Count number of uses of each hard reg by pseudo regs allocated to it |
1738 | and then order them by decreasing use. First exclude hard registers |
1739 | that are live in or across this insn. */ |
1740 | |
1741 | REG_SET_TO_HARD_REG_SET (used_by_pseudos, &chain->live_throughout); |
1742 | REG_SET_TO_HARD_REG_SET (used_by_pseudos2, &chain->dead_or_set); |
1743 | bad_spill_regs |= used_by_pseudos; |
1744 | bad_spill_regs |= used_by_pseudos2; |
1745 | |
1746 | /* Now find out which pseudos are allocated to these hard regs, and update |
1747 | the spill cost arrays. */ |
1748 | CLEAR_REG_SET (&pseudos_counted); |
1749 | |
1750 | EXECUTE_IF_SET_IN_REG_SET |
1751 | (&chain->live_throughout, FIRST_PSEUDO_REGISTER, i, rsi) |
1752 | { |
1753 | count_pseudo (i); |
1754 | } |
1755 | EXECUTE_IF_SET_IN_REG_SET |
1756 | (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, i, rsi) |
1757 | { |
1758 | count_pseudo (i); |
1759 | } |
1760 | CLEAR_REG_SET (&pseudos_counted); |
1761 | } |
1762 | |
1763 | /* Vector of reload-numbers showing the order in which the reloads should |
1764 | be processed. */ |
1765 | static short reload_order[MAX_RELOADS]; |
1766 | |
1767 | /* This is used to keep track of the spill regs used in one insn. */ |
1768 | static HARD_REG_SET used_spill_regs_local; |
1769 | |
1770 | /* We decided to spill hard register SPILLED, which has a size of |
1771 | SPILLED_NREGS. Determine how pseudo REG, which is live during the insn, |
1772 | is affected. We will add it to SPILLED_PSEUDOS if necessary, and we will |
1773 | update SPILL_COST/SPILL_ADD_COST. */ |
1774 | |
1775 | static void |
1776 | count_spilled_pseudo (int spilled, int spilled_nregs, int reg) |
1777 | { |
1778 | int freq = REG_FREQ (reg); |
1779 | int r = reg_renumber[reg]; |
1780 | int nregs; |
1781 | |
1782 | /* Ignore spilled pseudo-registers which can be here only if IRA is used. */ |
1783 | if (ira_conflicts_p && r < 0) |
1784 | return; |
1785 | |
1786 | gcc_assert (r >= 0); |
1787 | |
1788 | nregs = hard_regno_nregs (r, PSEUDO_REGNO_MODE (reg)); |
1789 | |
1790 | if (REGNO_REG_SET_P (&spilled_pseudos, reg) |
1791 | || spilled + spilled_nregs <= r || r + nregs <= spilled) |
1792 | return; |
1793 | |
1794 | SET_REGNO_REG_SET (&spilled_pseudos, reg); |
1795 | |
1796 | spill_add_cost[r] -= freq; |
1797 | while (nregs-- > 0) |
1798 | { |
1799 | hard_regno_to_pseudo_regno[r + nregs] = -1; |
1800 | spill_cost[r + nregs] -= freq; |
1801 | } |
1802 | } |
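
[Editorial note] The guard above is the standard disjointness test for two half-open ranges: [spilled, spilled + spilled_nregs) and [r, r + nregs) fail to intersect exactly when one ends at or before the other begins. The same test in isolation, as a sketch with invented names:

#include <stdbool.h>

static bool
ranges_overlap (int a_start, int a_len, int b_start, int b_len)
{
  /* Disjoint iff A ends at or before B starts, or vice versa.  */
  return !(a_start + a_len <= b_start || b_start + b_len <= a_start);
}
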
1803 | |
1804 | /* Find reload register to use for reload number ORDER. */ |
1805 | |
1806 | static int |
1807 | find_reg (class insn_chain *chain, int order) |
1808 | { |
1809 | int rnum = reload_order[order]; |
1810 | struct reload *rl = rld + rnum; |
1811 | int best_cost = INT_MAX; |
1812 | int best_reg = -1; |
1813 | unsigned int i, j, n; |
1814 | int k; |
1815 | HARD_REG_SET not_usable; |
1816 | HARD_REG_SET used_by_other_reload; |
1817 | reg_set_iterator rsi; |
1818 | static int regno_pseudo_regs[FIRST_PSEUDO_REGISTER]; |
1819 | static int best_regno_pseudo_regs[FIRST_PSEUDO_REGISTER]; |
1820 | |
1821 | not_usable = (bad_spill_regs |
1822 | | bad_spill_regs_global |
1823 | | ~reg_class_contents[rl->rclass]); |
1824 | |
1825 | CLEAR_HARD_REG_SET (used_by_other_reload); |
1826 | for (k = 0; k < order; k++) |
1827 | { |
1828 | int other = reload_order[k]; |
1829 | |
1830 | if (rld[other].regno >= 0 && reloads_conflict (other, rnum)) |
1831 | for (j = 0; j < rld[other].nregs; j++) |
1832 | SET_HARD_REG_BIT (used_by_other_reload, rld[other].regno + j); |
1833 | } |
1834 | |
1835 | for (i = 0; i < FIRST_PSEUDO_REGISTER; i++) |
1836 | { |
1837 | #ifdef REG_ALLOC_ORDER |
1838 | unsigned int regno = reg_alloc_order[i]; |
1839 | #else |
1840 | unsigned int regno = i; |
1841 | #endif |
1842 | |
1843 | if (! TEST_HARD_REG_BIT (not_usable, regno) |
1844 | && ! TEST_HARD_REG_BIT (used_by_other_reload, regno) |
1845 | && targetm.hard_regno_mode_ok (regno, rl->mode)) |
1846 | { |
1847 | int this_cost = spill_cost[regno]; |
1848 | int ok = 1; |
1849 | unsigned int this_nregs = hard_regno_nregs (regno, rl->mode); |
1850 | |
1851 | for (j = 1; j < this_nregs; j++) |
1852 | { |
1853 | this_cost += spill_add_cost[regno + j]; |
1854 | if ((TEST_HARD_REG_BIT (not_usable, regno + j)) |
1855 | || TEST_HARD_REG_BIT (used_by_other_reload, regno + j)) |
1856 | ok = 0; |
1857 | } |
1858 | if (! ok) |
1859 | continue; |
1860 | |
1861 | if (ira_conflicts_p) |
1862 | { |
1863 | /* Ask IRA to find a better pseudo-register for |
1864 | spilling. */ |
1865 | for (n = j = 0; j < this_nregs; j++) |
1866 | { |
1867 | int r = hard_regno_to_pseudo_regno[regno + j]; |
1868 | |
1869 | if (r < 0) |
1870 | continue; |
1871 | if (n == 0 || regno_pseudo_regs[n - 1] != r) |
1872 | regno_pseudo_regs[n++] = r; |
1873 | } |
1874 | regno_pseudo_regs[n++] = -1; |
1875 | if (best_reg < 0 |
1876 | || ira_better_spill_reload_regno_p (regno_pseudo_regs, |
1877 | best_regno_pseudo_regs, |
1878 | rl->in, rl->out, |
1879 | chain->insn)) |
1880 | { |
1881 | best_reg = regno; |
1882 | for (j = 0;; j++) |
1883 | { |
1884 | best_regno_pseudo_regs[j] = regno_pseudo_regs[j]; |
1885 | if (regno_pseudo_regs[j] < 0) |
1886 | break; |
1887 | } |
1888 | } |
1889 | continue; |
1890 | } |
1891 | |
1892 | if (rl->in && REG_P (rl->in) && REGNO (rl->in) == regno) |
1893 | this_cost--; |
1894 | if (rl->out && REG_P (rl->out) && REGNO (rl->out) == regno) |
1895 | this_cost--; |
1896 | if (this_cost < best_cost |
1897 | /* Among registers with equal cost, prefer caller-saved ones, or |
1898 | use REG_ALLOC_ORDER if it is defined. */ |
1899 | || (this_cost == best_cost |
1900 | #ifdef REG_ALLOC_ORDER |
1901 | && (inv_reg_alloc_order[regno] |
1902 | < inv_reg_alloc_order[best_reg]) |
1903 | #else |
1904 | && crtl->abi->clobbers_full_reg_p (regno) |
1905 | && !crtl->abi->clobbers_full_reg_p (best_reg) |
1906 | #endif |
1907 | )) |
1908 | { |
1909 | best_reg = regno; |
1910 | best_cost = this_cost; |
1911 | } |
1912 | } |
1913 | } |
1914 | if (best_reg == -1) |
1915 | return 0; |
1916 | |
1917 | if (dump_file) |
1918 | fprintf (dump_file, "Using reg %d for reload %d\n", best_reg, rnum); |
1919 | |
1920 | rl->nregs = hard_regno_nregs (best_reg, rl->mode); |
1921 | rl->regno = best_reg; |
1922 | |
1923 | EXECUTE_IF_SET_IN_REG_SET |
1924 | (&chain->live_throughout, FIRST_PSEUDO_REGISTER, j, rsi) |
1925 | { |
1926 | count_spilled_pseudo (best_reg, rl->nregs, j); |
1927 | } |
1928 | |
1929 | EXECUTE_IF_SET_IN_REG_SET |
1930 | (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, j, rsi) |
1931 | { |
1932 | count_spilled_pseudo (best_reg, rl->nregs, j); |
1933 | } |
1934 | |
1935 | for (i = 0; i < rl->nregs; i++) |
1936 | { |
1937 | gcc_assert (spill_cost[best_reg + i] == 0); |
1938 | gcc_assert (spill_add_cost[best_reg + i] == 0); |
1939 | gcc_assert (hard_regno_to_pseudo_regno[best_reg + i] == -1); |
1940 | SET_HARD_REG_BIT (used_spill_regs_local, best_reg + i); |
1941 | } |
1942 | return 1; |
1943 | } |
1944 | |
1945 | /* Find more reload regs to satisfy the remaining need of an insn, which |
1946 | is given by CHAIN. |
1947 | Do it by ascending class number, since otherwise a reg |
1948 | might be spilled for a big class and might fail to count |
1949 | for a smaller class even though it belongs to that class. */ |
1950 | |
1951 | static void |
1952 | find_reload_regs (class insn_chain *chain) |
1953 | { |
1954 | int i; |
1955 | |
1956 | /* In order to be certain of getting the registers we need, |
1957 | we must sort the reloads into order of increasing register class. |
1958 | Then our grabbing of reload registers will parallel the process |
1959 | that provided the reload registers. */ |
1960 | for (i = 0; i < chain->n_reloads; i++) |
1961 | { |
1962 | /* Show whether this reload already has a hard reg. */ |
1963 | if (chain->rld[i].reg_rtx) |
1964 | { |
1965 | chain->rld[i].regno = REGNO (chain->rld[i].reg_rtx); |
1966 | chain->rld[i].nregs = REG_NREGS (chain->rld[i].reg_rtx); |
1967 | } |
1968 | else |
1969 | chain->rld[i].regno = -1; |
1970 | reload_order[i] = i; |
1971 | } |
1972 | |
1973 | n_reloads = chain->n_reloads; |
1974 | memcpy (rld, chain->rld, n_reloads * sizeof (struct reload)); |
1975 | |
1976 | CLEAR_HARD_REG_SET (used_spill_regs_local); |
1977 | |
1978 | if (dump_file) |
1979 | fprintf (dump_file, "Spilling for insn %d.\n", INSN_UID (chain->insn)); |
1980 | |
1981 | qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower); |
1982 | |
1983 | /* Compute the order of preference for hard registers to spill. */ |
1984 | |
1985 | order_regs_for_reload (chain); |
1986 | |
1987 | for (i = 0; i < n_reloads; i++) |
1988 | { |
1989 | int r = reload_order[i]; |
1990 | |
1991 | /* Ignore reloads that got marked inoperative. */ |
1992 | if ((rld[r].out != 0 || rld[r].in != 0 || rld[r].secondary_p) |
1993 | && ! rld[r].optional |
1994 | && rld[r].regno == -1) |
1995 | if (! find_reg (chain, i)) |
1996 | { |
1997 | if (dump_file) |
1998 | fprintf (dump_file, "reload failure for reload %d\n", r); |
1999 | spill_failure (chain->insn, rld[r].rclass); |
2000 | failure = 1; |
2001 | return; |
2002 | } |
2003 | } |
2004 | |
2005 | chain->used_spill_regs = used_spill_regs_local; |
2006 | used_spill_regs |= used_spill_regs_local; |
2007 | |
2008 | memcpy (chain->rld, rld, n_reloads * sizeof (struct reload)); |
2009 | } |
2010 | |
2011 | static void |
2012 | select_reload_regs (void) |
2013 | { |
2014 | class insn_chain *chain; |
2015 | |
2016 | /* Try to satisfy the needs for each insn. */ |
2017 | for (chain = insns_need_reload; chain != 0; |
2018 | chain = chain->next_need_reload) |
2019 | find_reload_regs (chain); |
2020 | } |
2021 | |
2022 | /* Delete all insns that were inserted by emit_caller_save_insns during |
2023 | this iteration. */ |
2024 | static void |
2025 | delete_caller_save_insns (void) |
2026 | { |
2027 | class insn_chain *c = reload_insn_chain; |
2028 | |
2029 | while (c != 0) |
2030 | { |
2031 | while (c != 0 && c->is_caller_save_insn) |
2032 | { |
2033 | class insn_chain *next = c->next; |
2034 | rtx_insn *insn = c->insn; |
2035 | |
2036 | if (c == reload_insn_chain) |
2037 | reload_insn_chain = next; |
2038 | delete_insn (insn); |
2039 | |
2040 | if (next) |
2041 | next->prev = c->prev; |
2042 | if (c->prev) |
2043 | c->prev->next = next; |
2044 | c->next = unused_insn_chains; |
2045 | unused_insn_chains = c; |
2046 | c = next; |
2047 | } |
2048 | if (c != 0) |
2049 | c = c->next; |
2050 | } |
2051 | } |
2052 | |
2053 | /* Handle the failure to find a register to spill. |
2054 | INSN should be one of the insns which needed this particular spill reg. */ |
2055 | |
2056 | static void |
2057 | spill_failure (rtx_insn *insn, enum reg_class rclass) |
2058 | { |
2059 | if (asm_noperands (PATTERN (insn)) >= 0) |
2060 | error_for_asm (insn, "cannot find a register in class %qs while " |
2061 | "reloading %<asm%>", |
2062 | reg_class_names[rclass]); |
2063 | else |
2064 | { |
2065 | error ("unable to find a register to spill in class %qs", |
2066 | reg_class_names[rclass]); |
2067 | |
2068 | if (dump_file) |
2069 | { |
2070 | fprintf (dump_file, "\nReloads for insn # %d\n", INSN_UID (insn)); |
2071 | debug_reload_to_stream (dump_file); |
2072 | } |
2073 | fatal_insn ("this is the insn:", insn); |
2074 | } |
2075 | } |
2076 | |
2077 | /* Delete an unneeded INSN and any previous insns whose sole purpose is loading |
2078 | data that is dead in INSN. */ |
2079 | |
2080 | static void |
2081 | delete_dead_insn (rtx_insn *insn) |
2082 | { |
2083 | rtx_insn *prev = prev_active_insn (insn); |
2084 | rtx prev_dest; |
2085 | |
2086 | /* If the previous insn sets a register that dies in our insn make |
2087 | a note that we want to run DCE immediately after reload. |
2088 | |
2089 | We used to delete the previous insn & recurse, but that's wrong for |
2090 | block local equivalences. Instead of trying to figure out the exact |
2091 | circumstances where we can delete the potentially dead insns, just |
2092 | let DCE do the job. */ |
2093 | if (prev && BLOCK_FOR_INSN (prev) == BLOCK_FOR_INSN (insn) |
2094 | && GET_CODE (PATTERN (prev)) == SET |
2095 | && (prev_dest = SET_DEST (PATTERN (prev)), REG_P (prev_dest)) |
2096 | && reg_mentioned_p (prev_dest, PATTERN (insn)) |
2097 | && find_regno_note (insn, REG_DEAD, REGNO (prev_dest)) |
2098 | && ! side_effects_p (SET_SRC (PATTERN (prev)))) |
2099 | need_dce = 1; |
2100 | |
2101 | SET_INSN_DELETED (insn)set_insn_deleted (insn);; |
2102 | } |
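| /* Editorial sketch (hypothetical RTL, not in the original source): |
|    given the pair |
|        prev:  (set (reg:SI 90) (mem:SI (reg:SI sp))) |
|        insn:  (set (mem:SI (reg:SI fp)) (reg:SI 90))  with a REG_DEAD |
|               note for reg 90 |
|    deleting INSN leaves PREV computing a value that dies immediately, so |
|    need_dce is set and a post-reload DCE pass can remove PREV safely.  */ |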
2103 | |
2104 | /* Modify the home of pseudo-reg I. |
2105 | The new home is present in reg_renumber[I]. |
2106 | |
2107 | FROM_REG may be the hard reg that the pseudo-reg is being spilled from; |
2108 | or it may be -1, meaning there is none or it is not relevant. |
2109 | This is used so that all pseudos spilled from a given hard reg |
2110 | can share one stack slot. */ |
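| /* Editorial note (illustration, not in the original source): if, say, |
|    pseudos 101 and 102 are both spilled from hard reg 3, both calls |
|    arrive with FROM_REG == 3 and can be given the same slot recorded in |
|    spill_stack_slot[3], provided it is big and aligned enough; the |
|    register numbers here are hypothetical.  */ |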
2111 | |
2112 | static void |
2113 | alter_reg (int i, int from_reg, bool dont_share_p) |
2114 | { |
2115 | /* When outputting an inline function, this can happen |
2116 | for a reg that isn't actually used. */ |
2117 | if (regno_reg_rtx[i] == 0) |
2118 | return; |
2119 | |
2120 | /* If the reg got changed to a MEM at rtl-generation time, |
2121 | ignore it. */ |
2122 | if (!REG_P (regno_reg_rtx[i])) |
2123 | return; |
2124 | |
2125 | /* Modify the reg-rtx to contain the new hard reg |
2126 | number or else to contain its pseudo reg number. */ |
2127 | SET_REGNO (regno_reg_rtx[i], |
2128 | reg_renumber[i] >= 0 ? reg_renumber[i] : i); |
2129 | |
2130 | /* If we have a pseudo that is needed but has no hard reg or equivalent, |
2131 | allocate a stack slot for it. */ |
2132 | |
2133 | if (reg_renumber[i] < 0 |
2134 | && REG_N_REFS (i) > 0 |
2135 | && reg_equiv_constant (i) == 0 |
2136 | && (reg_equiv_invariant (i) == 0 |
2137 | || reg_equiv_init (i) == 0) |
2138 | && reg_equiv_memory_loc (i) == 0) |
2139 | { |
2140 | rtx x = NULL_RTX; |
2141 | machine_mode mode = GET_MODE (regno_reg_rtx[i]); |
2142 | poly_uint64 inherent_size = GET_MODE_SIZE (mode); |
2143 | unsigned int inherent_align = GET_MODE_ALIGNMENT (mode); |
2144 | machine_mode wider_mode = wider_subreg_mode (mode, reg_max_ref_mode[i]); |
2145 | poly_uint64 total_size = GET_MODE_SIZE (wider_mode); |
2146 | /* ??? Seems strange to derive the minimum alignment from the size, |
2147 | but that's the traditional behavior. For polynomial-size modes, |
2148 | the natural extension is to use the minimum possible size. */ |
2149 | unsigned int min_align |
2150 | = constant_lower_bound (GET_MODE_BITSIZE (reg_max_ref_mode[i])); |
2151 | poly_int64 adjust = 0; |
2152 | |
2153 | something_was_spilled = true; |
2154 | |
2155 | if (ira_conflicts_p) |
2156 | { |
2157 | /* Mark the spill for IRA. */ |
2158 | SET_REGNO_REG_SET (&spilled_pseudos, i); |
2159 | if (!dont_share_p) |
2160 | x = ira_reuse_stack_slot (i, inherent_size, total_size); |
2161 | } |
2162 | |
2163 | if (x) |
2164 | ; |
2165 | |
2166 | /* Each pseudo reg has an inherent size which comes from its own mode, |
2167 | and a total size which provides room for paradoxical subregs |
2168 | which refer to the pseudo reg in wider modes. |
2169 | |
2170 | We can use a slot already allocated if it provides both |
2171 | enough inherent space and enough total space. |
2172 | Otherwise, we allocate a new slot, making sure that it has no less |
2173 | inherent space, and no less total space, than the previous slot. */ |
2174 | else if (from_reg == -1 || (!dont_share_p && ira_conflicts_p)) |
2175 | { |
2176 | rtx stack_slot; |
2177 | |
2178 | /* The sizes are taken from a subreg operation, which guarantees |
2179 | that they're ordered. */ |
2180 | gcc_checking_assert (ordered_p (total_size, inherent_size)); |
2181 |  |
2182 | /* No known place to spill from => no slot to reuse. */ |
2183 | x = assign_stack_local (mode, total_size, |
2184 | min_align > inherent_align |
2185 | || maybe_gt (total_size, inherent_size) |
2186 | ? -1 : 0); |
2187 | |
2188 | stack_slot = x; |
2189 | |
2190 | /* Cancel the big-endian correction done in assign_stack_local. |
2191 | Get the address of the beginning of the slot. This is so we |
2192 | can do a big-endian correction unconditionally below. */ |
2193 | if (BYTES_BIG_ENDIAN) |
2194 | { |
2195 | adjust = inherent_size - total_size; |
2196 | if (maybe_ne (adjust, 0)) |
2197 | { |
2198 | poly_uint64 total_bits = total_size * BITS_PER_UNIT; |
2199 | machine_mode mem_mode |
2200 | = int_mode_for_size (total_bits, 1).else_blk (); |
2201 | stack_slot = adjust_address_nv (x, mem_mode, adjust); |
2202 | } |
2203 | } |
2204 | |
2205 | if (! dont_share_p && ira_conflicts_p) |
2206 | /* Inform IRA about allocating a new stack slot. */ |
2207 | ira_mark_new_stack_slot (stack_slot, i, total_size); |
2208 | } |
2209 | |
2210 | /* Reuse a stack slot if possible. */ |
2211 | else if (spill_stack_slot[from_reg] != 0 |
2212 | && known_ge (spill_stack_slot_width[from_reg], total_size) |
2213 | && known_ge (GET_MODE_SIZE |
2214 | (GET_MODE (spill_stack_slot[from_reg])), |
2215 | inherent_size) |
2216 | && MEM_ALIGN (spill_stack_slot[from_reg]) >= min_align) |
2217 | x = spill_stack_slot[from_reg]; |
2218 | |
2219 | /* Allocate a bigger slot. */ |
2220 | else |
2221 | { |
2222 | /* Compute maximum size needed, both for inherent size |
2223 | and for total size. */ |
2224 | rtx stack_slot; |
2225 | |
2226 | if (spill_stack_slot[from_reg]) |
2227 | { |
2228 | if (partial_subreg_p (mode, |
2229 | GET_MODE (spill_stack_slot[from_reg]))) |
2230 | mode = GET_MODE (spill_stack_slot[from_reg]); |
2231 | total_size = ordered_max (total_size, |
2232 | spill_stack_slot_width[from_reg]); |
2233 | if (MEM_ALIGN (spill_stack_slot[from_reg]) > min_align) |
2234 | min_align = MEM_ALIGN (spill_stack_slot[from_reg]); |
2235 | } |
2236 | |
2237 | /* The sizes are taken from a subreg operation, which guarantees |
2238 | that they're ordered. */ |
2239 | gcc_checking_assert (ordered_p (total_size, inherent_size)); |
2240 |  |
2241 | /* Make a slot with that size. */ |
2242 | x = assign_stack_local (mode, total_size, |
2243 | min_align > inherent_align |
2244 | || maybe_gt (total_size, inherent_size) |
2245 | ? -1 : 0); |
2246 | stack_slot = x; |
2247 | |
2248 | /* Cancel the big-endian correction done in assign_stack_local. |
2249 | Get the address of the beginning of the slot. This is so we |
2250 | can do a big-endian correction unconditionally below. */ |
2251 | if (BYTES_BIG_ENDIAN) |
2252 | { |
2253 | adjust = GET_MODE_SIZE (mode) - total_size; |
2254 | if (maybe_ne (adjust, 0)) |
2255 | { |
2256 | poly_uint64 total_bits = total_size * BITS_PER_UNIT; |
2257 | machine_mode mem_mode |
2258 | = int_mode_for_size (total_bits, 1).else_blk (); |
2259 | stack_slot = adjust_address_nv (x, mem_mode, adjust); |
2260 | } |
2261 | } |
2262 | |
2263 | spill_stack_slot[from_reg] = stack_slot; |
2264 | spill_stack_slot_width[from_reg] = total_size; |
2265 | } |
2266 | |
2267 | /* On a big endian machine, the "address" of the slot |
2268 | is the address of the low part that fits its inherent mode. */ |
2269 | adjust += subreg_size_lowpart_offset (inherent_size, total_size); |
2270 | |
2271 | /* If we have any adjustment to make, or if the stack slot is the |
2272 | wrong mode, make a new stack slot. */ |
2273 | x = adjust_address_nv (x, GET_MODE (regno_reg_rtx[i]), adjust); |
2274 | |
2275 | /* Set all of the memory attributes as appropriate for a spill. */ |
2276 | set_mem_attrs_for_spill (x); |
2277 | |
2278 | /* Save the stack slot for later. */ |
2279 | reg_equiv_memory_loc (i) = x; |
2280 | } |
2281 | } |
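| /* Editorial worked example (hypothetical values): an SImode pseudo whose |
|    widest reference is a paradoxical DImode subreg gets inherent_size == 4 |
|    and total_size == 8, so an 8-byte slot is allocated.  On a |
|    little-endian target subreg_size_lowpart_offset (4, 8) is 0 and the |
|    slot address is used as-is; on a big-endian target the SImode lowpart |
|    sits 4 bytes into the slot, which is exactly the correction that |
|    ADJUST accumulates above.  */ |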
2282 | |
2283 | /* Mark the slots in regs_ever_live for the hard regs used by |
2284 | pseudo-reg number REGNO, accessed in MODE. */ |
2285 | |
2286 | static void |
2287 | mark_home_live_1 (int regno, machine_mode mode) |
2288 | { |
2289 | int i, lim; |
2290 | |
2291 | i = reg_renumber[regno]; |
2292 | if (i < 0) |
2293 | return; |
2294 | lim = end_hard_regno (mode, i); |
2295 | while (i < lim) |
2296 | df_set_regs_ever_live (i++, true); |
2297 | } |
2298 | |
2299 | /* Mark the slots in regs_ever_live for the hard regs |
2300 | used by pseudo-reg number REGNO. */ |
2301 | |
2302 | void |
2303 | mark_home_live (int regno) |
2304 | { |
2305 | if (reg_renumber[regno] >= 0) |
2306 | mark_home_live_1 (regno, PSEUDO_REGNO_MODE (regno)); |
2307 | } |
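| /* Editorial sketch (hypothetical numbers): if reg_renumber[REGNO] is 8 |
|    and the pseudo's mode is DFmode on a target where that mode spans two |
|    hard regs, end_hard_regno returns 10 and hard regs 8 and 9 are marked |
|    in regs_ever_live.  */ |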
2308 | |
2309 | /* This function handles the tracking of elimination offsets around branches. |
2310 | |
2311 | X is a piece of RTL being scanned. |
2312 | |
2313 | INSN is the insn that it came from, if any. |
2314 | |
2315 | INITIAL_P is nonzero if we are to set the offset to be the initial |
2316 | offset and zero if we are setting the offset of the label to be the |
2317 | current offset. */ |
2318 | |
2319 | static void |
2320 | set_label_offsets (rtx x, rtx_insn *insn, int initial_p) |
2321 | { |
2322 | enum rtx_code code = GET_CODE (x); |
2323 | rtx tem; |
2324 | unsigned int i; |
2325 | struct elim_table *p; |
2326 | |
2327 | switch (code) |
2328 | { |
2329 | case LABEL_REF: |
2330 | if (LABEL_REF_NONLOCAL_P (x)) |
2331 | return; |
2332 | |
2333 | x = label_ref_label (x); |
2334 | |
2335 | /* fall through */ |
2336 | |
2337 | case CODE_LABEL: |
2338 | /* If we know nothing about this label, set the desired offsets. Note |
2339 | that this sets the offset at a label to be the offset before a label |
2340 | if we don't know anything about the label. This is not correct for |
2341 | the label after a BARRIER, but is the best guess we can make. If |
2342 | we guessed wrong, we will suppress an elimination that might have |
2343 | been possible had we been able to guess correctly. */ |
2344 | |
2345 | if (! offsets_known_at[CODE_LABEL_NUMBER (x) - first_label_num]) |
2346 | { |
2347 | for (i = 0; i < NUM_ELIMINABLE_REGS; i++) |
2348 | offsets_at[CODE_LABEL_NUMBER (x) - first_label_num][i] |
2349 | = (initial_p ? reg_eliminate[i].initial_offset |
2350 | : reg_eliminate[i].offset); |
2351 | offsets_known_at[CODE_LABEL_NUMBER (x) - first_label_num] = 1; |
2352 | } |
2353 | |
2354 | /* Otherwise, if this is the definition of a label and it is |
2355 | preceded by a BARRIER, set our offsets to the known offset of |
2356 | that label. */ |
2357 | |
2358 | else if (x == insn |
2359 | && (tem = prev_nonnote_insn (insn)) != 0 |
2360 | && BARRIER_P (tem)) |
2361 | set_offsets_for_label (insn); |
2362 | else |
2363 | /* If neither of the above cases is true, compare each offset |
2364 | with those previously recorded and suppress any eliminations |
2365 | where the offsets disagree. */ |
2366 | |
2367 | for (i = 0; i < NUM_ELIMINABLE_REGS; i++) |
2368 | if (maybe_ne (offsets_at[CODE_LABEL_NUMBER (x) - first_label_num][i], |
2369 | (initial_p ? reg_eliminate[i].initial_offset |
2370 | : reg_eliminate[i].offset))) |
2371 | reg_eliminate[i].can_eliminate = 0; |
2372 | |
2373 | return; |
2374 | |
2375 | case JUMP_TABLE_DATA: |
2376 | set_label_offsets (PATTERN (insn), insn, initial_p); |
2377 | return; |
2378 | |
2379 | case JUMP_INSN: |
2380 | set_label_offsets (PATTERN (insn), insn, initial_p); |
2381 | |
2382 | /* fall through */ |
2383 | |
2384 | case INSN: |
2385 | case CALL_INSN: |
2386 | /* Any labels mentioned in REG_LABEL_OPERAND notes can be branched |
2387 | to indirectly and hence must have all eliminations at their |
2388 | initial offsets. */ |
2389 | for (tem = REG_NOTES (x); tem; tem = XEXP (tem, 1)) |
2390 | if (REG_NOTE_KIND (tem) == REG_LABEL_OPERAND) |
2391 | set_label_offsets (XEXP (tem, 0), insn, 1); |
2392 | return; |
2393 | |
2394 | case PARALLEL: |
2395 | case ADDR_VEC: |
2396 | case ADDR_DIFF_VEC: |
2397 | /* Each of the labels in the parallel or address vector must be |
2398 | at their initial offsets. We want the first field for PARALLEL |
2399 | and ADDR_VEC and the second field for ADDR_DIFF_VEC. */ |
2400 | |
2401 | for (i = 0; i < (unsigned) XVECLEN (x, code == ADDR_DIFF_VEC); i++) |
2402 | set_label_offsets (XVECEXP (x, code == ADDR_DIFF_VEC, i), |
2403 | insn, initial_p); |
2404 | return; |
2405 | |
2406 | case SET: |
2407 | /* We only care about setting PC. If the source is not RETURN, |
2408 | IF_THEN_ELSE, or a label, disable any eliminations not at |
2409 | their initial offsets. Similarly if any arm of the IF_THEN_ELSE |
2410 | isn't one of those possibilities. For branches to a label, |
2411 | call ourselves recursively. |
2412 | |
2413 | Note that this can disable elimination unnecessarily when we have |
2414 | a non-local goto since it will look like a non-constant jump to |
2415 | someplace in the current function. This isn't a significant |
2416 | problem since such jumps will normally be when all elimination |
2417 | pairs are back to their initial offsets. */ |
2418 | |
2419 | if (SET_DEST (x) != pc_rtx) |
2420 | return; |
2421 | |
2422 | switch (GET_CODE (SET_SRC (x))) |
2423 | { |
2424 | case PC: |
2425 | case RETURN: |
2426 | return; |
2427 | |
2428 | case LABEL_REF: |
2429 | set_label_offsets (SET_SRC (x), insn, initial_p); |
2430 | return; |
2431 | |
2432 | case IF_THEN_ELSE: |
2433 | tem = XEXP (SET_SRC (x), 1); |
2434 | if (GET_CODE (tem) == LABEL_REF) |
2435 | set_label_offsets (label_ref_label (tem), insn, initial_p); |
2436 | else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN) |
2437 | break; |
2438 |  |
2439 | tem = XEXP (SET_SRC (x), 2); |
2440 | if (GET_CODE (tem) == LABEL_REF) |
2441 | set_label_offsets (label_ref_label (tem), insn, initial_p); |
2442 | else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN) |
2443 | break; |
2444 | return; |
2445 | |
2446 | default: |
2447 | break; |
2448 | } |
2449 | |
2450 | /* If we reach here, all eliminations must be at their initial |
2451 | offset because we are doing a jump to a variable address. */ |
2452 | for (p = reg_eliminate; p < &reg_eliminate[NUM_ELIMINABLE_REGS]; p++) |
2453 | if (maybe_ne (p->offset, p->initial_offset)) |
2454 | p->can_eliminate = 0; |
2455 | break; |
2456 | |
2457 | default: |
2458 | break; |
2459 | } |
2460 | } |
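| /* Editorial sketch (hypothetical offsets): if the fall-through path |
|    reaches a label with the fp->sp elimination offset recorded as 16, but |
|    a jump reaches the same label while the current offset is 32, the |
|    maybe_ne comparison above sees the disagreement and clears |
|    can_eliminate for that (from, to) pair.  */ |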
2461 | |
2462 | /* This function examines every reg that occurs in X and adjusts the |
2463 | costs for its elimination which are gathered by IRA. INSN is the |
2464 | insn in which X occurs. We do not recurse into MEM expressions. */ |
2465 | |
2466 | static void |
2467 | note_reg_elim_costly (const_rtx x, rtx insn) |
2468 | { |
2469 | subrtx_iterator::array_type array; |
2470 | FOR_EACH_SUBRTX (iter, array, x, NONCONST) |
2471 | { |
2472 | const_rtx x = *iter; |
2473 | if (MEM_P (x)) |
2474 | iter.skip_subrtxes (); |
2475 | else if (REG_P (x) |
2476 | && REGNO (x) >= FIRST_PSEUDO_REGISTER |
2477 | && reg_equiv_init (REGNO (x)) |
2478 | && reg_equiv_invariant (REGNO (x))) |
2479 | { |
2480 | rtx t = reg_equiv_invariant (REGNO (x)); |
2481 | rtx new_rtx = eliminate_regs_1 (t, Pmode, insn, true, true); |
2482 | int cost = set_src_cost (new_rtx, Pmode, |
2483 | optimize_bb_for_speed_p (elim_bb)); |
2484 | int freq = REG_FREQ_FROM_BB (elim_bb); |
2485 |  |
2486 | if (cost != 0) |
2487 | ira_adjust_equiv_reg_cost (REGNO (x), -cost * freq); |
2488 | } |
2489 | } |
2490 | } |
2491 | |
2492 | /* Scan X and replace any eliminable registers (such as fp) with a |
2493 | replacement (such as sp), plus an offset. |
2494 | |
2495 | MEM_MODE is the mode of an enclosing MEM. We need this to know how |
2496 | much to adjust a register for, e.g., PRE_DEC. Also, if we are inside a |
2497 | MEM, we are allowed to replace a sum of a register and the constant zero |
2498 | with the register, which we cannot do outside a MEM. In addition, we need |
2499 | to record the fact that a register is referenced outside a MEM. |
2500 | |
2501 | If INSN is an insn, it is the insn containing X. If we replace a REG |
2502 | in a SET_DEST with an equivalent MEM and INSN is nonzero, write a |
2503 | CLOBBER of the pseudo after INSN so find_equiv_regs will know that |
2504 | the REG is being modified. |
2505 | |
2506 | Alternatively, INSN may be a note (an EXPR_LIST or INSN_LIST). |
2507 | That's used when we eliminate in expressions stored in notes. |
2508 | This means, do not set ref_outside_mem even if the reference |
2509 | is outside of MEMs. |
2510 | |
2511 | If FOR_COSTS is true, we are being called before reload in order to |
2512 | estimate the costs of keeping registers with an equivalence unallocated. |
2513 | |
2514 | REG_EQUIV_MEM and REG_EQUIV_ADDRESS contain addresses that have had |
2515 | replacements done assuming all offsets are at their initial values. If |
2516 | they are not, or if REG_EQUIV_ADDRESS is nonzero for a pseudo we |
2517 | encounter, return the actual location so that find_reloads will do |
2518 | the proper thing. */ |
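| /* Editorial sketch (hypothetical RTL): assuming an fp->sp elimination |
|    whose previous_offset is currently 8, |
|        (mem:SI (plus:SI (reg fp) (const_int 4))) |
|    is rewritten by the PLUS case below into |
|        (mem:SI (plus:SI (reg sp) (const_int 12))), |
|    and, inside a MEM only, the special case |
|        (plus (reg fp) (const_int -8)) |
|    collapses to just (reg sp).  */ |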
2519 | |
2520 | static rtx |
2521 | eliminate_regs_1 (rtx x, machine_mode mem_mode, rtx insn, |
2522 | bool may_use_invariant, bool for_costs) |
2523 | { |
2524 | enum rtx_code code = GET_CODE (x); |
2525 | struct elim_table *ep; |
2526 | int regno; |
2527 | rtx new_rtx; |
2528 | int i, j; |
2529 | const char *fmt; |
2530 | int copied = 0; |
2531 | |
2532 | if (! current_function_decl) |
2533 | return x; |
2534 | |
2535 | switch (code) |
2536 | { |
2537 | CASE_CONST_ANY: |
2538 | case CONST: |
2539 | case SYMBOL_REF: |
2540 | case CODE_LABEL: |
2541 | case PC: |
2542 | case ASM_INPUT: |
2543 | case ADDR_VEC: |
2544 | case ADDR_DIFF_VEC: |
2545 | case RETURN: |
2546 | return x; |
2547 | |
2548 | case REG: |
2549 | regno = REGNO (x); |
2550 |  |
2551 | /* First handle the case where we encounter a bare register that |
2552 | is eliminable. Replace it with a PLUS. */ |
2553 | if (regno < FIRST_PSEUDO_REGISTER) |
2554 | { |
2555 | for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; |
2556 | ep++) |
2557 | if (ep->from_rtx == x && ep->can_eliminate) |
2558 | return plus_constant (Pmode, ep->to_rtx, ep->previous_offset); |
2559 |  |
2560 | } |
2561 | else if (reg_renumber && reg_renumber[regno] < 0 |
2562 | && reg_equivs |
2563 | && reg_equiv_invariant (regno)) |
2564 | { |
2565 | if (may_use_invariant || (insn && DEBUG_INSN_P (insn))) |
2566 | return eliminate_regs_1 (copy_rtx (reg_equiv_invariant (regno)), |
2567 | mem_mode, insn, true, for_costs); |
2568 | /* There exists at least one use of REGNO that cannot be |
2569 | eliminated. Prevent the defining insn from being deleted. */ |
2570 | reg_equiv_init (regno) = NULL; |
2571 | if (!for_costs) |
2572 | alter_reg (regno, -1, true); |
2573 | } |
2574 | return x; |
2575 | |
2576 | /* You might think handling MINUS in a manner similar to PLUS is a |
2577 | good idea. It is not. It has been tried multiple times and every |
2578 | time the change has had to have been reverted. |
2579 | |
2580 | Other parts of reload know a PLUS is special (gen_reload for example) |
2581 | and require special code to handle a reloaded PLUS operand. |
2582 | |
2583 | Also consider backends where the flags register is clobbered by a |
2584 | MINUS, but we can emit a PLUS that does not clobber flags (IA-32, |
2585 | lea instruction comes to mind). If we try to reload a MINUS, we |
2586 | may kill the flags register that was holding a useful value. |
2587 | |
2588 | So, please before trying to handle MINUS, consider reload as a |
2589 | whole instead of this little section as well as the backend issues. */ |
2590 | case PLUS: |
2591 | /* If this is the sum of an eliminable register and a constant, rework |
2592 | the sum. */ |
2593 | if (REG_P (XEXP (x, 0)) |
2594 | && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER |
2595 | && CONSTANT_P (XEXP (x, 1))) |
2596 | { |
2597 | for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; |
2598 | ep++) |
2599 | if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate) |
2600 | { |
2601 | /* The only time we want to replace a PLUS with a REG (this |
2602 | occurs when the constant operand of the PLUS is the negative |
2603 | of the offset) is when we are inside a MEM. We won't want |
2604 | to do so at other times because that would change the |
2605 | structure of the insn in a way that reload can't handle. |
2606 | We special-case the commonest situation in |
2607 | eliminate_regs_in_insn, so just replace a PLUS with a |
2608 | PLUS here, unless inside a MEM. In DEBUG_INSNs, it is |
2609 | always ok to replace a PLUS with just a REG. */ |
2610 | if ((mem_mode != 0 || (insn && DEBUG_INSN_P (insn))) |
2611 | && CONST_INT_P (XEXP (x, 1)) |
2612 | && known_eq (INTVAL (XEXP (x, 1)), -ep->previous_offset)) |
2613 | return ep->to_rtx; |
2614 | else |
2615 | return gen_rtx_PLUS (Pmode, ep->to_rtx, |
2616 | plus_constant (Pmode, XEXP (x, 1), |
2617 | ep->previous_offset)); |
2618 | } |
2619 | |
2620 | /* If the register is not eliminable, we are done since the other |
2621 | operand is a constant. */ |
2622 | return x; |
2623 | } |
2624 | |
2625 | /* If this is part of an address, we want to bring any constant to the |
2626 | outermost PLUS. We will do this by doing register replacement in |
2627 | our operands and seeing if a constant shows up in one of them. |
2628 | |
2629 | Note that there is no risk of modifying the structure of the insn, |
2630 | since we only get called for its operands, thus we are either |
2631 | modifying the address inside a MEM, or something like an address |
2632 | operand of a load-address insn. */ |
2633 | |
2634 | { |
2635 | rtx new0 = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, true, |
2636 | for_costs); |
2637 | rtx new1 = eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, true, |
2638 | for_costs); |
2639 |  |
2640 | if (reg_renumber && (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))) |
2641 | { |
2642 | /* If one side is a PLUS and the other side is a pseudo that |
2643 | didn't get a hard register but has a reg_equiv_constant, |
2644 | we must replace the constant here since it may no longer |
2645 | be in the position of any operand. */ |
2646 | if (GET_CODE (new0) == PLUS && REG_P (new1) |
2647 | && REGNO (new1) >= FIRST_PSEUDO_REGISTER |
2648 | && reg_renumber[REGNO (new1)] < 0 |
2649 | && reg_equivs |
2650 | && reg_equiv_constant (REGNO (new1)) != 0) |
2651 | new1 = reg_equiv_constant (REGNO (new1)); |
2652 | else if (GET_CODE (new1) == PLUS && REG_P (new0) |
2653 | && REGNO (new0) >= FIRST_PSEUDO_REGISTER |
2654 | && reg_renumber[REGNO (new0)] < 0 |
2655 | && reg_equiv_constant (REGNO (new0)) != 0) |
2656 | new0 = reg_equiv_constant (REGNO (new0)); |
2657 |  |
2658 | new_rtx = form_sum (GET_MODE (x), new0, new1); |
2659 |  |
2660 | /* As above, if we are not inside a MEM we do not want to |
2661 | turn a PLUS into something else. We might try to do so here |
2662 | for an addition of 0 if we aren't optimizing. */ |
2663 | if (! mem_mode && GET_CODE (new_rtx) != PLUS) |
2664 | return gen_rtx_PLUS (GET_MODE (x), new_rtx, const0_rtx); |
2665 | else |
2666 | return new_rtx; |
2667 | } |
2668 | } |
2669 | return x; |
2670 | |
2671 | case MULT: |
2672 | /* If this is the product of an eliminable register and a |
2673 | constant, apply the distribute law and move the constant out |
2674 | so that we have (plus (mult ..) ..). This is needed in order |
2675 | to keep load-address insns valid. This case is pathological. |
2676 | We ignore the possibility of overflow here. */ |
2677 | if (REG_P (XEXP (x, 0)) |
2678 | && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER |
2679 | && CONST_INT_P (XEXP (x, 1))) |
2680 | for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; |
2681 | ep++) |
2682 | if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate) |
2683 | { |
2684 | if (! mem_mode |
2685 | /* Refs inside notes or in DEBUG_INSNs don't count for |
2686 | this purpose. */ |
2687 | && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST |
2688 | || GET_CODE (insn) == INSN_LIST |
2689 | || DEBUG_INSN_P (insn)))) |
2690 | ep->ref_outside_mem = 1; |
2691 |  |
2692 | return |
2693 | plus_constant (Pmode, |
2694 | gen_rtx_MULT (Pmode, ep->to_rtx, XEXP (x, 1)), |
2695 | ep->previous_offset * INTVAL (XEXP (x, 1))); |
2696 | } |
2697 | |
2698 | /* fall through */ |
2699 | |
2700 | case CALL: |
2701 | case COMPARE: |
2702 | /* See comments before PLUS about handling MINUS. */ |
2703 | case MINUS: |
2704 | case DIV: case UDIV: |
2705 | case MOD: case UMOD: |
2706 | case AND: case IOR: case XOR: |
2707 | case ROTATERT: case ROTATE: |
2708 | case ASHIFTRT: case LSHIFTRT: case ASHIFT: |
2709 | case NE: case EQ: |
2710 | case GE: case GT: case GEU: case GTU: |
2711 | case LE: case LT: case LEU: case LTU: |
2712 | { |
2713 | rtx new0 = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, false, |
2714 | for_costs); |
2715 | rtx new1 = XEXP (x, 1) |
2716 | ? eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, false, |
2717 | for_costs) : 0; |
2718 |  |
2719 | if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1)) |
2720 | return gen_rtx_fmt_ee (code, GET_MODE (x), new0, new1); |
2721 | } |
2722 | return x; |
2723 | |
2724 | case EXPR_LIST: |
2725 | /* If we have something in XEXP (x, 0), the usual case, eliminate it. */ |
2726 | if (XEXP (x, 0)) |
2727 | { |
2728 | new_rtx = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, true, |
2729 | for_costs); |
2730 | if (new_rtx != XEXP (x, 0)) |
2731 | { |
2732 | /* If this is a REG_DEAD note, it is not valid anymore. |
2733 | Using the eliminated version could result in creating a |
2734 | REG_DEAD note for the stack or frame pointer. */ |
2735 | if (REG_NOTE_KIND (x) == REG_DEAD) |
2736 | return (XEXP (x, 1) |
2737 | ? eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, true, |
2738 | for_costs) |
2739 | : NULL_RTX); |
2740 |  |
2741 | x = alloc_reg_note (REG_NOTE_KIND (x), new_rtx, XEXP (x, 1)); |
2742 | } |
2743 | } |
2744 | |
2745 | /* fall through */ |
2746 | |
2747 | case INSN_LIST: |
2748 | case INT_LIST: |
2749 | /* Now do eliminations in the rest of the chain. If this was |
2750 | an EXPR_LIST, this might result in allocating more memory than is |
2751 | strictly needed, but it simplifies the code. */ |
2752 | if (XEXP (x, 1)) |
2753 | { |
2754 | new_rtx = eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, true, |
2755 | for_costs); |
2756 | if (new_rtx != XEXP (x, 1)) |
2757 | return |
2758 | gen_rtx_fmt_ee (GET_CODE (x), GET_MODE (x), XEXP (x, 0), new_rtx); |
2759 | } |
2760 | return x; |
2761 | |
2762 | case PRE_INC: |
2763 | case POST_INC: |
2764 | case PRE_DEC: |
2765 | case POST_DEC: |
2766 | /* We do not support elimination of a register that is modified. |
2767 | elimination_effects has already made sure that this does not |
2768 | happen. */ |
2769 | return x; |
2770 | |
2771 | case PRE_MODIFY: |
2772 | case POST_MODIFY: |
2773 | /* We do not support elimination of a register that is modified. |
2774 | elimination_effects has already made sure that this does not |
2775 | happen. The only remaining case we need to consider here is |
2776 | that the increment value may be an eliminable register. */ |
2777 | if (GET_CODE (XEXP (x, 1)) == PLUS |
2778 | && XEXP (XEXP (x, 1), 0) == XEXP (x, 0)) |
2779 | { |
2780 | rtx new_rtx = eliminate_regs_1 (XEXP (XEXP (x, 1), 1), mem_mode, |
2781 | insn, true, for_costs); |
2782 |  |
2783 | if (new_rtx != XEXP (XEXP (x, 1), 1)) |
2784 | return gen_rtx_fmt_ee (code, GET_MODE (x), XEXP (x, 0), |
2785 | gen_rtx_PLUS (GET_MODE (x), |
2786 | XEXP (x, 0), new_rtx)); |
2787 | } |
2788 | return x; |
2789 | |
2790 | case STRICT_LOW_PART: |
2791 | case NEG: case NOT: |
2792 | case SIGN_EXTEND: case ZERO_EXTEND: |
2793 | case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE: |
2794 | case FLOAT: case FIX: |
2795 | case UNSIGNED_FIX: case UNSIGNED_FLOAT: |
2796 | case ABS: |
2797 | case SQRT: |
2798 | case FFS: |
2799 | case CLZ: |
2800 | case CTZ: |
2801 | case POPCOUNT: |
2802 | case PARITY: |
2803 | case BSWAP: |
2804 | new_rtx = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, false, |
2805 | for_costs); |
2806 | if (new_rtx != XEXP (x, 0)) |
2807 | return gen_rtx_fmt_e (code, GET_MODE (x), new_rtx); |
2808 | return x; |
2809 | |
2810 | case SUBREG: |
2811 | /* Similar to above processing, but preserve SUBREG_BYTE. |
2812 | Convert (subreg (mem)) to (mem) if not paradoxical. |
2813 | Also, if we have a non-paradoxical (subreg (pseudo)) and the |
2814 | pseudo didn't get a hard reg, we must replace this with the |
2815 | eliminated version of the memory location because push_reload |
2816 | may do the replacement in certain circumstances. */ |
2817 | if (REG_P (SUBREG_REG (x)) |
2818 | && !paradoxical_subreg_p (x) |
2819 | && reg_equivs |
2820 | && reg_equiv_memory_loc (REGNO (SUBREG_REG (x))) != 0) |
2821 | { |
2822 | new_rtx = SUBREG_REG (x); |
2823 | } |
2824 | else |
2825 | new_rtx = eliminate_regs_1 (SUBREG_REG (x), mem_mode, insn, false, for_costs); |
2826 |  |
2827 | if (new_rtx != SUBREG_REG (x)) |
2828 | { |
2829 | poly_int64 x_size = GET_MODE_SIZE (GET_MODE (x)); |
2830 | poly_int64 new_size = GET_MODE_SIZE (GET_MODE (new_rtx)); |
2831 |  |
2832 | if (MEM_P (new_rtx) |
2833 | && ((partial_subreg_p (GET_MODE (x), GET_MODE (new_rtx)) |
2834 | /* On RISC machines, combine can create rtl of the form |
2835 | (set (subreg:m1 (reg:m2 R) 0) ...) |
2836 | where m1 < m2, and expects something interesting to |
2837 | happen to the entire word. Moreover, it will use the |
2838 | (reg:m2 R) later, expecting all bits to be preserved. |
2839 | So if the number of words is the same, preserve the |
2840 | subreg so that push_reload can see it. */ |
2841 | && !(WORD_REGISTER_OPERATIONS |
2842 | && known_equal_after_align_down (x_size - 1, |
2843 | new_size - 1, |
2844 | UNITS_PER_WORD))) |
2845 | || known_eq (x_size, new_size)) |
2846 | ) |
2847 | return adjust_address_nv (new_rtx, GET_MODE (x), SUBREG_BYTE (x)); |
2848 | else if (insn && GET_CODE (insn) == DEBUG_INSN) |
2849 | return gen_rtx_raw_SUBREG (GET_MODE (x), new_rtx, SUBREG_BYTE (x)); |
2850 | else |
2851 | return gen_rtx_SUBREG (GET_MODE (x), new_rtx, SUBREG_BYTE (x)); |
2852 | } |
2853 | |
2854 | return x; |
2855 | |
2856 | case MEM: |
2857 | /* Our only special processing is to pass the mode of the MEM to our |
2858 | recursive call and copy the flags. While we are here, handle this |
2859 | case more efficiently. */ |
2860 | |
2861 | new_rtx = eliminate_regs_1 (XEXP (x, 0), GET_MODE (x), insn, true, |
2862 | for_costs); |
2863 | if (for_costs |
2864 | && memory_address_p (GET_MODE (x), XEXP (x, 0)) |
2865 | && !memory_address_p (GET_MODE (x), new_rtx)) |
2866 | note_reg_elim_costly (XEXP (x, 0), insn); |
2867 | |
2868 | return replace_equiv_address_nv (x, new_rtx); |
2869 | |
2870 | case USE: |
2871 | /* Handle insn_list USE that a call to a pure function may generate. */ |
2872 | new_rtx = eliminate_regs_1 (XEXP (x, 0), VOIDmode, insn, false, |
2873 | for_costs); |
2874 | if (new_rtx != XEXP (x, 0)) |
2875 | return gen_rtx_USE (GET_MODE (x), new_rtx); |
2876 | return x; |
2877 | |
2878 | case CLOBBER: |
2879 | case ASM_OPERANDS: |
2880 | gcc_assert (insn && DEBUG_INSN_P (insn)); |
2881 | break; |
2882 | |
2883 | case SET: |
2884 | gcc_unreachable (); |
2885 | |
2886 | default: |
2887 | break; |
2888 | } |
2889 | |
2890 | /* Process each of our operands recursively. If any have changed, make a |
2891 | copy of the rtx. */ |
2892 | fmt = GET_RTX_FORMAT (code); |
2893 | for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++) |
2894 | { |
2895 | if (*fmt == 'e') |
2896 | { |
2897 | new_rtx = eliminate_regs_1 (XEXP (x, i), mem_mode, insn, false, |
2898 | for_costs); |
2899 | if (new_rtx != XEXP (x, i) && ! copied) |
2900 | { |
2901 | x = shallow_copy_rtx (x); |
2902 | copied = 1; |
2903 | } |
2904 | XEXP (x, i) = new_rtx; |
2905 | } |
2906 | else if (*fmt == 'E') |
2907 | { |
2908 | int copied_vec = 0; |
2909 | for (j = 0; j < XVECLEN (x, i); j++) |
2910 | { |
2911 | new_rtx = eliminate_regs_1 (XVECEXP (x, i, j), mem_mode, insn, false, |
2912 | for_costs); |
2913 | if (new_rtx != XVECEXP (x, i, j) && ! copied_vec) |
2914 | { |
2915 | rtvec new_v = gen_rtvec_v (XVECLEN (x, i), |
2916 | XVEC (x, i)->elem); |
2917 | if (! copied) |
2918 | { |
2919 | x = shallow_copy_rtx (x); |
2920 | copied = 1; |
2921 | } |
2922 | XVEC (x, i) = new_v; |
2923 | copied_vec = 1; |
2924 | } |
2925 | XVECEXP (x, i, j) = new_rtx; |
2926 | } |
2927 | } |
2928 | } |
2929 | |
2930 | return x; |
2931 | } |
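| /* Editorial note (not in the original source): the generic traversal |
|    above is copy-on-write.  X is shallow-copied at most once, the first |
|    time any operand actually changes; untouched subexpressions keep |
|    their original, possibly shared, rtx structure.  */ |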
2932 | |
2933 | rtx |
2934 | eliminate_regs (rtx x, machine_mode mem_mode, rtx insn) |
2935 | { |
2936 | if (reg_eliminate == NULL) |
2937 | { |
2938 | gcc_assert (targetm.no_register_allocation); |
2939 | return x; |
2940 | } |
2941 | return eliminate_regs_1 (x, mem_mode, insn, false, false); |
2942 | } |
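| /* Editorial usage sketch (hypothetical call site, not in the original |
|    source): a caller that is not inside a MEM passes VOIDmode, e.g. when |
|    cleaning up a note: |
|        XEXP (note, 0) = eliminate_regs (XEXP (note, 0), VOIDmode, insn); |
|    the variable names here are illustrative only.  */ |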
2943 | |
2944 | /* Scan rtx X for modifications of elimination target registers. Update |
2945 | the table of eliminables to reflect the changed state. MEM_MODE is |
2946 | the mode of an enclosing MEM rtx, or VOIDmode if not within a MEM. */ |
2947 | |
2948 | static void |
2949 | elimination_effects (rtx x, machine_mode mem_mode) |
2950 | { |
2951 | enum rtx_code code = GET_CODE (x); |
2952 | struct elim_table *ep; |
2953 | int regno; |
2954 | int i, j; |
2955 | const char *fmt; |
2956 | |
2957 | switch (code) |
2958 | { |
2959 | CASE_CONST_ANY: |
2960 | case CONST: |
2961 | case SYMBOL_REF: |
2962 | case CODE_LABEL: |
2963 | case PC: |
2964 | case ASM_INPUT: |
2965 | case ADDR_VEC: |
2966 | case ADDR_DIFF_VEC: |
2967 | case RETURN: |
2968 | return; |
2969 | |
2970 | case REG: |
2971 | regno = REGNO (x); |
2972 | |
2973 | /* First handle the case where we encounter a bare register that |
2974 | is eliminable; just record whether it is referenced outside a MEM. */ |
2975 | if (regno < FIRST_PSEUDO_REGISTER) |
2976 | { |
2977 | for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; |
2978 | ep++) |
2979 | if (ep->from_rtx == x && ep->can_eliminate) |
2980 | { |
2981 | if (! mem_mode) |
2982 | ep->ref_outside_mem = 1; |
2983 | return; |
2984 | } |
2985 |  |
2986 | } |
2987 | else if (reg_renumber[regno] < 0 |
2988 | && reg_equivs |
2989 | && reg_equiv_constant (regno) |
2990 | && ! function_invariant_p (reg_equiv_constant (regno))) |
2991 | elimination_effects (reg_equiv_constant (regno), mem_mode); |
2992 | return; |
2993 | |
2994 | case PRE_INC: |
2995 | case POST_INC: |
2996 | case PRE_DEC: |
2997 | case POST_DEC: |
2998 | case POST_MODIFY: |
2999 | case PRE_MODIFY: |
3000 | /* If we modify the source of an elimination rule, disable it. */ |
3001 | for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++) |
3002 | if (ep->from_rtx == XEXP (x, 0)) |
3003 | ep->can_eliminate = 0; |
3004 | |
3005 | /* If we modify the target of an elimination rule by adding a constant, |
3006 | update its offset. If we modify the target in any other way, we'll |
3007 | have to disable the rule as well. */ |
3008 | for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++) |
3009 | if (ep->to_rtx == XEXP (x, 0)) |
3010 | { |
3011 | poly_int64 size = GET_MODE_SIZE (mem_mode); |
3012 | |
3013 | /* If more bytes than MEM_MODE are pushed, account for them. */ |
3014 | #ifdef PUSH_ROUNDING |
3015 | if (ep->to_rtx == stack_pointer_rtx) |
3016 | size = PUSH_ROUNDING (size); |
3017 | #endif |
3018 | if (code == PRE_DEC || code == POST_DEC) |
3019 | ep->offset += size; |
3020 | else if (code == PRE_INC || code == POST_INC) |
3021 | ep->offset -= size; |
3022 | else if (code == PRE_MODIFY || code == POST_MODIFY) |
3023 | { |
3024 | if (GET_CODE (XEXP (x, 1)) == PLUS |
3025 | && XEXP (x, 0) == XEXP (XEXP (x, 1), 0) |
3026 | && CONST_INT_P (XEXP (XEXP (x, 1), 1))) |
3027 | ep->offset -= INTVAL (XEXP (XEXP (x, 1), 1)); |
3028 | else |
3029 | ep->can_eliminate = 0; |
3030 | } |
3031 | } |
3032 | |
3033 | /* These two aren't unary operators. */ |
3034 | if (code == POST_MODIFY || code == PRE_MODIFY) |
3035 | break; |
3036 | |
3037 | /* Fall through to generic unary operation case. */ |
3038 | gcc_fallthrough (); |
3039 | case STRICT_LOW_PART: |
3040 | case NEG: case NOT: |
3041 | case SIGN_EXTEND: case ZERO_EXTEND: |
3042 | case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE: |
3043 | case FLOAT: case FIX: |
3044 | case UNSIGNED_FIX: case UNSIGNED_FLOAT: |
3045 | case ABS: |
3046 | case SQRT: |
3047 | case FFS: |
3048 | case CLZ: |
3049 | case CTZ: |
3050 | case POPCOUNT: |
3051 | case PARITY: |
3052 | case BSWAP: |
3053 | elimination_effects (XEXP (x, 0), mem_mode); |
3054 | return; |
3055 | |
3056 | case SUBREG: |
3057 | if (REG_P (SUBREG_REG (x)) |
3058 | && !paradoxical_subreg_p (x) |
3059 | && reg_equivs |
3060 | && reg_equiv_memory_loc (REGNO (SUBREG_REG (x))) != 0) |
3061 | return; |
3062 |  |
3063 | elimination_effects (SUBREG_REG (x), mem_mode); |
3064 | return; |
3065 | |
3066 | case USE: |
3067 | /* If using a register that is the source of an eliminate we still |
3068 | think can be performed, note it cannot be performed since we don't |
3069 | know how this register is used. */ |
3070 | for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++) |
3071 | if (ep->from_rtx == XEXP (x, 0)) |
3072 | ep->can_eliminate = 0; |
3073 |  |
3074 | elimination_effects (XEXP (x, 0), mem_mode); |
3075 | return; |
3076 | |
3077 | case CLOBBER: |
3078 | /* If clobbering a register that is the replacement register for an |
3079 | elimination we still think can be performed, note that it cannot |
3080 | be performed. Otherwise, we need not be concerned about it. */ |
3081 | for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++) |
3082 | if (ep->to_rtx == XEXP (x, 0)) |
3083 | ep->can_eliminate = 0; |
3084 |  |
3085 | elimination_effects (XEXP (x, 0), mem_mode); |
3086 | return; |
3087 | |
3088 | case SET: |
3089 | /* Check for setting a register that we know about. */ |
3090 | if (REG_P (SET_DEST (x))) |
3091 | { |
3092 | /* See if this is setting the replacement register for an |
3093 | elimination. |
3094 | |
3095 | If DEST is the hard frame pointer, we do nothing because we |
3096 | assume that all assignments to the frame pointer are for |
3097 | non-local gotos and are being done at a time when they are valid |
3098 | and do not disturb anything else. Some machines want to |
3099 | eliminate a fake argument pointer (or even a fake frame pointer) |
3100 | with either the real frame or the stack pointer. Assignments to |
3101 | the hard frame pointer must not prevent this elimination. */ |
3102 | |
3103 | for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; |
3104 | ep++) |
3105 | if (ep->to_rtx == SET_DEST (x) |
3106 | && SET_DEST (x) != hard_frame_pointer_rtx) |
3107 | { |
3108 | /* If it is being incremented, adjust the offset. Otherwise, |
3109 | this elimination can't be done. */ |
3110 | rtx src = SET_SRC (x); |
3111 |  |
3112 | if (GET_CODE (src) == PLUS |
3113 | && XEXP (src, 0) == SET_DEST (x) |
3114 | && CONST_INT_P (XEXP (src, 1))) |
3115 | ep->offset -= INTVAL (XEXP (src, 1)); |
3116 | else |
3117 | ep->can_eliminate = 0; |
3118 | } |
3119 | } |
3120 |  |
3121 | elimination_effects (SET_DEST (x), VOIDmode); |
3122 | elimination_effects (SET_SRC (x), VOIDmode); |
3123 | return; |
3124 | |
3125 | case MEM: |
3126 | /* Our only special processing is to pass the mode of the MEM to our |
3127 | recursive call. */ |
3128 | elimination_effects (XEXP (x, 0), GET_MODE (x)); |
3129 | return; |
3130 | |
3131 | default: |
3132 | break; |
3133 | } |
3134 | |
3135 | fmt = GET_RTX_FORMAT (code); |
3136 | for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++) |
3137 | { |
3138 | if (*fmt == 'e') |
3139 | elimination_effects (XEXP (x, i), mem_mode); |
3140 | else if (*fmt == 'E') |
3141 | for (j = 0; j < XVECLEN (x, i); j++) |
3142 | elimination_effects (XVECEXP (x, i, j), mem_mode); |
3143 | } |
3144 | } |
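| /* Editorial sketch (hypothetical RTL): for an elimination whose to_rtx |
|    is the stack pointer, scanning |
|        (set (reg sp) (plus (reg sp) (const_int -16))) |
|    takes the SET case above and does ep->offset -= -16, i.e. the |
|    recorded distance grows by 16; a PRE_DEC of sp inside a 4-byte MEM |
|    would instead add that (possibly PUSH_ROUNDING-adjusted) size.  */ |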
3145 | |
3146 | /* Descend through rtx X and verify that no references to eliminable registers |
3147 | remain. If any do remain, mark the involved register as not |
3148 | eliminable. */ |
3149 | |
3150 | static void |
3151 | check_eliminable_occurrences (rtx x) |
3152 | { |
3153 | const char *fmt; |
3154 | int i; |
3155 | enum rtx_code code; |
3156 | |
3157 | if (x == 0) |
3158 | return; |
3159 | |
3160 | code = GET_CODE (x); |
3161 |  |
3162 | if (code == REG && REGNO (x) < FIRST_PSEUDO_REGISTER) |
3163 | { |
3164 | struct elim_table *ep; |
3165 |  |
3166 | for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++) |
3167 | if (ep->from_rtx == x) |
3168 | ep->can_eliminate = 0; |
3169 | return; |
3170 | } |
3171 | |
3172 | fmt = GET_RTX_FORMAT (code); |
3173 | for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++) |
3174 | { |
3175 | if (*fmt == 'e') |
3176 | check_eliminable_occurrences (XEXP (x, i)); |
3177 | else if (*fmt == 'E') |
3178 | { |
3179 | int j; |
3180 | for (j = 0; j < XVECLEN (x, i); j++) |
3181 | check_eliminable_occurrences (XVECEXP (x, i, j)); |
3182 | } |
3183 | } |
3184 | } |
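| /* Editorial note (not in the original source): this is a verification |
|    pass.  If, after replacement, a bare (reg fp) still appears anywhere |
|    in the pattern, every elimination whose from_rtx is that register is |
|    switched off rather than risk emitting a reference to a register |
|    that no longer holds the eliminated value.  */ |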
3185 | |
3186 | /* Scan INSN and eliminate all eliminable registers in it. |
3187 | |
3188 | If REPLACE is nonzero, do the replacement destructively. Also |
3189 | delete the insn as dead if it is setting an eliminable register. |
3190 | |
3191 | If REPLACE is zero, do all our allocations in reload_obstack. |
3192 | |
3193 | If no eliminations were done and this insn doesn't require any elimination |
3194 | processing (these are not identical conditions: it might be updating sp, |
3195 | but not referencing fp; this needs to be seen during reload_as_needed so |
3196 | that the offset between fp and sp can be taken into consideration), zero |
3197 | is returned. Otherwise, 1 is returned. */ |
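| /* Editorial sketch (hypothetical insn): with an fp->sp elimination at |
|    offset 16, the single-set special case handled below rewrites |
|        (set (reg:SI ax) (plus:SI (reg fp) (const_int 4))) |
|    into |
|        (set (reg:SI ax) (plus:SI (reg sp) (const_int 20))) |
|    by folding ep->offset into the constant; register names are |
|    illustrative.  */ |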
3198 | |
3199 | static int |
3200 | eliminate_regs_in_insn (rtx_insn *insn, int replace) |
3201 | { |
3202 | int icode = recog_memoized (insn); |
3203 | rtx old_body = PATTERN (insn); |
3204 | int insn_is_asm = asm_noperands (old_body) >= 0; |
3205 | rtx old_set = single_set (insn); |
3206 | rtx new_body; |
3207 | int val = 0; |
3208 | int i; |
3209 | rtx substed_operand[MAX_RECOG_OPERANDS]; |
3210 | rtx orig_operand[MAX_RECOG_OPERANDS]; |
3211 | struct elim_table *ep; |
3212 | rtx plus_src, plus_cst_src; |
3213 | |
3214 | if (! insn_is_asm && icode < 0) |
3215 | { |
3216 | gcc_assert (DEBUG_INSN_P (insn) |
3217 | || GET_CODE (PATTERN (insn)) == USE |
3218 | || GET_CODE (PATTERN (insn)) == CLOBBER |
3219 | || GET_CODE (PATTERN (insn)) == ASM_INPUT); |
3220 | if (DEBUG_BIND_INSN_P (insn)) |
3221 | INSN_VAR_LOCATION_LOC (insn) |
3222 | = eliminate_regs (INSN_VAR_LOCATION_LOC (insn), VOIDmode, insn); |
3223 | return 0; |
3224 | } |
3225 | |
3226 | /* We allow one special case which happens to work on all machines we |
3227 | currently support: a single set with the source or a REG_EQUAL |
3228 | note being a PLUS of an eliminable register and a constant. */ |
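     | /* Illustrative shape of that special case (the pseudo numbers and |
     |    offset here are hypothetical, not from any particular target): |
     |      (set (reg:SI 100) (plus:SI (reg:SI fp) (const_int 8))) |
     |    or a plain move whose REG_EQUAL note is such a PLUS, e.g. |
     |      (set (reg:SI 100) (reg:SI 101)) noted (plus (reg fp) (const_int 8)). */ |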
3229 | plus_src = plus_cst_src = 0; |
3230 | if (old_set && REG_P (SET_DEST (old_set))) |
3231 | { |
3232 | if (GET_CODE (SET_SRC (old_set)) == PLUS) |
3233 | plus_src = SET_SRC (old_set); |
3234 | /* First see if the source is of the form (plus (...) CST). */ |
3235 | if (plus_src |
3236 | && CONST_INT_P (XEXP (plus_src, 1))) |
3237 | plus_cst_src = plus_src; |
3238 | else if (REG_P (SET_SRC (old_set)) |
3239 | || plus_src) |
3240 | { |
3241 | /* Otherwise, see if we have a REG_EQUAL note of the form |
3242 | (plus (...) CST). */ |
3243 | rtx links; |
3244 | for (links = REG_NOTES (insn); links; links = XEXP (links, 1)) |
3245 | { |
3246 | if ((REG_NOTE_KIND (links) == REG_EQUAL |
3247 | || REG_NOTE_KIND (links) == REG_EQUIV) |
3248 | && GET_CODE (XEXP (links, 0)) == PLUS |
3249 | && CONST_INT_P (XEXP (XEXP (links, 0), 1))) |
3250 | { |
3251 | plus_cst_src = XEXP (links, 0); |
3252 | break; |
3253 | } |
3254 | } |
3255 | } |
3256 | |
3257 | /* Check that the first operand of the PLUS is a hard reg or |
3258 | the lowpart subreg of one. */ |
3259 | if (plus_cst_src) |
3260 | { |
3261 | rtx reg = XEXP (plus_cst_src, 0); |
3262 | if (GET_CODE (reg) == SUBREG && subreg_lowpart_p (reg)) |
3263 | reg = SUBREG_REG (reg); |
3264 | |
3265 | if (!REG_P (reg) || REGNO (reg) >= FIRST_PSEUDO_REGISTER) |
3266 | plus_cst_src = 0; |
3267 | } |
3268 | } |
3269 | if (plus_cst_src) |
3270 | { |
3271 | rtx reg = XEXP (plus_cst_src, 0); |
3272 | poly_int64 offset = INTVAL (XEXP (plus_cst_src, 1)); |
3273 | |
3274 | if (GET_CODE (reg) == SUBREG) |
3275 | reg = SUBREG_REG (reg); |
3276 | |
3277 | for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++) |
3278 | if (ep->from_rtx == reg && ep->can_eliminate) |
3279 | { |
3280 | rtx to_rtx = ep->to_rtx; |
3281 | offset += ep->offset; |
3282 | offset = trunc_int_for_mode (offset, GET_MODE (plus_cst_src)); |
3283 | |
3284 | if (GET_CODE (XEXP (plus_cst_src, 0)) == SUBREG) |
3285 | to_rtx = gen_lowpart (GET_MODE (XEXP (plus_cst_src, 0)), |
3286 | to_rtx); |
3287 | /* If we have a nonzero offset, and the source is already |
3288 | a simple REG, the following transformation would |
3289 | increase the cost of the insn by replacing a simple REG |
3290 | with (plus (reg sp) CST). So try only when we already |
3291 | had a PLUS before. */ |
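     | /* Hypothetical instance of the case we skip here: a move |
     |      (set (reg 100) (reg 101)) |
     |    whose REG_EQUAL note is (plus (reg fp) (const_int 4)) would become |
     |      (set (reg 100) (plus (reg sp) (const_int 20))) |
     |    (assuming a folded offset of 16), trading a cheap reg-reg copy |
     |    for an add; hence a nonzero offset requires an existing PLUS. */ |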
3292 | if (known_eq (offset, 0) || plus_src) |
3293 | { |
3294 | rtx new_src = plus_constant (GET_MODE (to_rtx), |
3295 | to_rtx, offset); |
3296 | |
3297 | new_body = old_body; |
3298 | if (! replace) |
3299 | { |
3300 | new_body = copy_insn (old_body); |
3301 | if (REG_NOTES (insn)) |
3302 | REG_NOTES (insn) = copy_insn_1 (REG_NOTES (insn)); |
3303 | } |
3304 | PATTERN (insn) = new_body; |
3305 | old_set = single_set (insn); |
3306 | |
3307 | /* First see if this insn remains valid when we make the |
3308 | change. If not, try to replace the whole pattern with |
3309 | a simple set (this may help if the original insn was a |
3310 | PARALLEL that was only recognized as single_set due to |
3311 | REG_UNUSED notes). If this isn't valid either, keep |
3312 | the INSN_CODE the same and let reload fix it up. */ |
3313 | if (!validate_change (insn, &SET_SRC (old_set), new_src, 0)) |
3314 | { |
3315 | rtx new_pat = gen_rtx_SET (SET_DEST (old_set), new_src); |
3316 | |
3317 | if (!validate_change (insn, &PATTERN (insn), new_pat, 0)) |
3318 | SET_SRC (old_set) = new_src; |
3319 | } |
3320 | } |
3321 | else |
3322 | break; |
3323 | |
3324 | val = 1; |
3325 | /* This can't have an effect on elimination offsets, so skip right |
3326 | to the end. */ |
3327 | goto done; |
3328 | } |
3329 | } |
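     | /* Net effect, with made-up numbers: under an fp -> sp elimination |
     |    whose current offset is 16, |
     |      (set (reg 100) (plus (reg fp) (const_int 8))) |
     |    becomes |
     |      (set (reg 100) (plus (reg sp) (const_int 24))). */ |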
3330 | |
3331 | /* Determine the effects of this insn on elimination offsets. */ |
3332 | elimination_effects (old_body, VOIDmode); |
3333 | |
3334 | /* Eliminate all eliminable registers occurring in operands that |
3335 | can be handled by reload. */ |
3336 | extract_insn (insn); |
3337 | for (i = 0; i < recog_data.n_operands; i++) |
3338 | { |
3339 | orig_operand[i] = recog_data.operand[i]; |
3340 | substed_operand[i] = recog_data.operand[i]; |
3341 | |
3342 | /* For an asm statement, every operand is eliminable. */ |
3343 | if (insn_is_asm || insn_data[icode].operand[i].eliminable) |
3344 | { |
3345 | bool is_set_src, in_plus; |
3346 | |
3347 | /* Check for setting a register that we know about. */ |
3348 | if (recog_data.operand_type[i] != OP_IN |
3349 | && REG_P (orig_operand[i])) |
3350 | { |
3351 | /* If we are assigning to a register that can be eliminated, it |
3352 | must be as part of a PARALLEL, since the code above handles |
3353 | single SETs. We must indicate that we can no longer |
3354 | eliminate this reg. */ |
3355 | for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; |
3356 | ep++) |
3357 | if (ep->from_rtx == orig_operand[i]) |
3358 | ep->can_eliminate = 0; |
3359 | } |
3360 | |
3361 | /* Companion to the above plus substitution, we can allow |
3362 | invariants as the source of a plain move. */ |
3363 | is_set_src = false; |
3364 | if (old_set |
3365 | && recog_data.operand_loc[i] == &SET_SRC (old_set)) |
3366 | is_set_src = true; |
3367 | in_plus = false; |
3368 | if (plus_src |
3369 | && (recog_data.operand_loc[i] == &XEXP (plus_src, 0) |
3370 | || recog_data.operand_loc[i] == &XEXP (plus_src, 1))) |
3371 | in_plus = true; |
3372 | |
3373 | substed_operand[i] |
3374 | = eliminate_regs_1 (recog_data.operand[i], VOIDmode, |
3375 | replace ? insn : NULL_RTX, |
3376 | is_set_src || in_plus, false); |
3377 | if (substed_operand[i] != orig_operand[i]) |
3378 | val = 1; |
3379 | /* Terminate the search in check_eliminable_occurrences at |
3380 | this point. */ |
3381 | *recog_data.operand_loc[i] = 0; |
3382 | |
3383 | /* If an output operand changed from a REG to a MEM and INSN is an |
3384 | insn, write a CLOBBER insn. */ |
3385 | if (recog_data.operand_type[i] != OP_IN |
3386 | && REG_P (orig_operand[i]) |
3387 | && MEM_P (substed_operand[i]) |
3388 | && replace) |
3389 | emit_insn_after (gen_clobber (orig_operand[i]), insn); |
3390 | } |
3391 | } |
3392 | |
3393 | for (i = 0; i < recog_data.n_dups; i++) |
3394 | *recog_data.dup_loc[i] |
3395 | = *recog_data.operand_loc[(int) recog_data.dup_num[i]]; |
3396 | |
3397 | /* If any eliminable registers remain, they aren't eliminable anymore. */ |
3398 | check_eliminable_occurrences (old_body); |
3399 | |
3400 | /* Substitute the operands; the new values are in the substed_operand |
3401 | array. */ |
3402 | for (i = 0; i < recog_data.n_operands; i++) |
3403 | *recog_data.operand_loc[i] = substed_operand[i]; |
3404 | for (i = 0; i < recog_data.n_dups; i++) |
3405 | *recog_data.dup_loc[i] = substed_operand[(int) recog_data.dup_num[i]]; |
3406 | |
3407 | /* If we are replacing a body that was a (set X (plus Y Z)), try to |
3408 | re-recognize the insn. We do this in case we had a simple addition |
3409 | but now can do this as a load-address. This saves an insn in this |
3410 | common case. |
3411 | If re-recognition fails, the old insn code number will still be used, |
3412 | and some register operands may have changed into PLUS expressions. |
3413 | These will be handled by find_reloads by loading them into a register |
3414 | again. */ |
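     | /* Illustrative only: on a target with a load-address pattern, |
     |      (set (reg 100) (plus (reg sp) (const_int 8))) |
     |    may now match as a single lea-style insn rather than an add. */ |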
3415 | |
3416 | if (val) |
3417 | { |
3418 | /* If we aren't replacing things permanently and we changed something, |
3419 | make another copy to ensure that all the RTL is new. Otherwise |
3420 | things can go wrong if find_reloads swaps commutative operands |
3421 | and one is inside RTL that has been copied while the other is not. */ |
3422 | new_body = old_body; |
3423 | if (! replace) |
3424 | { |
3425 | new_body = copy_insn (old_body); |
3426 | if (REG_NOTES (insn)) |
3427 | REG_NOTES (insn) = copy_insn_1 (REG_NOTES (insn)); |
3428 | } |
3429 | PATTERN (insn) = new_body; |
3430 | |
3431 | /* If we had a move insn but now we don't, rerecognize it. This will |
3432 | cause spurious re-recognition if the old move had a PARALLEL since |
3433 | the new one still will, but we can't call single_set without |
3434 | having put NEW_BODY into the insn and the re-recognition won't |
3435 | hurt in this rare case. */ |
3436 | /* ??? Why this huge if statement - why don't we just rerecognize the |
3437 | thing always? */ |
3438 | if (! insn_is_asm |
3439 | && old_set != 0 |
3440 | && ((REG_P (SET_SRC (old_set)) |
3441 | && (GET_CODE (new_body) != SET |
3442 | || !REG_P (SET_SRC (new_body)))) |
3443 | /* If this was a load from or store to memory, compare |
3444 | the MEM in recog_data.operand to the one in the insn. |
3445 | If they are not equal, then rerecognize the insn. */ |
3446 | || (old_set != 0 |
3447 | && ((MEM_P (SET_SRC (old_set)) |
3448 | && SET_SRC (old_set) != recog_data.operand[1]) |
3449 | || (MEM_P (SET_DEST (old_set)) |
3450 | && SET_DEST (old_set) != recog_data.operand[0]))) |
3451 | /* If this was an add insn before, rerecognize. */ |
3452 | || GET_CODE (SET_SRC (old_set)) == PLUS)) |
3453 | { |
3454 | int new_icode = recog (PATTERN (insn), insn, 0); |
3455 | if (new_icode >= 0) |
3456 | INSN_CODE (insn) = new_icode; |
3457 | } |
3458 | } |
3459 | |
3460 | /* Restore the old body. If there were any changes to it, we made a copy |
3461 | of it while the changes were still in place, so we'll correctly return |
3462 | a modified insn below. */ |
3463 | if (! replace) |
3464 | { |
3465 | /* Restore the old body. */ |
3466 | for (i = 0; i < recog_data.n_operands; i++) |
3467 | /* Restoring a top-level match_parallel would clobber the new_body |
3468 | we installed in the insn. */ |
3469 | if (recog_data.operand_loc[i] != &PATTERN (insn)) |
3470 | *recog_data.operand_loc[i] = orig_operand[i]; |
3471 | for (i = 0; i < recog_data.n_dups; i++) |
3472 | *recog_data.dup_loc[i] = orig_operand[(int) recog_data.dup_num[i]]; |
3473 | } |
3474 | |
3475 | /* Update all elimination pairs to reflect the status after the current |
3476 | insn. The changes we make were determined by the earlier call to |
3477 | elimination_effects. |
3478 | |
3479 | We also detect cases where register elimination cannot be done, |
3480 | namely, if a register would be both changed and referenced outside a MEM |
3481 | in the resulting insn since such an insn is often undefined and, even if |
3482 | not, we cannot know what meaning will be given to it. Note that it is |
3483 | valid to have a register used in an address in an insn that changes it |
3484 | (presumably with a pre- or post-increment or decrement). |
3485 | |
3486 | If anything changes, return nonzero. */ |
3487 | |
3488 | for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++) |
3489 | { |
3490 | if (maybe_ne (ep->previous_offset, ep->offset) && ep->ref_outside_mem) |
3491 | ep->can_eliminate = 0; |
3492 | |
3493 | ep->ref_outside_mem = 0; |
3494 | |
3495 | if (maybe_ne (ep->previous_offset, ep->offset)) |
3496 | val = 1; |
3497 | } |
3498 | |
3499 | done: |
3500 | /* If we changed something, perform elimination in REG_NOTES. This is |
3501 | needed even when REPLACE is zero because a REG_DEAD note might refer |
3502 | to a register that we eliminate and could cause a different number |
3503 | of spill registers to be needed in the final reload pass than in |
3504 | the pre-passes. */ |
3505 | if (val && REG_NOTES (insn) != 0) |
3506 | REG_NOTES (insn) |
3507 | = eliminate_regs_1 (REG_NOTES (insn), VOIDmode, REG_NOTES (insn), true, |
3508 | false); |
3509 | |
3510 | return val; |
3511 | } |
3512 | |
3513 | /* Like eliminate_regs_in_insn, but only estimate costs, for use by the |
3514 | register allocator. INSN is the instruction we need to examine; we perform |
3515 | eliminations in its operands and record cases where eliminating a reg with |
3516 | an invariant equivalence would add extra cost. */ |
3517 | |
3518 | #pragma GCC diagnostic push |
3519 | #pragma GCC diagnostic warning "-Wmaybe-uninitialized" |
3520 | static void |
3521 | elimination_costs_in_insn (rtx_insn *insn) |
3522 | { |
3523 | int icode = recog_memoized (insn); |
3524 | rtx old_body = PATTERN (insn); |
3525 | int insn_is_asm = asm_noperands (old_body) >= 0; |
3526 | rtx old_set = single_set (insn); |
3527 | int i; |
3528 | rtx orig_operand[MAX_RECOG_OPERANDS]; |
3529 | rtx orig_dup[MAX_RECOG_OPERANDS]; |
3530 | struct elim_table *ep; |
3531 | rtx plus_src, plus_cst_src; |
3532 | bool sets_reg_p; |
3533 | |
3534 | if (! insn_is_asm && icode < 0) |
3535 | { |
3536 | gcc_assert (DEBUG_INSN_P (insn) |
3537 | || GET_CODE (PATTERN (insn)) == USE |
3538 | || GET_CODE (PATTERN (insn)) == CLOBBER |
3539 | || GET_CODE (PATTERN (insn)) == ASM_INPUT); |
3540 | return; |
3541 | } |
3542 | |
3543 | if (old_set != 0 && REG_P (SET_DEST (old_set)) |
3544 | && REGNO (SET_DEST (old_set)) < FIRST_PSEUDO_REGISTER) |
3545 | { |
3546 | /* Check for setting an eliminable register. */ |
3547 | for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++) |
3548 | if (ep->from_rtx == SET_DEST (old_set) && ep->can_eliminate) |
3549 | return; |
3550 | } |
3551 | |
3552 | /* We allow one special case which happens to work on all machines we |
3553 | currently support: a single set with the source or a REG_EQUAL |
3554 | note being a PLUS of an eliminable register and a constant. */ |
3555 | plus_src = plus_cst_src = 0; |
3556 | sets_reg_p = false; |
3557 | if (old_set && REG_P (SET_DEST (old_set))) |
3558 | { |
3559 | sets_reg_p = true; |
3560 | if (GET_CODE (SET_SRC (old_set)) == PLUS) |
3561 | plus_src = SET_SRC (old_set); |
3562 | /* First see if the source is of the form (plus (...) CST). */ |
3563 | if (plus_src |
3564 | && CONST_INT_P (XEXP (plus_src, 1))) |
3565 | plus_cst_src = plus_src; |
     | Value stored to 'plus_cst_src' is never read |
3566 | else if (REG_P (SET_SRC (old_set)) |
3567 | || plus_src) |
3568 | { |
3569 | /* Otherwise, see if we have a REG_EQUAL note of the form |
3570 | (plus (...) CST). */ |
3571 | rtx links; |
3572 | for (links = REG_NOTES (insn); links; links = XEXP (links, 1)) |
3573 | { |
3574 | if ((REG_NOTE_KIND (links) == REG_EQUAL |
3575 | || REG_NOTE_KIND (links) == REG_EQUIV) |
3576 | && GET_CODE (XEXP (links, 0)) == PLUS |
3577 | && CONST_INT_P (XEXP (XEXP (links, 0), 1))) |
3578 | { |
3579 | plus_cst_src = XEXP (links, 0); |
3580 | break; |
3581 | } |
3582 | } |
3583 | } |
3584 | } |
3585 | |
3586 | /* Determine the effects of this insn on elimination offsets. */ |
3587 | elimination_effects (old_body, VOIDmode); |
3588 | |
3589 | /* Eliminate all eliminable registers occurring in operands that |
3590 | can be handled by reload. */ |
3591 | extract_insn (insn); |
3592 | int n_dups = recog_data.n_dups; |
3593 | for (i = 0; i < n_dups; i++) |
3594 | orig_dup[i] = *recog_data.dup_loc[i]; |
3595 | |
3596 | int n_operands = recog_data.n_operands; |
3597 | for (i = 0; i < n_operands; i++) |
3598 | { |
3599 | orig_operand[i] = recog_data.operand[i]; |
3600 | |
3601 | /* For an asm statement, every operand is eliminable. */ |
3602 | if (insn_is_asm || insn_data[icode].operand[i].eliminable) |
3603 | { |
3604 | bool is_set_src, in_plus; |
3605 | |
3606 | /* Check for setting a register that we know about. */ |
3607 | if (recog_data.operand_type[i] != OP_IN |
3608 | && REG_P (orig_operand[i])) |
3609 | { |
3610 | /* If we are assigning to a register that can be eliminated, it |
3611 | must be as part of a PARALLEL, since the code above handles |
3612 | single SETs. We must indicate that we can no longer |
3613 | eliminate this reg. */ |
3614 | for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; |
3615 | ep++) |
3616 | if (ep->from_rtx == orig_operand[i]) |
3617 | ep->can_eliminate = 0; |
3618 | } |
3619 | |
3620 | /* Companion to the above plus substitution, we can allow |
3621 | invariants as the source of a plain move. */ |
3622 | is_set_src = false; |
3623 | if (old_set && recog_data.operand_loc[i] == &SET_SRC (old_set)) |
3624 | is_set_src = true; |
3625 | if (is_set_src && !sets_reg_p) |
3626 | note_reg_elim_costly (SET_SRC (old_set), insn); |
3627 | in_plus = false; |
3628 | if (plus_src && sets_reg_p |
3629 | && (recog_data.operand_loc[i] == &XEXP (plus_src, 0) |
3630 | || recog_data.operand_loc[i] == &XEXP (plus_src, 1))) |
3631 | in_plus = true; |
3632 | |
3633 | eliminate_regs_1 (recog_data.operand[i], VOIDmode, |
3634 | NULL_RTX, |
3635 | is_set_src || in_plus, true); |
3636 | /* Terminate the search in check_eliminable_occurrences at |
3637 | this point. */ |
3638 | *recog_data.operand_loc[i] = 0; |
3639 | } |
3640 | } |
3641 | |
3642 | for (i = 0; i < n_dups; i++) |
3643 | *recog_data.dup_loc[i] |
3644 | = *recog_data.operand_loc[(int) recog_data.dup_num[i]]; |
3645 | |
3646 | /* If any eliminable registers remain, they aren't eliminable anymore. */ |
3647 | check_eliminable_occurrences (old_body); |
3648 | |
3649 | /* Restore the old body. */ |
3650 | for (i = 0; i < n_operands; i++) |
3651 | *recog_data.operand_loc[i] = orig_operand[i]; |
3652 | for (i = 0; i < n_dups; i++) |
3653 | *recog_data.dup_loc[i] = orig_dup[i]; |
3654 | |
3655 | /* Update all elimination pairs to reflect the status after the current |
3656 | insn. The changes we make were determined by the earlier call to |
3657 | elimination_effects. */ |
3658 | |
3659 | for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++) |
3660 | { |
3661 | if (maybe_ne (ep->previous_offset, ep->offset) && ep->ref_outside_mem) |
3662 | ep->can_eliminate = 0; |
3663 | |
3664 | ep->ref_outside_mem = 0; |
3665 | } |
3666 | |
3667 | return; |
3668 | } |
3669 | #pragma GCC diagnostic pop |
3670 | |
3671 | /* Loop through all elimination pairs. |
3672 | Recalculate the number not at initial offset. |
3673 | |
3674 | Compute the maximum offset (minimum offset if the stack does not |
3675 | grow downward) for each elimination pair. */ |
3676 | |
3677 | static void |
3678 | update_eliminable_offsets (void) |
3679 | { |
3680 | struct elim_table *ep; |
3681 | |
3682 | num_not_at_initial_offset = 0; |
3683 | for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++) |
3684 | { |
3685 | ep->previous_offset = ep->offset; |
3686 | if (ep->can_eliminate && maybe_ne (ep->offset, ep->initial_offset)) |
3687 | num_not_at_initial_offset++; |
3688 | } |
3689 | } |
3690 | |
3691 | /* Given X, a SET or CLOBBER of DEST, if DEST is the target of a register |
3692 | replacement we currently believe is valid, mark it as not eliminable if X |
3693 | modifies DEST in any way other than by adding a constant integer to it. |
3694 | |
3695 | If DEST is the frame pointer, we do nothing because we assume that |
3696 | all assignments to the hard frame pointer are nonlocal gotos and are being |
3697 | done at a time when they are valid and do not disturb anything else. |
3698 | Some machines want to eliminate a fake argument pointer with either the |
3699 | frame or stack pointer. Assignments to the hard frame pointer must not |
3700 | prevent this elimination. |
3701 | |
3702 | Called via note_stores from reload before starting its passes to scan |
3703 | the insns of the function. */ |
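     | /* Illustrative: with an fp -> sp elimination in effect, |
     |      (set (reg sp) (plus (reg sp) (const_int -16))) |
     |    merely changes the elimination offset, whereas something like |
     |      (set (reg sp) (reg 100)) |
     |    makes the offset unknowable and disables the elimination. */ |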
3704 | |
3705 | static void |
3706 | mark_not_eliminable (rtx dest, const_rtx x, void *data ATTRIBUTE_UNUSED) |
3707 | { |
3708 | unsigned int i; |
3709 | |
3710 | /* A SUBREG of a hard register here is just changing its mode. We should |
3711 | not see a SUBREG of an eliminable hard register, but check just in |
3712 | case. */ |
3713 | if (GET_CODE (dest) == SUBREG) |
3714 | dest = SUBREG_REG (dest); |
3715 | |
3716 | if (dest == hard_frame_pointer_rtx) |
3717 | return; |
3718 | |
3719 | for (i = 0; i < NUM_ELIMINABLE_REGS; i++) |
3720 | if (reg_eliminate[i].can_eliminate && dest == reg_eliminate[i].to_rtx |
3721 | && (GET_CODE (x) != SET |
3722 | || GET_CODE (SET_SRC (x)) != PLUS |
3723 | || XEXP (SET_SRC (x), 0) != dest |
3724 | || !CONST_INT_P (XEXP (SET_SRC (x), 1)))) |
3725 | { |
3726 | reg_eliminate[i].can_eliminate_previous |
3727 | = reg_eliminate[i].can_eliminate = 0; |
3728 | num_eliminable--; |
3729 | } |
3730 | } |
3731 | |
3732 | /* Verify that the initial elimination offsets did not change since the |
3733 | last call to set_initial_elim_offsets. This is used to catch cases |
3734 | where something illegal happened during reload_as_needed that could |
3735 | cause incorrect code to be generated if we did not check for it. */ |
3736 | |
3737 | static bool |
3738 | verify_initial_elim_offsets (void) |
3739 | { |
3740 | poly_int64 t; |
3741 | struct elim_table *ep; |
3742 | |
3743 | if (!num_eliminable) |
3744 | return true; |
3745 | |
3746 | targetm.compute_frame_layout (); |
3747 | for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++) |
3748 | { |
3749 | INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, t); |
3750 | if (maybe_ne (t, ep->initial_offset)) |
3751 | return false; |
3752 | } |
3753 | |
3754 | return true; |
3755 | } |
3756 | |
3757 | /* Reset all offsets on eliminable registers to their initial values. */ |
3758 | |
3759 | static void |
3760 | set_initial_elim_offsets (void) |
3761 | { |
3762 | struct elim_table *ep = reg_eliminate; |
3763 | |
3764 | targetm.compute_frame_layout (); |
3765 | for (; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++) |
3766 | { |
3767 | INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, ep->initial_offset); |
3768 | ep->previous_offset = ep->offset = ep->initial_offset; |
3769 | } |
3770 | |
3771 | num_not_at_initial_offset = 0; |
3772 | } |
3773 | |
3774 | /* Subroutine of set_initial_label_offsets called via for_each_eh_label. */ |
3775 | |
3776 | static void |
3777 | set_initial_eh_label_offset (rtx label) |
3778 | { |
3779 | set_label_offsets (label, NULL, 1); |
3780 | } |
3781 | |
3782 | /* Initialize the known label offsets. |
3783 | Set a known offset for each forced label to be at the initial offset |
3784 | of each elimination. We do this because we assume that all |
3785 | computed jumps occur from a location where each elimination is |
3786 | at its initial offset. |
3787 | For all other labels, show that we don't know the offsets. */ |
3788 | |
3789 | static void |
3790 | set_initial_label_offsets (void) |
3791 | { |
3792 | memset (offsets_known_at, 0, num_labels); |
3793 | |
3794 | unsigned int i; |
3795 | rtx_insn *insn; |
3796 | FOR_EACH_VEC_SAFE_ELT (forced_labels, i, insn) |
3797 | set_label_offsets (insn, NULL, 1); |
3798 | |
3799 | for (rtx_insn_list *x = nonlocal_goto_handler_labels; x; x = x->next ()) |
3800 | if (x->insn ()) |
3801 | set_label_offsets (x->insn (), NULL, 1); |
3802 | |
3803 | for_each_eh_label (set_initial_eh_label_offset); |
3804 | } |
3805 | |
3806 | /* Set all elimination offsets to the known values for the code label given |
3807 | by INSN. */ |
3808 | |
3809 | static void |
3810 | set_offsets_for_label (rtx_insn *insn) |
3811 | { |
3812 | unsigned int i; |
3813 | int label_nr = CODE_LABEL_NUMBER (insn); |
3814 | struct elim_table *ep; |
3815 | |
3816 | num_not_at_initial_offset = 0; |
3817 | for (i = 0, ep = reg_eliminate; i < NUM_ELIMINABLE_REGS; ep++, i++) |
3818 | { |
3819 | ep->offset = ep->previous_offset |
3820 | = offsets_at[label_nr - first_label_num][i]; |
3821 | if (ep->can_eliminate && maybe_ne (ep->offset, ep->initial_offset)) |
3822 | num_not_at_initial_offset++; |
3823 | } |
3824 | } |
3825 | |
3826 | /* See if anything that happened changes which eliminations are valid. |
3827 | For example, on the SPARC, whether or not the frame pointer can |
3828 | be eliminated can depend on what registers have been used. We need |
3829 | not check some conditions again (such as flag_omit_frame_pointer) |
3830 | since they can't have changed. */ |
3831 | |
3832 | static void |
3833 | update_eliminables (HARD_REG_SET *pset) |
3834 | { |
3835 | int previous_frame_pointer_needed = frame_pointer_needed; |
3836 | struct elim_table *ep; |
3837 | |
3838 | for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++) |
3839 | if ((ep->from == HARD_FRAME_POINTER_REGNUM |
3840 | && targetm.frame_pointer_required ()) |
3841 | || ! targetm.can_eliminate (ep->from, ep->to) |
3842 | ) |
3843 | ep->can_eliminate = 0; |
3844 | |
3845 | /* Look for the case where we have discovered that we can't replace |
3846 | register A with register B and that means that we will now be |
3847 | trying to replace register A with register C. This means we can |
3848 | no longer replace register C with register B and we need to disable |
3849 | such an elimination, if it exists. This occurs often with A == ap, |
3850 | B == sp, and C == fp. */ |
3851 | |
3852 | for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++) |
3853 | { |
3854 | struct elim_table *op; |
3855 | int new_to = -1; |
3856 | |
3857 | if (! ep->can_eliminate && ep->can_eliminate_previous) |
3858 | { |
3859 | /* Find the current elimination for ep->from, if there is a |
3860 | new one. */ |
3861 | for (op = reg_eliminate; |
3862 | op < ®_eliminate[NUM_ELIMINABLE_REGS(sizeof (reg_eliminate_1) / sizeof ((reg_eliminate_1)[0]))]; op++) |
3863 | if (op->from == ep->from && op->can_eliminate) |
3864 | { |
3865 | new_to = op->to; |
3866 | break; |
3867 | } |
3868 | |
3869 | /* See if there is an elimination of NEW_TO -> EP->TO. If so, |
3870 | disable it. */ |
3871 | for (op = reg_eliminate; |
3872 | op < ®_eliminate[NUM_ELIMINABLE_REGS(sizeof (reg_eliminate_1) / sizeof ((reg_eliminate_1)[0]))]; op++) |
3873 | if (op->from == new_to && op->to == ep->to) |
3874 | op->can_eliminate = 0; |
3875 | } |
3876 | } |
3877 | |
3878 | /* See if any registers that we thought we could eliminate the previous |
3879 | time are no longer eliminable. If so, something has changed and we |
3880 | must spill the register. Also, recompute the number of eliminable |
3881 | registers and see if the frame pointer is needed; it is if there is |
3882 | no elimination of the frame pointer that we can perform. */ |
3883 | |
3884 | frame_pointer_needed = 1; |
3885 | for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++) |
3886 | { |
3887 | if (ep->can_eliminate |
3888 | && ep->from == FRAME_POINTER_REGNUM |
3889 | && ep->to != HARD_FRAME_POINTER_REGNUM |
3890 | && (! SUPPORTS_STACK_ALIGNMENT |
3891 | || ! crtl->stack_realign_needed)) |
3892 | frame_pointer_needed = 0; |
3893 | |
3894 | if (! ep->can_eliminate && ep->can_eliminate_previous) |
3895 | { |
3896 | ep->can_eliminate_previous = 0; |
3897 | SET_HARD_REG_BIT (*pset, ep->from); |
3898 | num_eliminable--; |
3899 | } |
3900 | } |
3901 | |
3902 | /* If we didn't need a frame pointer last time, but we do now, spill |
3903 | the hard frame pointer. */ |
3904 | if (frame_pointer_needed && ! previous_frame_pointer_needed) |
3905 | SET_HARD_REG_BIT (*pset, HARD_FRAME_POINTER_REGNUM); |
3906 | } |
3907 | |
3908 | /* Call update_eliminables and spill any registers we can't eliminate anymore. |
3909 | Return true iff a register was spilled. */ |
3910 | |
3911 | static bool |
3912 | update_eliminables_and_spill (void) |
3913 | { |
3914 | int i; |
3915 | bool did_spill = false; |
3916 | HARD_REG_SET to_spill; |
3917 | CLEAR_HARD_REG_SET (to_spill); |
3918 | update_eliminables (&to_spill); |
3919 | used_spill_regs &= ~to_spill; |
3920 | |
3921 | for (i = 0; i < FIRST_PSEUDO_REGISTER; i++) |
3922 | if (TEST_HARD_REG_BIT (to_spill, i)) |
3923 | { |
3924 | spill_hard_reg (i, 1); |
3925 | did_spill = true; |
3926 | |
3927 | /* Regardless of the state of spills, if we previously had |
3928 | a register that we thought we could eliminate, but now |
3929 | cannot eliminate, we must run another pass. |
3930 | |
3931 | Consider pseudos which have an entry in reg_equiv_* which |
3932 | reference an eliminable register. We must make another pass |
3933 | to update reg_equiv_* so that we do not substitute in the |
3934 | old value from when we thought the elimination could be |
3935 | performed. */ |
3936 | } |
3937 | return did_spill; |
3938 | } |
3939 | |
3940 | /* Return true if X is used as the target register of an elimination. */ |
3941 | |
3942 | bool |
3943 | elimination_target_reg_p (rtx x) |
3944 | { |
3945 | struct elim_table *ep; |
3946 | |
3947 | for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++) |
3948 | if (ep->to_rtx == x && ep->can_eliminate) |
3949 | return true; |
3950 | |
3951 | return false; |
3952 | } |
3953 | |
3954 | /* Initialize the table of registers to eliminate. |
3955 | Pre-condition: global flag frame_pointer_needed has been set before |
3956 | calling this function. */ |
3957 | |
3958 | static void |
3959 | init_elim_table (void) |
3960 | { |
3961 | struct elim_table *ep; |
3962 | const struct elim_table_1 *ep1; |
3963 | |
3964 | if (!reg_eliminate) |
3965 | reg_eliminate = XCNEWVEC (struct elim_table, NUM_ELIMINABLE_REGS); |
3966 | |
3967 | num_eliminable = 0; |
3968 | |
3969 | for (ep = reg_eliminate, ep1 = reg_eliminate_1; |
3970 | ep < ®_eliminate[NUM_ELIMINABLE_REGS(sizeof (reg_eliminate_1) / sizeof ((reg_eliminate_1)[0]))]; ep++, ep1++) |
3971 | { |
3972 | ep->from = ep1->from; |
3973 | ep->to = ep1->to; |
3974 | ep->can_eliminate = ep->can_eliminate_previous |
3975 | = (targetm.can_eliminate (ep->from, ep->to) |
3976 | && ! (ep->to == STACK_POINTER_REGNUM |
3977 | && frame_pointer_needed |
3978 | && (! SUPPORTS_STACK_ALIGNMENT |
3979 | || ! stack_realign_fp))); |
3980 | } |
3981 | |
3982 | /* Count the number of eliminable registers and build the FROM and TO |
3983 | REG rtx's. Note that code in gen_rtx_REG will cause, e.g., |
3984 | gen_rtx_REG (Pmode, STACK_POINTER_REGNUM) to equal stack_pointer_rtx. |
3985 | We depend on this. */ |
3986 | for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++) |
3987 | { |
3988 | num_eliminable += ep->can_eliminate; |
3989 | ep->from_rtx = gen_rtx_REG (Pmode, ep->from); |
3990 | ep->to_rtx = gen_rtx_REG (Pmode, ep->to); |
3991 | } |
3992 | } |
3993 | |
3994 | /* Find all the pseudo registers that didn't get hard regs |
3995 | but do have known equivalent constants or memory slots. |
3996 | These include parameters (known equivalent to parameter slots) |
3997 | and cse'd or loop-moved constant memory addresses. |
3998 | |
3999 | Record constant equivalents in reg_equiv_constant |
4000 | so they will be substituted by find_reloads. |
4001 | Record memory equivalents in reg_mem_equiv so they can |
4002 | be substituted eventually by altering the REG-rtx's. */ |
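     | /* Illustrative (the offset is hypothetical): an incoming parameter's |
     |    pseudo may carry a REG_EQUIV note such as |
     |      (mem (plus (reg ap) (const_int 8))) |
     |    which goes to reg_equiv_memory_loc, while an invariant such as |
     |    (symbol_ref "x") typically goes to reg_equiv_constant, provided |
     |    the target deems it a legitimate constant. */ |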
4003 | |
4004 | static void |
4005 | init_eliminable_invariants (rtx_insn *first, bool do_subregs) |
4006 | { |
4007 | int i; |
4008 | rtx_insn *insn; |
4009 | |
4010 | grow_reg_equivs (); |
4011 | if (do_subregs) |
4012 | reg_max_ref_mode = XCNEWVEC (machine_mode, max_regno); |
4013 | else |
4014 | reg_max_ref_mode = NULL; |
4015 | |
4016 | num_eliminable_invariants = 0; |
4017 | |
4018 | first_label_num = get_first_label_num (); |
4019 | num_labels = max_label_num () - first_label_num; |
4020 | |
4021 | /* Allocate the tables used to store offset information at labels. */ |
4022 | offsets_known_at = XNEWVEC (char, num_labels); |
4023 | offsets_at = (poly_int64_pod (*)[NUM_ELIMINABLE_REGS]) |
4024 | xmalloc (num_labels * NUM_ELIMINABLE_REGS * sizeof (poly_int64)); |
4025 | |
4026 | /* Look for REG_EQUIV notes; record what each pseudo is equivalent |
4027 | to. If DO_SUBREGS is true, also find all paradoxical subregs and |
4028 | find largest such for each pseudo. FIRST is the head of the insn |
4029 | list. */ |
4030 | |
4031 | for (insn = first; insn; insn = NEXT_INSN (insn)) |
4032 | { |
4033 | rtx set = single_set (insn); |
4034 | |
4035 | /* We may introduce USEs that we want to remove at the end, so |
4036 | we'll mark them with QImode. Make sure there are no |
4037 | previously-marked insns left by, say, regmove. */ |
4038 | if (INSN_P (insn) && GET_CODE (PATTERN (insn)) == USE |
4039 | && GET_MODE (insn) != VOIDmode) |
4040 | PUT_MODE (insn, VOIDmode); |
4041 | |
4042 | if (do_subregs && NONDEBUG_INSN_P (insn)) |
4043 | scan_paradoxical_subregs (PATTERN (insn)); |
4044 | |
4045 | if (set != 0 && REG_P (SET_DEST (set))) |
4046 | { |
4047 | rtx note = find_reg_note (insn, REG_EQUIV, NULL_RTX); |
4048 | rtx x; |
4049 | |
4050 | if (! note) |
4051 | continue; |
4052 | |
4053 | i = REGNO (SET_DEST (set)); |
4054 | x = XEXP (note, 0); |
4055 | |
4056 | if (i <= LAST_VIRTUAL_REGISTER) |
4057 | continue; |
4058 | |
4059 | /* If flag_pic is set and X is a constant, verify that it is legitimate. */ |
4060 | if (!CONSTANT_P (x) |
4061 | || !flag_pic || LEGITIMATE_PIC_OPERAND_P (x)) |
4062 | { |
4063 | /* It can happen that a REG_EQUIV note contains a MEM |
4064 | that is not a legitimate memory operand. As later |
4065 | stages of reload assume that all addresses found |
4066 | in the reg_equiv_* arrays were originally legitimate, |
4067 | we ignore such REG_EQUIV notes. */ |
4068 | if (memory_operand (x, VOIDmode)) |
4069 | { |
4070 | /* Always unshare the equivalence, so we can |
4071 | substitute into this insn without touching the |
4072 | equivalence. */ |
4073 | reg_equiv_memory_loc (i) = copy_rtx (x); |
4074 | } |
4075 | else if (function_invariant_p (x)) |
4076 | { |
4077 | machine_mode mode; |
4078 | |
4079 | mode = GET_MODE (SET_DEST (set)); |
4080 | if (GET_CODE (x) == PLUS) |
4081 | { |
4082 | /* This is PLUS of frame pointer and a constant, |
4083 | and might be shared. Unshare it. */ |
4084 | reg_equiv_invariant (i) = copy_rtx (x); |
4085 | num_eliminable_invariants++; |
4086 | } |
4087 | else if (x == frame_pointer_rtx || x == arg_pointer_rtx) |
4088 | { |
4089 | reg_equiv_invariant (i) = x; |
4090 | num_eliminable_invariants++; |
4091 | } |
4092 | else if (targetm.legitimate_constant_p (mode, x)) |
4093 | reg_equiv_constant (i) = x; |
4094 | else |
4095 | { |
4096 | reg_equiv_memory_loc (i) = force_const_mem (mode, x); |
4097 | if (! reg_equiv_memory_loc (i)) |
4098 | reg_equiv_init (i) = NULL; |
4099 | } |
4100 | } |
4101 | else |
4102 | { |
4103 | reg_equiv_init (i) = NULL; |
4104 | continue; |
4105 | } |
4106 | } |
4107 | else |
4108 | reg_equiv_init (i) = NULL; |
4109 | } |
4110 | } |
4111 | |
4112 | if (dump_file) |
4113 | for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++) |
4114 | if (reg_equiv_init (i)) |
4115 | { |
4116 | fprintf (dump_file, "init_insns for %u: ", i); |
4117 | print_inline_rtx (dump_file, reg_equiv_init (i), 20); |
4118 | fprintf (dump_file, "\n"); |
4119 | } |
4120 | } |
4121 | |
4122 | /* Indicate that we no longer have known memory locations or constants. |
4123 | Free all data involved in tracking these. */ |
4124 | |
4125 | static void |
4126 | free_reg_equiv (void) |
4127 | { |
4128 | int i; |
4129 | |
4130 | free (offsets_known_at); |
4131 | free (offsets_at); |
4132 | offsets_at = 0; |
4133 | offsets_known_at = 0; |
4134 | |
4135 | for (i = 0; i < FIRST_PSEUDO_REGISTER; i++) |
4136 | if (reg_equiv_alt_mem_list (i)) |
4137 | free_EXPR_LIST_list (&reg_equiv_alt_mem_list (i)); |
4138 | vec_free (reg_equivs); |
4139 | } |
4140 | |
4141 | /* Kick all pseudos out of hard register REGNO. |
4142 | |
4143 | If CANT_ELIMINATE is nonzero, it means that we are doing this spill |
4144 | because we found we can't eliminate some register. In that case, no pseudos |
4145 | are allowed to be in the register, even if they are only in a block that |
4146 | doesn't require spill registers, unlike the case when we are spilling this |
4147 | hard reg to produce another spill register. |
4148 | |
4149 | Any pseudos that need to be kicked out are recorded in spilled_pseudos. */ |
4150 | |
4151 | static void |
4152 | spill_hard_reg (unsigned int regno, int cant_eliminate) |
4153 | { |
4154 | int i; |
4155 | |
4156 | if (cant_eliminate) |
4157 | { |
4158 | SET_HARD_REG_BIT (bad_spill_regs_global, regno); |
4159 | df_set_regs_ever_live (regno, true); |
4160 | } |
4161 | |
4162 | /* Spill every pseudo reg that was allocated to this reg |
4163 | or to something that overlaps this reg. */ |
4164 | |
4165 | for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++) |
4166 | if (reg_renumber[i] >= 0 |
4167 | && (unsigned int) reg_renumber[i] <= regno |
4168 | && end_hard_regno (PSEUDO_REGNO_MODE (i), reg_renumber[i]) > regno) |
4169 | SET_REGNO_REG_SET (&spilled_pseudos, i); |
4170 | } |
4171 | |
4172 | /* After spill_hard_reg was called and/or find_reload_regs was run for all |
4173 | insns that need reloads, this function is used to actually spill pseudo |
4174 | registers and try to reallocate them. It also sets up the spill_regs |
4175 | array for use by choose_reload_regs. |
4176 | |
4177 | GLOBAL nonzero means we should attempt to reallocate any pseudo registers |
4178 | that we displace from hard registers. */ |
4179 | |
4180 | static int |
4181 | finish_spills (int global) |
4182 | { |
4183 | class insn_chain *chain; |
4184 | int something_changed = 0; |
4185 | unsigned i; |
4186 | reg_set_iterator rsi; |
4187 | |
4188 | /* Build the spill_regs array for the function. */ |
4189 | /* If there are some registers still to eliminate and one of the spill regs |
4190 | wasn't ever used before, additional stack space may have to be |
4191 | allocated to store this register. Thus, we may have changed the offset |
4192 | between the stack and frame pointers, so mark that something has changed. |
4193 | |
4194 | One might think that we need only set VAL to 1 if this is a call-used |
4195 | register. However, the set of registers that must be saved by the |
4196 | prologue is not identical to the call-used set. For example, the |
4197 | register used by the call insn for the return PC is a call-used register, |
4198 | but must be saved by the prologue. */ |
4199 | |
4200 | n_spills = 0; |
4201 | for (i = 0; i < FIRST_PSEUDO_REGISTER; i++) |
4202 | if (TEST_HARD_REG_BIT (used_spill_regs, i)) |
4203 | { |
4204 | spill_reg_order[i] = n_spills; |
4205 | spill_regs[n_spills++] = i; |
4206 | if (num_eliminable && ! df_regs_ever_live_p (i)) |
4207 | something_changed = 1; |
4208 | df_set_regs_ever_live (i, true); |
4209 | } |
4210 | else |
4211 | spill_reg_order[i] = -1; |
4212 | |
4213 | EXECUTE_IF_SET_IN_REG_SET (&spilled_pseudos, FIRST_PSEUDO_REGISTER, i, rsi) |
4214 | if (reg_renumber[i] >= 0) |
4215 | { |
4216 | SET_HARD_REG_BIT (pseudo_previous_regs[i], reg_renumber[i]); |
4217 | /* Mark it as no longer having a hard register home. */ |
4218 | reg_renumber[i] = -1; |
4219 | if (ira_conflicts_p) |
4220 | /* Inform IRA about the change. */ |
4221 | ira_mark_allocation_change (i); |
4222 | /* We will need to scan everything again. */ |
4223 | something_changed = 1; |
4224 | } |
4225 | |
4226 | /* Retry global register allocation if possible. */ |
4227 | if (global && ira_conflicts_p) |
4228 | { |
4229 | unsigned int n; |
4230 | |
4231 | memset (pseudo_forbidden_regs, 0, max_regno * sizeof (HARD_REG_SET)); |
4232 | /* For every insn that needs reloads, set the registers used as spill |
4233 | regs in pseudo_forbidden_regs for every pseudo live across the |
4234 | insn. */ |
4235 | for (chain = insns_need_reload; chain; chain = chain->next_need_reload) |
4236 | { |
4237 | EXECUTE_IF_SET_IN_REG_SET |
4238 | (&chain->live_throughout, FIRST_PSEUDO_REGISTER, i, rsi) |
4239 | { |
4240 | pseudo_forbidden_regs[i] |= chain->used_spill_regs; |
4241 | } |
4242 | EXECUTE_IF_SET_IN_REG_SET |
4243 | (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, i, rsi) |
4244 | { |
4245 | pseudo_forbidden_regs[i] |= chain->used_spill_regs; |
4246 | } |
4247 | } |
4248 | |
4249 | /* Retry allocating the pseudos spilled in IRA and the |
4250 | reload. For each reg, merge the various reg sets that |
4251 | indicate which hard regs can't be used, and call |
4252 | ira_reassign_pseudos. */ |
4253 | for (n = 0, i = FIRST_PSEUDO_REGISTER; i < (unsigned) max_regno; i++) |
4254 | if (reg_old_renumber[i] != reg_renumber[i]) |
4255 | { |
4256 | if (reg_renumber[i] < 0) |
4257 | temp_pseudo_reg_arr[n++] = i; |
4258 | else |
4259 | CLEAR_REGNO_REG_SET (&spilled_pseudos, i); |
4260 | } |
4261 | if (ira_reassign_pseudos (temp_pseudo_reg_arr, n, |
4262 | bad_spill_regs_global, |
4263 | pseudo_forbidden_regs, pseudo_previous_regs, |
4264 | &spilled_pseudos)) |
4265 | something_changed = 1; |
4266 | } |
4267 | /* Fix up the register information in the insn chain. |
4268 | This involves deleting those of the spilled pseudos which did not get |
4269 | a new hard register home from the live_{before,after} sets. */ |
4270 | for (chain = reload_insn_chain; chain; chain = chain->next) |
4271 | { |
4272 | HARD_REG_SET used_by_pseudos; |
4273 | HARD_REG_SET used_by_pseudos2; |
4274 | |
4275 | if (! ira_conflicts_p) |
4276 | { |
4277 | /* Don't do it for IRA because IRA and the reload still can |
4278 | assign hard registers to the spilled pseudos on next |
4279 | reload iterations. */ |
4280 | AND_COMPL_REG_SET (&chain->live_throughout, &spilled_pseudos); |
4281 | AND_COMPL_REG_SET (&chain->dead_or_set, &spilled_pseudos); |
4282 | } |
4283 | /* Mark any unallocated hard regs as available for spills. That |
4284 | makes inheritance work somewhat better. */ |
4285 | if (chain->need_reload) |
4286 | { |
4287 | REG_SET_TO_HARD_REG_SET (used_by_pseudos, &chain->live_throughout); |
4288 | REG_SET_TO_HARD_REG_SET (used_by_pseudos2, &chain->dead_or_set); |
4289 | used_by_pseudos |= used_by_pseudos2; |
4290 | |
4291 | compute_use_by_pseudos (&used_by_pseudos, &chain->live_throughout); |
4292 | compute_use_by_pseudos (&used_by_pseudos, &chain->dead_or_set); |
4293 | /* The value of chain->used_spill_regs from the previous iteration |
4294 | may not be included in the value calculated here because |
4295 | caller-save insns may have been removed in the meantime (see |
4296 | delete_caller_save_insns). */ |
4297 | chain->used_spill_regs = ~used_by_pseudos & used_spill_regs; |
4298 | } |
4299 | } |
4300 | |
4301 | CLEAR_REG_SET (&changed_allocation_pseudos); |
4302 | /* Let alter_reg modify the reg rtx's for the modified pseudos. */ |
4303 | for (i = FIRST_PSEUDO_REGISTER; i < (unsigned) max_regno; i++) |
4304 | { |
4305 | int regno = reg_renumber[i]; |
4306 | if (reg_old_renumber[i] == regno) |
4307 | continue; |
4308 | |
4309 | SET_REGNO_REG_SET (&changed_allocation_pseudos, i); |
4310 | |
4311 | alter_reg (i, reg_old_renumber[i], false); |
4312 | reg_old_renumber[i] = regno; |
4313 | if (dump_file) |
4314 | { |
4315 | if (regno == -1) |
4316 | fprintf (dump_file, " Register %d now on stack.\n\n", i); |
4317 | else |
4318 | fprintf (dump_file, " Register %d now in %d.\n\n", |
4319 | i, reg_renumber[i]); |
4320 | } |
4321 | } |
4322 | |
4323 | return something_changed; |
4324 | } |
4325 | |
4326 | /* Find all paradoxical subregs within X and update reg_max_ref_mode. */ |
4327 | |
4328 | static void |
4329 | scan_paradoxical_subregs (rtx x) |
4330 | { |
4331 | int i; |
4332 | const char *fmt; |
4333 | enum rtx_code code = GET_CODE (x); |
4334 | |
4335 | switch (code) |
4336 | { |
4337 | case REG: |
4338 | case CONST: |
4339 | case SYMBOL_REF: |
4340 | case LABEL_REF: |
4341 | CASE_CONST_ANY: |
4342 | case PC: |
4343 | case USE: |
4344 | case CLOBBER: |
4345 | return; |
4346 | |
4347 | case SUBREG: |
4348 | if (REG_P (SUBREG_REG (x))) |
4349 | { |
4350 | unsigned int regno = REGNO (SUBREG_REG (x)); |
4351 | if (partial_subreg_p (reg_max_ref_mode[regno], GET_MODE (x))) |
4352 | { |
4353 | reg_max_ref_mode[regno] = GET_MODE (x); |
4354 | mark_home_live_1 (regno, GET_MODE (x)); |
4355 | } |
4356 | } |
4357 | return; |
4358 | |
4359 | default: |
4360 | break; |
4361 | } |
4362 | |
4363 | fmt = GET_RTX_FORMAT (code); |
4364 | for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) |
4365 | { |
4366 | if (fmt[i] == 'e') |
4367 | scan_paradoxical_subregs (XEXP (x, i)); |
4368 | else if (fmt[i] == 'E') |
4369 | { |
4370 | int j; |
4371 | for (j = XVECLEN (x, i) - 1; j >= 0; j--) |
4372 | scan_paradoxical_subregs (XVECEXP (x, i, j)); |
4373 | } |
4374 | } |
4375 | } |
4376 | |
4377 | /* *OP_PTR and *OTHER_PTR are two operands to a conceptual reload. |
4378 | If *OP_PTR is a paradoxical subreg, try to remove that subreg |
4379 | and apply the corresponding narrowing subreg to *OTHER_PTR. |
4380 | Return true if the operands were changed, false otherwise. */ |
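     | /* Illustrative (modes, pseudo numbers and the little-endian byte |
     |    offset are hypothetical): given the pair |
     |      *OP_PTR    = (subreg:DI (reg:SI 100) 0) |
     |      *OTHER_PTR = (reg:DI 101) |
     |    we would rewrite it as |
     |      *OP_PTR    = (reg:SI 100) |
     |      *OTHER_PTR = (subreg:SI (reg:DI 101) 0). */ |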
4381 | |
4382 | static bool |
4383 | strip_paradoxical_subreg (rtx *op_ptr, rtx *other_ptr) |
4384 | { |
4385 | rtx op, inner, other, tem; |
4386 | |
4387 | op = *op_ptr; |
4388 | if (!paradoxical_subreg_p (op)) |
4389 | return false; |
4390 | inner = SUBREG_REG (op); |
4391 | |
4392 | other = *other_ptr; |
4393 | tem = gen_lowpart_common (GET_MODE (inner), other); |
4394 | if (!tem) |
4395 | return false; |
4396 | |
4397 | /* If the lowpart operation turned a hard register into a subreg, |
4398 | rather than simplifying it to another hard register, then the |
4399 | mode change cannot be properly represented. For example, OTHER |
4400 | might be valid in its current mode, but not in the new one. */ |
4401 | if (GET_CODE (tem) == SUBREG |
4402 | && REG_P (other) |
4403 | && HARD_REGISTER_P (other)) |
4404 | return false; |
4405 | |
4406 | *op_ptr = inner; |
4407 | *other_ptr = tem; |
4408 | return true; |
4409 | } |
4410 | |
4411 | /* A subroutine of reload_as_needed. If INSN has a REG_EH_REGION note, |
4412 | examine all of the reload insns between PREV and NEXT exclusive, and |
4413 | annotate all that may trap. */ |
4414 | |
4415 | static void |
4416 | fixup_eh_region_note (rtx_insn *insn, rtx_insn *prev, rtx_insn *next) |
4417 | { |
4418 | rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX); |
4419 | if (note == NULL) |
4420 | return; |
4421 | if (!insn_could_throw_p (insn)) |
4422 | remove_note (insn, note); |
4423 | copy_reg_eh_region_note_forward (note, NEXT_INSN (prev), next); |
4424 | } |
4425 | |
4426 | /* Reload pseudo-registers into hard regs around each insn as needed. |
4427 | Additional register load insns are output before the insn that needs it |
4428 | and perhaps store insns after insns that modify the reloaded pseudo reg. |
4429 | |
4430 | reg_last_reload_reg and reg_reloaded_contents keep track of |
4431 | which registers are already available in reload registers. |
4432 | We update these for the reloads that we perform, |
4433 | as the insns are scanned. */ |
4434 | |
4435 | static void |
4436 | reload_as_needed (int live_known) |
4437 | { |
4438 | class insn_chain *chain; |
4439 | #if AUTO_INC_DEC |
4440 | int i; |
4441 | #endif |
4442 | rtx_note *marker; |
4443 | |
4444 | memset (spill_reg_rtx, 0, sizeof spill_reg_rtx); |
4445 | memset (spill_reg_store, 0, sizeof spill_reg_store); |
4446 | reg_last_reload_reg = XCNEWVEC (rtx, max_regno); |
4447 | INIT_REG_SET (&reg_has_output_reload); |
4448 | CLEAR_HARD_REG_SET (reg_reloaded_valid); |
4449 | |
4450 | set_initial_elim_offsets (); |
4451 | |
4452 | /* Generate a marker insn that we will move around. */ |
4453 | marker = emit_note (NOTE_INSN_DELETED); |
4454 | unlink_insn_chain (marker, marker); |
4455 | |
4456 | for (chain = reload_insn_chain; chain; chain = chain->next) |
4457 | { |
4458 | rtx_insn *prev = 0; |
4459 | rtx_insn *insn = chain->insn; |
4460 | rtx_insn *old_next = NEXT_INSN (insn); |
4461 | #if AUTO_INC_DEC |
4462 | rtx_insn *old_prev = PREV_INSN (insn); |
4463 | #endif |
4464 | |
4465 | if (will_delete_init_insn_p (insn)) |
4466 | continue; |
4467 | |
4468 | /* If we pass a label, copy the offsets from the label information |
4469 | into the current offsets of each elimination. */ |
4470 | if (LABEL_P (insn)) |
4471 | set_offsets_for_label (insn); |
4472 | |
4473 | else if (INSN_P (insn)) |
4474 | { |
4475 | regset_head regs_to_forget; |
4476 | INIT_REG_SET (&regs_to_forget); |
4477 | note_stores (insn, forget_old_reloads_1, ®s_to_forget); |
4478 | |
4479 | /* If this is a USE and CLOBBER of a MEM, ensure that any |
4480 | references to eliminable registers have been removed. */ |
4481 | |
4482 | if ((GET_CODE (PATTERN (insn)) == USE |
4483 | || GET_CODE (PATTERN (insn)) == CLOBBER) |
4484 | && MEM_P (XEXP (PATTERN (insn), 0))) |
4485 | XEXP (XEXP (PATTERN (insn), 0), 0) |
4486 | = eliminate_regs (XEXP (XEXP (PATTERN (insn), 0), 0), |
4487 | GET_MODE (XEXP (PATTERN (insn), 0)), |
4488 | NULL_RTX); |
4489 | |
4490 | /* If we need to do register elimination processing, do so. |
4491 | This might delete the insn, in which case we are done. */ |
4492 | if ((num_eliminable || num_eliminable_invariants) && chain->need_elim) |
4493 | { |
4494 | eliminate_regs_in_insn (insn, 1); |
4495 | if (NOTE_P (insn)) |
4496 | { |
4497 | update_eliminable_offsets (); |
4498 | CLEAR_REG_SET (&regs_to_forget); |
4499 | continue; |
4500 | } |
4501 | } |
4502 | |
4503 | /* If need_elim is nonzero but need_reload is zero, one might think |
4504 | that we could simply set n_reloads to 0. However, find_reloads |
4505 | could have done some manipulation of the insn (such as swapping |
4506 | commutative operands), and these manipulations are lost during |
4507 | the first pass for every insn that needs register elimination. |
4508 | So the actions of find_reloads must be redone here. */ |
4509 | |
4510 | if (! chain->need_elim && ! chain->need_reload |
4511 | && ! chain->need_operand_change) |
4512 | n_reloads = 0; |
4513 | /* First find the pseudo regs that must be reloaded for this insn. |
4514 | This info is returned in the tables reload_... (see reload.h). |
4515 | Also modify the body of INSN by substituting RELOAD |
4516 | rtx's for those pseudo regs. */ |
4517 | else |
4518 | { |
4519 | CLEAR_REG_SET (&reg_has_output_reload); |
4520 | CLEAR_HARD_REG_SET (reg_is_output_reload); |
4521 | |
4522 | find_reloads (insn, 1, spill_indirect_levels, live_known, |
4523 | spill_reg_order); |
4524 | } |
4525 | |
4526 | if (n_reloads > 0) |
4527 | { |
4528 | rtx_insn *next = NEXT_INSN (insn); |
4529 | |
4530 | /* ??? PREV can get deleted by reload inheritance. |
4531 | Work around this by emitting a marker note. */ |
4532 | prev = PREV_INSN (insn); |
4533 | reorder_insns_nobb (marker, marker, prev); |
4534 | |
4535 | /* Now compute which reload regs to reload them into. Perhaps |
4536 | reusing reload regs from previous insns, or else output |
4537 | load insns to reload them. Maybe output store insns too. |
4538 | Record the choices of reload reg in reload_reg_rtx. */ |
4539 | choose_reload_regs (chain); |
4540 | |
4541 | /* Generate the insns to reload operands into or out of |
4542 | their reload regs. */ |
4543 | emit_reload_insns (chain); |
4544 | |
4545 | /* Substitute the chosen reload regs from reload_reg_rtx |
4546 | into the insn's body (or perhaps into the bodies of other |
4547 | load and store insns that we just made for reloading |
4548 | and that we moved the structure into). */ |
4549 | subst_reloads (insn); |
4550 | |
4551 | prev = PREV_INSN (marker); |
4552 | unlink_insn_chain (marker, marker); |
4553 | |
4554 | /* Adjust the exception region notes for loads and stores. */ |
4555 | if (cfun->can_throw_non_call_exceptions && !CALL_P (insn)) |
4556 | fixup_eh_region_note (insn, prev, next); |
4557 | |
4558 | /* Adjust the location of REG_ARGS_SIZE. */ |
4559 | rtx p = find_reg_note (insn, REG_ARGS_SIZE, NULL_RTX); |
4560 | if (p) |
4561 | { |
4562 | remove_note (insn, p); |
4563 | fixup_args_size_notes (prev, PREV_INSN (next), |
4564 | get_args_size (p)); |
4565 | } |
4566 | |
4567 | /* If this was an ASM, make sure that all the reload insns |
4568 | we have generated are valid. If not, give an error |
4569 | and delete them. */ |
4570 | if (asm_noperands (PATTERN (insn)) >= 0) |
4571 | for (rtx_insn *p = NEXT_INSN (prev); |
4572 | p != next; |
4573 | p = NEXT_INSN (p)) |
4574 | if (p != insn && INSN_P (p) |
4575 | && GET_CODE (PATTERN (p)) != USE |
4576 | && (recog_memoized (p) < 0 |
4577 | || (extract_insn (p), |
4578 | !(constrain_operands (1, |
4579 | get_enabled_alternatives (p)))))) |
4580 | { |
4581 | error_for_asm (insn, |
4582 | "%<asm%> operand requires " |
4583 | "impossible reload"); |
4584 | delete_insn (p); |
4585 | } |
4586 | } |
4587 | |
4588 | if (num_eliminable && chain->need_elim) |
4589 | update_eliminable_offsets (); |
4590 | |
4591 | /* Any previously reloaded spilled pseudo reg, stored in this insn, |
4592 | is no longer validly lying around to save a future reload. |
4593 | Note that this does not detect pseudos that were reloaded |
4594 | for this insn in order to be stored into |
4595 | (obeying register constraints). That is correct; such reload |
4596 | registers ARE still valid. */ |
4597 | forget_marked_reloads (®s_to_forget); |
4598 | CLEAR_REG_SET (®s_to_forget)bitmap_clear (®s_to_forget); |
4599 | |
4600 | /* There may have been CLOBBER insns placed after INSN. So scan |
4601 | between INSN and NEXT and use them to forget old reloads. */ |
4602 | for (rtx_insn *x = NEXT_INSN (insn); x != old_next; x = NEXT_INSN (x)) |
4603 | if (NONJUMP_INSN_P (x) && GET_CODE (PATTERN (x)) == CLOBBER) |
4604 | note_stores (x, forget_old_reloads_1, NULL); |
4605 | |
4606 | #if AUTO_INC_DEC |
4607 | /* Likewise for regs altered by auto-increment in this insn. |
4608 | REG_INC notes have been changed by reloading: |
4609 | find_reloads_address_1 records substitutions for them, |
4610 | which have been performed by subst_reloads above. */ |
4611 | for (i = n_reloads - 1; i >= 0; i--) |
4612 | { |
4613 | rtx in_reg = rld[i].in_reg; |
4614 | if (in_reg) |
4615 | { |
4616 | enum rtx_code code = GET_CODE (in_reg); |
4617 | /* PRE_INC / PRE_DEC will have the reload register ending up |
4618 | with the same value as the stack slot, but that doesn't |
4619 | hold true for POST_INC / POST_DEC. Either we have to |
4620 | convert the memory access to a true POST_INC / POST_DEC, |
4621 | or we can't use the reload register for inheritance. */ |
4622 | if ((code == POST_INC || code == POST_DEC) |
4623 | && TEST_HARD_REG_BIT (reg_reloaded_valid, |
4624 | REGNO (rld[i].reg_rtx)) |
4625 | /* Make sure it is the inc/dec pseudo, and not |
4626 | some other (e.g. output operand) pseudo. */ |
4627 | && ((unsigned) reg_reloaded_contents[REGNO (rld[i].reg_rtx)] |
4628 | == REGNO (XEXP (in_reg, 0)))) |
4629 | |
4630 | { |
4631 | rtx reload_reg = rld[i].reg_rtx; |
4632 | machine_mode mode = GET_MODE (reload_reg); |
4633 | int n = 0; |
4634 | rtx_insn *p; |
4635 | |
4636 | for (p = PREV_INSN (old_next); p != prev; p = PREV_INSN (p)) |
4637 | { |
4638 | /* We really want to ignore REG_INC notes here, so |
4639 | use PATTERN (p) as argument to reg_set_p. */ |
4640 | if (reg_set_p (reload_reg, PATTERN (p))) |
4641 | break; |
4642 | n = count_occurrences (PATTERN (p), reload_reg, 0); |
4643 | if (! n) |
4644 | continue; |
4645 | if (n == 1) |
4646 | { |
4647 | rtx replace_reg |
4648 | = gen_rtx_fmt_e (code, mode, reload_reg); |
4649 | |
4650 | validate_replace_rtx_group (reload_reg, |
4651 | replace_reg, p); |
4652 | n = verify_changes (0); |
4653 | |
4654 | /* We must also verify that the constraints |
4655 | are met after the replacement. Make sure |
4656 | extract_insn is only called for an insn |
4657 | where the replacements were found to be |
4658 | valid so far. */ |
4659 | if (n) |
4660 | { |
4661 | extract_insn (p); |
4662 | n = constrain_operands (1, |
4663 | get_enabled_alternatives (p)); |
4664 | } |
4665 | |
4666 | /* If the constraints were not met, then |
4667 | undo the replacement, else confirm it. */ |
4668 | if (!n) |
4669 | cancel_changes (0); |
4670 | else |
4671 | confirm_change_group (); |
4672 | } |
4673 | break; |
4674 | } |
4675 | if (n == 1) |
4676 | { |
4677 | add_reg_note (p, REG_INC, reload_reg); |
4678 | /* Mark this as having an output reload so that the |
4679 | REG_INC processing code below won't invalidate |
4680 | the reload for inheritance. */ |
4681 | SET_HARD_REG_BIT (reg_is_output_reload, |
4682 | REGNO (reload_reg)); |
4683 | SET_REGNO_REG_SET (&reg_has_output_reload, |
4684 | REGNO (XEXP (in_reg, 0))); |
4685 | } |
4686 | else |
4687 | forget_old_reloads_1 (XEXP (in_reg, 0), NULL_RTX, |
4688 | NULL); |
4689 | } |
4690 | else if ((code == PRE_INC || code == PRE_DEC) |
4691 | && TEST_HARD_REG_BIT (reg_reloaded_valid, |
4692 | REGNO (rld[i].reg_rtx)) |
4693 | /* Make sure it is the inc/dec pseudo, and not |
4694 | some other (e.g. output operand) pseudo. */ |
4695 | && ((unsigned) reg_reloaded_contents[REGNO (rld[i].reg_rtx)] |
4696 | == REGNO (XEXP (in_reg, 0)))) |
4697 | { |
4698 | SET_HARD_REG_BIT (reg_is_output_reload, |
4699 | REGNO (rld[i].reg_rtx)); |
4700 | SET_REGNO_REG_SET (&reg_has_output_reload, |
4701 | REGNO (XEXP (in_reg, 0))); |
4702 | } |
4703 | else if (code == PRE_INC || code == PRE_DEC |
4704 | || code == POST_INC || code == POST_DEC) |
4705 | { |
4706 | int in_regno = REGNO (XEXP (in_reg, 0)); |
4707 | |
4708 | if (reg_last_reload_reg[in_regno] != NULL_RTX) |
4709 | { |
4710 | int in_hard_regno; |
4711 | bool forget_p = true; |
4712 | |
4713 | in_hard_regno = REGNO (reg_last_reload_reg[in_regno]); |
4714 | if (TEST_HARD_REG_BIT (reg_reloaded_valid, |
4715 | in_hard_regno)) |
4716 | { |
4717 | for (rtx_insn *x = (old_prev ? |
4718 | NEXT_INSN (old_prev) : insn); |
4719 | x != old_next; |
4720 | x = NEXT_INSN (x)) |
4721 | if (x == reg_reloaded_insn[in_hard_regno]) |
4722 | { |
4723 | forget_p = false; |
4724 | break; |
4725 | } |
4726 | } |
4727 | /* If for some reason we didn't set up |
4728 | reg_last_reload_reg in this insn, |
4729 | invalidate inheritance from previous |
4730 | insns for the incremented/decremented |
4731 | register. Such registers will not be in |
4732 | reg_has_output_reload. Also invalidate |
4733 | it if the corresponding element in |
4734 | reg_reloaded_insn has been |
4735 | invalidated. */ |
4736 | if (forget_p) |
4737 | forget_old_reloads_1 (XEXP (in_reg, 0), |
4738 | NULL_RTX, NULL); |
4739 | } |
4740 | } |
4741 | } |
4742 | } |
4743 | /* If a pseudo that got a hard register is auto-incremented, |
4744 | we must purge records of copying it into pseudos without |
4745 | hard registers. */ |
4746 | for (rtx x = REG_NOTES (insn); x; x = XEXP (x, 1)) |
4747 | if (REG_NOTE_KIND (x) == REG_INC) |
4748 | { |
4749 | /* See if this pseudo reg was reloaded in this insn. |
4750 | If so, its last-reload info is still valid |
4751 | because it is based on this insn's reload. */ |
4752 | for (i = 0; i < n_reloads; i++) |
4753 | if (rld[i].out == XEXP (x, 0)) |
4754 | break; |
4755 | |
4756 | if (i == n_reloads) |
4757 | forget_old_reloads_1 (XEXP (x, 0), NULL_RTX, NULL); |
4758 | } |
4759 | #endif |
4760 | } |
4761 | /* A reload reg's contents are unknown after a label. */ |
4762 | if (LABEL_P (insn)) |
4763 | CLEAR_HARD_REG_SET (reg_reloaded_valid); |
4764 | |
4765 | /* Don't assume a reload reg is still good after a call insn |
4766 | if it is a call-used reg, or if it contains a value that will |
4767 | be partially clobbered by the call. */ |
4768 | else if (CALL_P (insn)) |
4769 | { |
4770 | reg_reloaded_valid |
4771 | &= ~insn_callee_abi (insn).full_and_partial_reg_clobbers (); |
4772 | |
4773 | /* If this is a call to a setjmp-type function, we must not |
4774 | reuse any reload reg contents across the call; that will |
4775 | just be clobbered by other uses of the register in later |
4776 | code, before the longjmp. */ |
4777 | if (find_reg_note (insn, REG_SETJMP, NULL_RTX)) |
4778 | CLEAR_HARD_REG_SET (reg_reloaded_valid); |
4779 | } |
4780 | } |
4781 | |
4782 | /* Clean up. */ |
4783 | free (reg_last_reload_reg); |
4784 | CLEAR_REG_SET (&reg_has_output_reload); |
4785 | } |
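
/* Editor's note: the NOTE_INSN_DELETED marker used in reload_as_needed
   is a sentinel trick: PREV itself may be deleted while reload insns
   are emitted, but a freshly spliced marker survives, so the code
   re-reads PREV from the marker afterwards.  Below is a self-contained
   sketch of the same idea on a doubly-linked list; all names are
   hypothetical.  */

#include <stddef.h>

struct node { struct node *prev, *next; };

/* Splice MARKER in immediately before POS.  */
static void
link_before (struct node *pos, struct node *marker)
{
  marker->prev = pos->prev;
  marker->next = pos;
  if (pos->prev)
    pos->prev->next = marker;
  pos->prev = marker;
}

/* Remove N from the list; its neighbours are rejoined.  */
static void
unlink_node (struct node *n)
{
  if (n->prev)
    n->prev->next = n->next;
  if (n->next)
    n->next->prev = n->prev;
  n->prev = n->next = NULL;
}

/* Even if the original predecessor of POS is unlinked while other
   nodes are inserted or removed, MARKER->prev always names the node
   that currently precedes the spliced position.  */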
4786 | |
4787 | /* Discard all record of any value reloaded from X, |
4788 | or reloaded in X from someplace else; |
4789 | unless X is an output reload reg of the current insn. |
4790 | |
4791 | X may be a hard reg (the reload reg) |
4792 | or it may be a pseudo reg that was reloaded from. |
4793 | |
4794 | When DATA is non-NULL just mark the registers in regset |
4795 | to be forgotten later. */ |
4796 | |
4797 | static void |
4798 | forget_old_reloads_1 (rtx x, const_rtx, void *data) |
4799 | { |
4800 | unsigned int regno; |
4801 | unsigned int nr; |
4802 | regset regs = (regset) data; |
4803 | |
4804 | /* note_stores does give us subregs of hard regs, |
4805 | subreg_regno_offset requires a hard reg. */ |
4806 | while (GET_CODE (x) == SUBREG) |
4807 | { |
4808 | /* We ignore the subreg offset when calculating the regno, |
4809 | because we are using the entire underlying hard register |
4810 | below. */ |
4811 | x = SUBREG_REG (x); |
4812 | } |
4813 | |
4814 | if (!REG_P (x)) |
4815 | return; |
4816 | |
4817 | regno = REGNO (x); |
4818 | |
4819 | if (regno >= FIRST_PSEUDO_REGISTER) |
4820 | nr = 1; |
4821 | else |
4822 | { |
4823 | unsigned int i; |
4824 | |
4825 | nr = REG_NREGS (x); |
4826 | /* Storing into a spilled-reg invalidates its contents. |
4827 | This can happen if a block-local pseudo is allocated to that reg |
4828 | and it wasn't spilled because this block's total need is 0. |
4829 | Then some insn might have an optional reload and use this reg. */ |
4830 | if (!regs) |
4831 | for (i = 0; i < nr; i++) |
4832 | /* But don't do this if the reg actually serves as an output |
4833 | reload reg in the current instruction. */ |
4834 | if (n_reloads == 0 |
4835 | || ! TEST_HARD_REG_BIT (reg_is_output_reload, regno + i)) |
4836 | { |
4837 | CLEAR_HARD_REG_BIT (reg_reloaded_valid, regno + i); |
4838 | spill_reg_store[regno + i] = 0; |
4839 | } |
4840 | } |
4841 | |
4842 | if (regs) |
4843 | while (nr-- > 0) |
4844 | SET_REGNO_REG_SET (regs, regno + nr); |
4845 | else |
4846 | { |
4847 | /* Since value of X has changed, |
4848 | forget any value previously copied from it. */ |
4849 | |
4850 | while (nr-- > 0) |
4851 | /* But don't forget a copy if this is the output reload |
4852 | that establishes the copy's validity. */ |
4853 | if (n_reloads == 0 |
4854 | || !REGNO_REG_SET_P (&reg_has_output_reload, regno + nr)) |
4855 | reg_last_reload_reg[regno + nr] = 0; |
4856 | } |
4857 | } |
4858 | |
4859 | /* Forget the reloads marked in regset by previous function. */ |
4860 | static void |
4861 | forget_marked_reloads (regset regs) |
4862 | { |
4863 | unsigned int reg; |
4864 | reg_set_iterator rsi; |
4865 | EXECUTE_IF_SET_IN_REG_SET (regs, 0, reg, rsi) |
4866 | { |
4867 | if (reg < FIRST_PSEUDO_REGISTER |
4868 | /* But don't do this if the reg actually serves as an output |
4869 | reload reg in the current instruction. */ |
4870 | && (n_reloads == 0 |
4871 | || ! TEST_HARD_REG_BIT (reg_is_output_reload, reg))) |
4872 | { |
4873 | CLEAR_HARD_REG_BIT (reg_reloaded_valid, reg); |
4874 | spill_reg_store[reg] = 0; |
4875 | } |
4876 | if (n_reloads == 0 |
4877 | || !REGNO_REG_SET_P (&reg_has_output_reload, reg)) |
4878 | reg_last_reload_reg[reg] = 0; |
4879 | } |
4880 | } |
4881 | |
4882 | /* The following HARD_REG_SETs indicate when each hard register is |
4883 | used for a reload of various parts of the current insn. */ |
4884 | |
4885 | /* If reg is unavailable for all reloads. */ |
4886 | static HARD_REG_SET reload_reg_unavailable; |
4887 | /* If reg is in use as a reload reg for a RELOAD_OTHER reload. */ |
4888 | static HARD_REG_SET reload_reg_used; |
4889 | /* If reg is in use for a RELOAD_FOR_INPUT_ADDRESS reload for operand I. */ |
4890 | static HARD_REG_SET reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS]; |
4891 | /* If reg is in use for a RELOAD_FOR_INPADDR_ADDRESS reload for operand I. */ |
4892 | static HARD_REG_SET reload_reg_used_in_inpaddr_addr[MAX_RECOG_OPERANDS]; |
4893 | /* If reg is in use for a RELOAD_FOR_OUTPUT_ADDRESS reload for operand I. */ |
4894 | static HARD_REG_SET reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS]; |
4895 | /* If reg is in use for a RELOAD_FOR_OUTADDR_ADDRESS reload for operand I. */ |
4896 | static HARD_REG_SET reload_reg_used_in_outaddr_addr[MAX_RECOG_OPERANDS]; |
4897 | /* If reg is in use for a RELOAD_FOR_INPUT reload for operand I. */ |
4898 | static HARD_REG_SET reload_reg_used_in_input[MAX_RECOG_OPERANDS]; |
4899 | /* If reg is in use for a RELOAD_FOR_OUTPUT reload for operand I. */ |
4900 | static HARD_REG_SET reload_reg_used_in_output[MAX_RECOG_OPERANDS]; |
4901 | /* If reg is in use for a RELOAD_FOR_OPERAND_ADDRESS reload. */ |
4902 | static HARD_REG_SET reload_reg_used_in_op_addr; |
4903 | /* If reg is in use for a RELOAD_FOR_OPADDR_ADDR reload. */ |
4904 | static HARD_REG_SET reload_reg_used_in_op_addr_reload; |
4905 | /* If reg is in use for a RELOAD_FOR_INSN reload. */ |
4906 | static HARD_REG_SET reload_reg_used_in_insn; |
4907 | /* If reg is in use for a RELOAD_FOR_OTHER_ADDRESS reload. */ |
4908 | static HARD_REG_SET reload_reg_used_in_other_addr; |
4909 | |
4910 | /* If reg is in use as a reload reg for any sort of reload. */ |
4911 | static HARD_REG_SET reload_reg_used_at_all; |
4912 | |
4913 | /* If reg is in use as an inherited reload. We just mark the first register |
4914 | in the group. */ |
4915 | static HARD_REG_SET reload_reg_used_for_inherit; |
4916 | |
4917 | /* Records which hard regs are used in any way, either as explicit use or |
4918 | by being allocated to a pseudo during any point of the current insn. */ |
4919 | static HARD_REG_SET reg_used_in_insn; |
4920 | |
4921 | /* Mark reg REGNO as in use for a reload of the sort spec'd by OPNUM and |
4922 | TYPE. MODE is used to indicate how many consecutive regs are |
4923 | actually used. */ |
4924 | |
4925 | static void |
4926 | mark_reload_reg_in_use (unsigned int regno, int opnum, enum reload_type type, |
4927 | machine_mode mode) |
4928 | { |
4929 | switch (type) |
4930 | { |
4931 | case RELOAD_OTHER: |
4932 | add_to_hard_reg_set (&reload_reg_used, mode, regno); |
4933 | break; |
4934 | |
4935 | case RELOAD_FOR_INPUT_ADDRESS: |
4936 | add_to_hard_reg_set (&reload_reg_used_in_input_addr[opnum], mode, regno); |
4937 | break; |
4938 | |
4939 | case RELOAD_FOR_INPADDR_ADDRESS: |
4940 | add_to_hard_reg_set (&reload_reg_used_in_inpaddr_addr[opnum], mode, regno); |
4941 | break; |
4942 | |
4943 | case RELOAD_FOR_OUTPUT_ADDRESS: |
4944 | add_to_hard_reg_set (&reload_reg_used_in_output_addr[opnum], mode, regno); |
4945 | break; |
4946 | |
4947 | case RELOAD_FOR_OUTADDR_ADDRESS: |
4948 | add_to_hard_reg_set (&reload_reg_used_in_outaddr_addr[opnum], mode, regno); |
4949 | break; |
4950 | |
4951 | case RELOAD_FOR_OPERAND_ADDRESS: |
4952 | add_to_hard_reg_set (&reload_reg_used_in_op_addr, mode, regno); |
4953 | break; |
4954 | |
4955 | case RELOAD_FOR_OPADDR_ADDR: |
4956 | add_to_hard_reg_set (&reload_reg_used_in_op_addr_reload, mode, regno); |
4957 | break; |
4958 | |
4959 | case RELOAD_FOR_OTHER_ADDRESS: |
4960 | add_to_hard_reg_set (&reload_reg_used_in_other_addr, mode, regno); |
4961 | break; |
4962 | |
4963 | case RELOAD_FOR_INPUT: |
4964 | add_to_hard_reg_set (&reload_reg_used_in_input[opnum], mode, regno); |
4965 | break; |
4966 | |
4967 | case RELOAD_FOR_OUTPUT: |
4968 | add_to_hard_reg_set (&reload_reg_used_in_output[opnum], mode, regno); |
4969 | break; |
4970 | |
4971 | case RELOAD_FOR_INSN: |
4972 | add_to_hard_reg_set (&reload_reg_used_in_insn, mode, regno); |
4973 | break; |
4974 | } |
4975 | |
4976 | add_to_hard_reg_set (&reload_reg_used_at_all, mode, regno); |
4977 | } |
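
/* Editor's note: a HARD_REG_SET is conceptually a fixed-size bitset with
   one bit per hard register, and add_to_hard_reg_set marks every hard
   register that a MODE-sized value occupies starting at REGNO.  Below is
   a minimal self-contained sketch with the set reduced to one 64-bit
   word (so it assumes at most 64 hard registers) and the mode reduced
   to a register count; the types and names are hypothetical.  */

#include <stdint.h>

typedef uint64_t mini_hard_reg_set;   /* one bit per hard register */

/* Mark NREGS consecutive hard registers starting at REGNO.  */
static void
mini_add_regs (mini_hard_reg_set *set, unsigned regno, unsigned nregs)
{
  for (unsigned r = regno; r < regno + nregs; r++)
    *set |= (mini_hard_reg_set) 1 << r;
}

/* Test a single hard register, as TEST_HARD_REG_BIT does.  */
static int
mini_test_reg (mini_hard_reg_set set, unsigned regno)
{
  return (int) ((set >> regno) & 1);
}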
4978 | |
4979 | /* Similarly, but show REGNO is no longer in use for a reload. */ |
4980 | |
4981 | static void |
4982 | clear_reload_reg_in_use (unsigned int regno, int opnum, |
4983 | enum reload_type type, machine_mode mode) |
4984 | { |
4985 | unsigned int nregs = hard_regno_nregs (regno, mode); |
4986 | unsigned int start_regno, end_regno, r; |
4987 | int i; |
4988 | /* A complication is that for some reload types, inheritance might |
4989 | allow multiple reloads of the same types to share a reload register. |
4990 | We set check_opnum if we have to check only reloads with the same |
4991 | operand number, and check_any if we have to check all reloads. */ |
4992 | int check_opnum = 0; |
4993 | int check_any = 0; |
4994 | HARD_REG_SET *used_in_set; |
4995 | |
4996 | switch (type) |
4997 | { |
4998 | case RELOAD_OTHER: |
4999 | used_in_set = &reload_reg_used; |
5000 | break; |
5001 | |
5002 | case RELOAD_FOR_INPUT_ADDRESS: |
5003 | used_in_set = &reload_reg_used_in_input_addr[opnum]; |
5004 | break; |
5005 | |
5006 | case RELOAD_FOR_INPADDR_ADDRESS: |
5007 | check_opnum = 1; |
5008 | used_in_set = &reload_reg_used_in_inpaddr_addr[opnum]; |
5009 | break; |
5010 | |
5011 | case RELOAD_FOR_OUTPUT_ADDRESS: |
5012 | used_in_set = &reload_reg_used_in_output_addr[opnum]; |
5013 | break; |
5014 | |
5015 | case RELOAD_FOR_OUTADDR_ADDRESS: |
5016 | check_opnum = 1; |
5017 | used_in_set = &reload_reg_used_in_outaddr_addr[opnum]; |
5018 | break; |
5019 | |
5020 | case RELOAD_FOR_OPERAND_ADDRESS: |
5021 | used_in_set = &reload_reg_used_in_op_addr; |
5022 | break; |
5023 | |
5024 | case RELOAD_FOR_OPADDR_ADDR: |
5025 | check_any = 1; |
5026 | used_in_set = &reload_reg_used_in_op_addr_reload; |
5027 | break; |
5028 | |
5029 | case RELOAD_FOR_OTHER_ADDRESS: |
5030 | used_in_set = &reload_reg_used_in_other_addr; |
5031 | check_any = 1; |
5032 | break; |
5033 | |
5034 | case RELOAD_FOR_INPUT: |
5035 | used_in_set = &reload_reg_used_in_input[opnum]; |
5036 | break; |
5037 | |
5038 | case RELOAD_FOR_OUTPUT: |
5039 | used_in_set = &reload_reg_used_in_output[opnum]; |
5040 | break; |
5041 | |
5042 | case RELOAD_FOR_INSN: |
5043 | used_in_set = &reload_reg_used_in_insn; |
5044 | break; |
5045 | default: |
5046 | gcc_unreachable (); |
5047 | } |
5048 | /* We resolve conflicts with remaining reloads of the same type by |
5049 | excluding the intervals of reload registers by them from the |
5050 | interval of freed reload registers. Since we only keep track of |
5051 | one set of interval bounds, we might have to exclude somewhat |
5052 | more than what would be necessary if we used a HARD_REG_SET here. |
5053 | But this should only happen very infrequently, so there should |
5054 | be no reason to worry about it. */ |
5055 | |
5056 | start_regno = regno; |
5057 | end_regno = regno + nregs; |
5058 | if (check_opnum || check_any) |
5059 | { |
5060 | for (i = n_reloads - 1; i >= 0; i--) |
5061 | { |
5062 | if (rld[i].when_needed == type |
5063 | && (check_any || rld[i].opnum == opnum) |
5064 | && rld[i].reg_rtx) |
5065 | { |
5066 | unsigned int conflict_start = true_regnum (rld[i].reg_rtx); |
5067 | unsigned int conflict_end |
5068 | = end_hard_regno (rld[i].mode, conflict_start); |
5069 | |
5070 | /* If there is an overlap with the first to-be-freed register, |
5071 | adjust the interval start. */ |
5072 | if (conflict_start <= start_regno && conflict_end > start_regno) |
5073 | start_regno = conflict_end; |
5074 | /* Otherwise, if there is a conflict with one of the other |
5075 | to-be-freed registers, adjust the interval end. */ |
5076 | if (conflict_start > start_regno && conflict_start < end_regno) |
5077 | end_regno = conflict_start; |
5078 | } |
5079 | } |
5080 | } |
5081 | |
5082 | for (r = start_regno; r < end_regno; r++) |
5083 | CLEAR_HARD_REG_BIT (*used_in_set, r); |
5084 | } |
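
/* Editor's note: the conflict loop above keeps only a single pair of
   interval bounds, so a conflicting reload can only trim the freed
   interval from one end.  A self-contained sketch of that trimming
   step, with hypothetical names:  */

/* Shrink the to-be-freed interval [*START, *END) so that it does not
   overlap a conflicting interval [C_START, C_END).  Because only one
   pair of bounds is kept, a conflict in the middle trims the tail,
   possibly leaving some registers marked longer than necessary.  */
static void
exclude_interval (unsigned *start, unsigned *end,
                  unsigned c_start, unsigned c_end)
{
  if (c_start <= *start && c_end > *start)
    *start = c_end;               /* overlap at the low end */
  if (c_start > *start && c_start < *end)
    *end = c_start;               /* overlap further up */
}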
5085 | |
5086 | /* 1 if reg REGNO is free as a reload reg for a reload of the sort |
5087 | specified by OPNUM and TYPE. */ |
5088 | |
5089 | static int |
5090 | reload_reg_free_p (unsigned int regno, int opnum, enum reload_type type) |
5091 | { |
5092 | int i; |
5093 | |
5094 | /* In use for a RELOAD_OTHER means it's not available for anything. */ |
5095 | if (TEST_HARD_REG_BIT (reload_reg_used, regno) |
5096 | || TEST_HARD_REG_BIT (reload_reg_unavailable, regno)) |
5097 | return 0; |
5098 | |
5099 | switch (type) |
5100 | { |
5101 | case RELOAD_OTHER: |
5102 | /* In use for anything means we can't use it for RELOAD_OTHER. */ |
5103 | if (TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno) |
5104 | || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno) |
5105 | || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno) |
5106 | || TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)) |
5107 | return 0; |
5108 | |
5109 | for (i = 0; i < reload_n_operands; i++) |
5110 | if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno) |
5111 | || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno) |
5112 | || TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno) |
5113 | || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno) |
5114 | || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno) |
5115 | || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno)) |
5116 | return 0; |
5117 | |
5118 | return 1; |
5119 | |
5120 | case RELOAD_FOR_INPUT: |
5121 | if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno) |
5122 | || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)) |
5123 | return 0; |
5124 | |
5125 | if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno)) |
5126 | return 0; |
5127 | |
5128 | /* If it is used for some other input, can't use it. */ |
5129 | for (i = 0; i < reload_n_operands; i++) |
5130 | if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)) |
5131 | return 0; |
5132 | |
5133 | /* If it is used in a later operand's address, can't use it. */ |
5134 | for (i = opnum + 1; i < reload_n_operands; i++) |
5135 | if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno) |
5136 | || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)) |
5137 | return 0; |
5138 | |
5139 | return 1; |
5140 | |
5141 | case RELOAD_FOR_INPUT_ADDRESS: |
5142 | /* Can't use a register if it is used for an input address for this |
5143 | operand or used as an input in an earlier one. */ |
5144 | if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], regno) |
5145 | || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno)) |
5146 | return 0; |
5147 | |
5148 | for (i = 0; i < opnum; i++) |
5149 | if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)) |
5150 | return 0; |
5151 | |
5152 | return 1; |
5153 | |
5154 | case RELOAD_FOR_INPADDR_ADDRESS: |
5155 | /* Can't use a register if it is used for an input address |
5156 | for this operand or used as an input in an earlier |
5157 | one. */ |
5158 | if (TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno)) |
5159 | return 0; |
5160 | |
5161 | for (i = 0; i < opnum; i++) |
5162 | if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)) |
5163 | return 0; |
5164 | |
5165 | return 1; |
5166 | |
5167 | case RELOAD_FOR_OUTPUT_ADDRESS: |
5168 | /* Can't use a register if it is used for an output address for this |
5169 | operand or used as an output in this or a later operand. Note |
5170 | that multiple output operands are emitted in reverse order, so |
5171 | the conflicting ones are those with lower indices. */ |
5172 | if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno)) |
5173 | return 0; |
5174 | |
5175 | for (i = 0; i <= opnum; i++) |
5176 | if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno)) |
5177 | return 0; |
5178 | |
5179 | return 1; |
5180 | |
5181 | case RELOAD_FOR_OUTADDR_ADDRESS: |
5182 | /* Can't use a register if it is used for an output address |
5183 | for this operand or used as an output in this or a |
5184 | later operand. Note that multiple output operands are |
5185 | emitted in reverse order, so the conflicting ones are |
5186 | those with lower indices. */ |
5187 | if (TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], regno)) |
5188 | return 0; |
5189 | |
5190 | for (i = 0; i <= opnum; i++) |
5191 | if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno)) |
5192 | return 0; |
5193 | |
5194 | return 1; |
5195 | |
5196 | case RELOAD_FOR_OPERAND_ADDRESS: |
5197 | for (i = 0; i < reload_n_operands; i++) |
5198 | if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)) |
5199 | return 0; |
5200 | |
5201 | return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno) |
5202 | && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)); |
5203 | |
5204 | case RELOAD_FOR_OPADDR_ADDR: |
5205 | for (i = 0; i < reload_n_operands; i++) |
5206 | if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)) |
5207 | return 0; |
5208 | |
5209 | return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno)); |
5210 | |
5211 | case RELOAD_FOR_OUTPUT: |
5212 | /* This cannot share a register with RELOAD_FOR_INSN reloads, other |
5213 | outputs, or an operand address for this or an earlier output. |
5214 | Note that multiple output operands are emitted in reverse order, |
5215 | so the conflicting ones are those with higher indices. */ |
5216 | if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)) |
5217 | return 0; |
5218 | |
5219 | for (i = 0; i < reload_n_operands; i++) |
5220 | if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno)) |
5221 | return 0; |
5222 | |
5223 | for (i = opnum; i < reload_n_operands; i++) |
5224 | if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno) |
5225 | || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)) |
5226 | return 0; |
5227 | |
5228 | return 1; |
5229 | |
5230 | case RELOAD_FOR_INSN: |
5231 | for (i = 0; i < reload_n_operands; i++) |
5232 | if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno) |
5233 | || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno)) |
5234 | return 0; |
5235 | |
5236 | return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno) |
5237 | && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)); |
5238 | |
5239 | case RELOAD_FOR_OTHER_ADDRESS: |
5240 | return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno); |
5241 | |
5242 | default: |
5243 | gcc_unreachable (); |
5244 | } |
5245 | } |
5246 | |
5247 | /* Return 1 if the value in reload reg REGNO, as used by the reload with |
5248 | the number RELOADNUM, is still available in REGNO at the end of the insn. |
5249 | |
5250 | We can assume that the reload reg was already tested for availability |
5251 | at the time it is needed, and we should not check this again, |
5252 | in case the reg has already been marked in use. */ |
5253 | |
5254 | static int |
5255 | reload_reg_reaches_end_p (unsigned int regno, int reloadnum) |
5256 | { |
5257 | int opnum = rld[reloadnum].opnum; |
5258 | enum reload_type type = rld[reloadnum].when_needed; |
5259 | int i; |
5260 | |
5261 | /* See if there is a reload with the same type for this operand, using |
5262 | the same register. This case is not handled by the code below. */ |
5263 | for (i = reloadnum + 1; i < n_reloads; i++) |
5264 | { |
5265 | rtx reg; |
5266 | |
5267 | if (rld[i].opnum != opnum || rld[i].when_needed != type) |
5268 | continue; |
5269 | reg = rld[i].reg_rtx; |
5270 | if (reg == NULL_RTX) |
5271 | continue; |
5272 | if (regno >= REGNO (reg) && regno < END_REGNO (reg)) |
5273 | return 0; |
5274 | } |
5275 | |
5276 | switch (type) |
5277 | { |
5278 | case RELOAD_OTHER: |
5279 | /* Since a RELOAD_OTHER reload claims the reg for the entire insn, |
5280 | its value must reach the end. */ |
5281 | return 1; |
5282 | |
5283 | /* If this use is for part of the insn, |
5284 | its value reaches if no subsequent part uses the same register. |
5285 | Just like the above function, don't try to do this with lots |
5286 | of fallthroughs. */ |
5287 | |
5288 | case RELOAD_FOR_OTHER_ADDRESS: |
5289 | /* Here we check for everything else, since these don't conflict |
5290 | with anything else and everything comes later. */ |
5291 | |
5292 | for (i = 0; i < reload_n_operands; i++) |
5293 | if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno) |
5294 | || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno) |
5295 | || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno) |
5296 | || TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno) |
5297 | || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno) |
5298 | || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)) |
5299 | return 0; |
5300 | |
5301 | return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno) |
5302 | && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno) |
5303 | && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno) |
5304 | && ! TEST_HARD_REG_BIT (reload_reg_used, regno)); |
5305 | |
5306 | case RELOAD_FOR_INPUT_ADDRESS: |
5307 | case RELOAD_FOR_INPADDR_ADDRESS: |
5308 | /* Similar, except that we check only for this and subsequent inputs |
5309 | and the address of only subsequent inputs and we do not need |
5310 | to check for RELOAD_OTHER objects since they are known not to |
5311 | conflict. */ |
5312 | |
5313 | for (i = opnum; i < reload_n_operands; i++) |
5314 | if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)) |
5315 | return 0; |
5316 | |
5317 | /* Reload register of reload with type RELOAD_FOR_INPADDR_ADDRESS |
5318 | could be killed if the register is also used by reload with type |
5319 | RELOAD_FOR_INPUT_ADDRESS, so check it. */ |
5320 | if (type == RELOAD_FOR_INPADDR_ADDRESS |
5321 | && TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], regno)) |
5322 | return 0; |
5323 | |
5324 | for (i = opnum + 1; i < reload_n_operands; i++) |
5325 | if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno) |
5326 | || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)) |
5327 | return 0; |
5328 | |
5329 | for (i = 0; i < reload_n_operands; i++) |
5330 | if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno) |
5331 | || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno) |
5332 | || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno)) |
5333 | return 0; |
5334 | |
5335 | if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno)) |
5336 | return 0; |
5337 | |
5338 | return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno) |
5339 | && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno) |
5340 | && !TEST_HARD_REG_BIT (reload_reg_used, regno)); |
5341 | |
5342 | case RELOAD_FOR_INPUT: |
5343 | /* Similar to input address, except we start at the next operand for |
5344 | both input and input address and we do not check for |
5345 | RELOAD_FOR_OPERAND_ADDRESS and RELOAD_FOR_INSN since these |
5346 | would conflict. */ |
5347 | |
5348 | for (i = opnum + 1; i < reload_n_operands; i++) |
5349 | if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno) |
5350 | || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno) |
5351 | || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)) |
5352 | return 0; |
5353 | |
5354 | /* ... fall through ... */ |
5355 | |
5356 | case RELOAD_FOR_OPERAND_ADDRESS: |
5357 | /* Check outputs and their addresses. */ |
5358 | |
5359 | for (i = 0; i < reload_n_operands; i++) |
5360 | if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno) |
5361 | || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno) |
5362 | || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno)) |
5363 | return 0; |
5364 | |
5365 | return (!TEST_HARD_REG_BIT (reload_reg_used, regno)); |
5366 | |
5367 | case RELOAD_FOR_OPADDR_ADDR: |
5368 | for (i = 0; i < reload_n_operands; i++) |
5369 | if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno) |
5370 | || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno) |
5371 | || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno)) |
5372 | return 0; |
5373 | |
5374 | return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno) |
5375 | && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno) |
5376 | && !TEST_HARD_REG_BIT (reload_reg_used, regno)); |
5377 | |
5378 | case RELOAD_FOR_INSN: |
5379 | /* These conflict with other outputs with RELOAD_OTHER. So |
5380 | we need only check for output addresses. */ |
5381 | |
5382 | opnum = reload_n_operands; |
5383 | |
5384 | /* fall through */ |
5385 | |
5386 | case RELOAD_FOR_OUTPUT: |
5387 | case RELOAD_FOR_OUTPUT_ADDRESS: |
5388 | case RELOAD_FOR_OUTADDR_ADDRESS: |
5389 | /* We already know these can't conflict with a later output. So the |
5390 | only thing to check are later output addresses. |
5391 | Note that multiple output operands are emitted in reverse order, |
5392 | so the conflicting ones are those with lower indices. */ |
5393 | for (i = 0; i < opnum; i++) |
5394 | if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno) |
5395 | || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)) |
5396 | return 0; |
5397 | |
5398 | /* Reload register of reload with type RELOAD_FOR_OUTADDR_ADDRESS |
5399 | could be killed if the register is also used by reload with type |
5400 | RELOAD_FOR_OUTPUT_ADDRESS, so check it. */ |
5401 | if (type == RELOAD_FOR_OUTADDR_ADDRESS |
5402 | && TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], regno)) |
5403 | return 0; |
5404 | |
5405 | return 1; |
5406 | |
5407 | default: |
5408 | gcc_unreachable (); |
5409 | } |
5410 | } |
5411 | |
5412 | /* Like reload_reg_reaches_end_p, but check that the condition holds for |
5413 | every register in REG. */ |
5414 | |
5415 | static bool |
5416 | reload_reg_rtx_reaches_end_p (rtx reg, int reloadnum) |
5417 | { |
5418 | unsigned int i; |
5419 | |
5420 | for (i = REGNO (reg); i < END_REGNO (reg); i++) |
5421 | if (!reload_reg_reaches_end_p (i, reloadnum)) |
5422 | return false; |
5423 | return true; |
5424 | } |
5425 | |
5426 | |
5427 | /* Returns whether R1 and R2 are uniquely chained: the value of one |
5428 | is used by the other, and that value is not used by any other |
5429 | reload for this insn. This is used to partially undo the decision |
5430 | made in find_reloads when in the case of multiple |
5431 | RELOAD_FOR_OPERAND_ADDRESS reloads it converts all |
5432 | RELOAD_FOR_OPADDR_ADDR reloads into RELOAD_FOR_OPERAND_ADDRESS |
5433 | reloads. This code tries to avoid the conflict created by that |
5434 | change. It might be cleaner to explicitly keep track of which |
5435 | RELOAD_FOR_OPADDR_ADDR reload is associated with which |
5436 | RELOAD_FOR_OPERAND_ADDRESS reload, rather than to try to detect |
5437 | this after the fact. */ |
5438 | static bool |
5439 | reloads_unique_chain_p (int r1, int r2) |
5440 | { |
5441 | int i; |
5442 | |
5443 | /* We only check input reloads. */ |
5444 | if (! rld[r1].in || ! rld[r2].in) |
5445 | return false; |
5446 | |
5447 | /* Avoid anything with output reloads. */ |
5448 | if (rld[r1].out || rld[r2].out) |
5449 | return false; |
5450 | |
5451 | /* "chained" means one reload is a component of the other reload, |
5452 | not the same as the other reload. */ |
5453 | if (rld[r1].opnum != rld[r2].opnum |
5454 | || rtx_equal_p (rld[r1].in, rld[r2].in) |
5455 | || rld[r1].optional || rld[r2].optional |
5456 | || ! (reg_mentioned_p (rld[r1].in, rld[r2].in) |
5457 | || reg_mentioned_p (rld[r2].in, rld[r1].in))) |
5458 | return false; |
5459 | |
5460 | /* The following loop assumes that r1 is the reload that feeds r2. */ |
5461 | if (r1 > r2) |
5462 | std::swap (r1, r2); |
5463 | |
5464 | for (i = 0; i < n_reloads; i ++) |
5465 | /* Look for input reloads that aren't our two. */ |
5466 | if (i != r1 && i != r2 && rld[i].in) |
5467 | { |
5468 | /* If our reload is mentioned at all, it isn't a simple chain. */ |
5469 | if (reg_mentioned_p (rld[r1].in, rld[i].in)) |
5470 | return false; |
5471 | } |
5472 | return true; |
5473 | } |
5474 | |
5475 | /* This recursive function changes all occurrences of WHAT in *WHERE |
5476 | to REPL. */ |
5477 | static void |
5478 | substitute (rtx *where, const_rtx what, rtx repl) |
5479 | { |
5480 | const char *fmt; |
5481 | int i; |
5482 | enum rtx_code code; |
5483 | |
5484 | if (*where == 0) |
5485 | return; |
5486 | |
5487 | if (*where == what || rtx_equal_p (*where, what)) |
5488 | { |
5489 | /* Record the location of the changed rtx. */ |
5490 | substitute_stack.safe_push (where); |
5491 | *where = repl; |
5492 | return; |
5493 | } |
5494 | |
5495 | code = GET_CODE (*where); |
5496 | fmt = GET_RTX_FORMAT (code); |
5497 | for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) |
5498 | { |
5499 | if (fmt[i] == 'E') |
5500 | { |
5501 | int j; |
5502 | |
5503 | for (j = XVECLEN (*where, i) - 1; j >= 0; j--) |
5504 | substitute (&XVECEXP (*where, i, j), what, repl); |
5505 | } |
5506 | else if (fmt[i] == 'e') |
5507 | substitute (&XEXP (*where, i), what, repl); |
5508 | } |
5509 | } |
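
/* Editor's note: substitute records each changed location on
   substitute_stack so that gen_reload_chain_without_interm_reg_p below
   can restore the original rtx afterwards.  Below is a self-contained
   sketch of that replace-and-undo pattern over a flat array; the names
   are hypothetical.  */

#include <stddef.h>

#define MAX_UNDO 64
static int *undo_stack[MAX_UNDO];   /* addresses of the changed slots */
static int undo_depth;

/* Replace every occurrence of WHAT in SLOT[0..N) by REPL, remembering
   where the replacements happened.  */
static void
subst_all (int *slot, size_t n, int what, int repl)
{
  for (size_t i = 0; i < n; i++)
    if (slot[i] == what && undo_depth < MAX_UNDO)
      {
        undo_stack[undo_depth++] = &slot[i];
        slot[i] = repl;
      }
}

/* Write WHAT back into every recorded location, newest first.  */
static void
undo_all (int what)
{
  while (undo_depth > 0)
    *undo_stack[--undo_depth] = what;
}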
5510 | |
5511 | /* Return TRUE if the chain of reloads R1 and R2 (in either order) |
5512 | can be evaluated without using an intermediate register for the |
5513 | reload that contains the other reload. See gen_reload to |
5514 | understand what this function is trying to do. As an |
5515 | example, consider the reload chain |
5516 | |
5517 | r2: const |
5518 | r1: <something> + const |
5519 | |
5520 | and suppose reload R2 got reload reg HR. The function returns true if |
5521 | there is a valid insn HR = HR + <something>. Otherwise, |
5522 | gen_reload will use an intermediate register (namely the reload |
5523 | reg for R1) to reload <something>. |
5524 | |
5525 | We need this function to find conflicts for chain reloads. In our |
5526 | example, if HR = HR + <something> is not a valid insn, then we |
5527 | cannot use HR as the reload register for R2. If we do use it then |
5528 | we get wrong code: |
5529 | |
5530 | HR = const |
5531 | HR = <something> |
5532 | HR = HR + HR |
5533 | |
5534 | */ |
5535 | static bool |
5536 | gen_reload_chain_without_interm_reg_p (int r1, int r2) |
5537 | { |
5538 | /* Assume other cases in gen_reload are not possible for |
5539 | chain reloads or do need intermediate hard registers. */ |
5540 | bool result = true; |
5541 | int regno, code; |
5542 | rtx out, in; |
5543 | rtx_insn *insn; |
5544 | rtx_insn *last = get_last_insn (); |
5545 | |
5546 | /* Make r2 a component of r1. */ |
5547 | if (reg_mentioned_p (rld[r1].in, rld[r2].in)) |
5548 | std::swap (r1, r2); |
5549 | |
5550 | gcc_assert (reg_mentioned_p (rld[r2].in, rld[r1].in)); |
5551 | regno = rld[r1].regno >= 0 ? rld[r1].regno : rld[r2].regno; |
5552 | gcc_assert (regno >= 0); |
5553 | out = gen_rtx_REG (rld[r1].mode, regno); |
5554 | in = rld[r1].in; |
5555 | substitute (&in, rld[r2].in, gen_rtx_REG (rld[r2].mode, regno)); |
5556 | |
5557 | /* If IN is a paradoxical SUBREG, remove it and try to put the |
5558 | opposite SUBREG on OUT. Likewise for a paradoxical SUBREG on OUT. */ |
5559 | strip_paradoxical_subreg (&in, &out); |
5560 | |
5561 | if (GET_CODE (in) == PLUS |
5562 | && (REG_P (XEXP (in, 0)) |
5563 | || GET_CODE (XEXP (in, 0)) == SUBREG |
5564 | || MEM_P (XEXP (in, 0))) |
5565 | && (REG_P (XEXP (in, 1)) |
5566 | || GET_CODE (XEXP (in, 1)) == SUBREG |
5567 | || CONSTANT_P (XEXP (in, 1)) |
5568 | || MEM_P (XEXP (in, 1)))) |
5569 | { |
5570 | insn = emit_insn (gen_rtx_SET (out, in)); |
5571 | code = recog_memoized (insn); |
5572 | result = false; |
5573 | |
5574 | if (code >= 0) |
5575 | { |
5576 | extract_insn (insn); |
5577 | /* We want constrain operands to treat this insn strictly in |
5578 | its validity determination, i.e., the way it would after |
5579 | reload has completed. */ |
5580 | result = constrain_operands (1, get_enabled_alternatives (insn)); |
5581 | } |
5582 | |
5583 | delete_insns_since (last); |
5584 | } |
5585 | |
5586 | /* Restore the original value at each changed address within R1. */ |
5587 | while (!substitute_stack.is_empty ()) |
5588 | { |
5589 | rtx *where = substitute_stack.pop (); |
5590 | *where = rld[r2].in; |
5591 | } |
5592 | |
5593 | return result; |
5594 | } |
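
/* Editor's note: the emit/recog/constrain/delete_insns_since sequence
   above is a trial probe: a candidate insn is appended to the stream,
   validated strictly, and then removed no matter what; only the boolean
   answer survives.  Below is a self-contained sketch of the shape of
   that probe; the types and the validator are hypothetical.  */

#include <stddef.h>

struct cand { int opcode; };
struct stream { struct cand buf[32]; size_t len; };

/* Append C, ask VALID_P about it, then truncate the stream back to
   where it was (the analogue of delete_insns_since).  */
static int
probe (struct stream *s, struct cand c, int (*valid_p) (struct cand))
{
  size_t mark = s->len;              /* get_last_insn () analogue */
  if (s->len < sizeof s->buf / sizeof s->buf[0])
    s->buf[s->len++] = c;            /* emit_insn analogue */
  int ok = valid_p (c);              /* recog + constrain_operands */
  s->len = mark;                     /* roll back unconditionally */
  return ok;
}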
5595 | |
5596 | /* Return 1 if the reloads denoted by R1 and R2 cannot share a register. |
5597 | Return 0 otherwise. |
5598 | |
5599 | This function uses the same algorithm as reload_reg_free_p above. */ |
5600 | |
5601 | static int |
5602 | reloads_conflict (int r1, int r2) |
5603 | { |
5604 | enum reload_type r1_type = rld[r1].when_needed; |
5605 | enum reload_type r2_type = rld[r2].when_needed; |
5606 | int r1_opnum = rld[r1].opnum; |
5607 | int r2_opnum = rld[r2].opnum; |
5608 | |
5609 | /* RELOAD_OTHER conflicts with everything. */ |
5610 | if (r2_type == RELOAD_OTHER) |
5611 | return 1; |
5612 | |
5613 | /* Otherwise, check conflicts differently for each type. */ |
5614 | |
5615 | switch (r1_type) |
5616 | { |
5617 | case RELOAD_FOR_INPUT: |
5618 | return (r2_type == RELOAD_FOR_INSN |
5619 | || r2_type == RELOAD_FOR_OPERAND_ADDRESS |
5620 | || r2_type == RELOAD_FOR_OPADDR_ADDR |
5621 | || r2_type == RELOAD_FOR_INPUT |
5622 | || ((r2_type == RELOAD_FOR_INPUT_ADDRESS |
5623 | || r2_type == RELOAD_FOR_INPADDR_ADDRESS) |
5624 | && r2_opnum > r1_opnum)); |
5625 | |
5626 | case RELOAD_FOR_INPUT_ADDRESS: |
5627 | return ((r2_type == RELOAD_FOR_INPUT_ADDRESS && r1_opnum == r2_opnum) |
5628 | || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum)); |
5629 | |
5630 | case RELOAD_FOR_INPADDR_ADDRESS: |
5631 | return ((r2_type == RELOAD_FOR_INPADDR_ADDRESS && r1_opnum == r2_opnum) |
5632 | || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum)); |
5633 | |
5634 | case RELOAD_FOR_OUTPUT_ADDRESS: |
5635 | return ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS && r2_opnum == r1_opnum) |
5636 | || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum <= r1_opnum)); |
5637 | |
5638 | case RELOAD_FOR_OUTADDR_ADDRESS: |
5639 | return ((r2_type == RELOAD_FOR_OUTADDR_ADDRESS && r2_opnum == r1_opnum) |
5640 | || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum <= r1_opnum)); |
5641 | |
5642 | case RELOAD_FOR_OPERAND_ADDRESS: |
5643 | return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_INSN |
5644 | || (r2_type == RELOAD_FOR_OPERAND_ADDRESS |
5645 | && (!reloads_unique_chain_p (r1, r2) |
5646 | || !gen_reload_chain_without_interm_reg_p (r1, r2)))); |
5647 | |
5648 | case RELOAD_FOR_OPADDR_ADDR: |
5649 | return (r2_type == RELOAD_FOR_INPUT |
5650 | || r2_type == RELOAD_FOR_OPADDR_ADDR); |
5651 | |
5652 | case RELOAD_FOR_OUTPUT: |
5653 | return (r2_type == RELOAD_FOR_INSN || r2_type == RELOAD_FOR_OUTPUT |
5654 | || ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS |
5655 | || r2_type == RELOAD_FOR_OUTADDR_ADDRESS) |
5656 | && r2_opnum >= r1_opnum)); |
5657 | |
5658 | case RELOAD_FOR_INSN: |
5659 | return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_OUTPUT |
5660 | || r2_type == RELOAD_FOR_INSN |
5661 | || r2_type == RELOAD_FOR_OPERAND_ADDRESS); |
5662 | |
5663 | case RELOAD_FOR_OTHER_ADDRESS: |
5664 | return r2_type == RELOAD_FOR_OTHER_ADDRESS; |
5665 | |
5666 | case RELOAD_OTHER: |
5667 | return 1; |
5668 | |
5669 | default: |
5670 | gcc_unreachable (); |
5671 | } |
5672 | } |
5673 | |
5674 | /* Indexed by reload number, 1 if incoming value |
5675 | inherited from previous insns. */ |
5676 | static char reload_inherited[MAX_RELOADS]; |
5677 | |
5678 | /* For an inherited reload, this is the insn the reload was inherited from, |
5679 | if we know it. Otherwise, this is 0. */ |
5680 | static rtx_insn *reload_inheritance_insn[MAX_RELOADS]; |
5681 | |
5682 | /* If nonzero, this is a place to get the value of the reload, |
5683 | rather than using reload_in. */ |
5684 | static rtx reload_override_in[MAX_RELOADS]; |