File: build/gcc/postreload.cc
Warning: line 558, column 28: The left operand of '==' is a garbage value
1 | /* Perform simple optimizations to clean up the result of reload.
2 |    Copyright (C) 1987-2023 Free Software Foundation, Inc.
3 |
4 | This file is part of GCC.
5 |
6 | GCC is free software; you can redistribute it and/or modify it under
7 | the terms of the GNU General Public License as published by the Free
8 | Software Foundation; either version 3, or (at your option) any later
9 | version.
10 |
11 | GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 | WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 | FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
14 | for more details.
15 |
16 | You should have received a copy of the GNU General Public License
17 | along with GCC; see the file COPYING3.  If not see
18 | <http://www.gnu.org/licenses/>.  */
19 |
20 | #include "config.h"
21 | #include "system.h"
22 | #include "coretypes.h"
23 | #include "backend.h"
24 | #include "target.h"
25 | #include "rtl.h"
26 | #include "tree.h"
27 | #include "predict.h"
28 | #include "df.h"
29 | #include "memmodel.h"
30 | #include "tm_p.h"
31 | #include "optabs.h"
32 | #include "regs.h"
33 | #include "emit-rtl.h"
34 | #include "recog.h"
35 |
36 | #include "cfgrtl.h"
37 | #include "cfgbuild.h"
38 | #include "cfgcleanup.h"
39 | #include "reload.h"
40 | #include "cselib.h"
41 | #include "tree-pass.h"
42 | #include "dbgcnt.h"
43 | #include "function-abi.h"
44 | #include "rtl-iter.h"
45 |
46 | static bool reload_cse_simplify (rtx_insn *, rtx);
47 | static void reload_cse_regs_1 (void);
48 | static int reload_cse_simplify_set (rtx, rtx_insn *);
49 | static int reload_cse_simplify_operands (rtx_insn *, rtx);
50 |
51 | static void reload_combine (void);
52 | static void reload_combine_note_use (rtx *, rtx_insn *, int, rtx);
53 | static void reload_combine_note_store (rtx, const_rtx, void *);
54 |
55 | static bool reload_cse_move2add (rtx_insn *);
56 | static void move2add_note_store (rtx, const_rtx, void *);
57 |
58 | /* Call cse / combine like post-reload optimization phases.
59 |    FIRST is the first instruction.  */
60 |
61 | static void
62 | reload_cse_regs (rtx_insn *first ATTRIBUTE_UNUSED)
63 | {
64 |   bool moves_converted;
65 |   reload_cse_regs_1 ();
66 |   reload_combine ();
67 |   moves_converted = reload_cse_move2add (first);
68 |   if (flag_expensive_optimizations)
69 |     {
70 |       if (moves_converted)
71 |         reload_combine ();
72 |       reload_cse_regs_1 ();
73 |     }
74 | }
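    | // note: under -fexpensive-optimizations, reload_combine is rerun only
    | // when reload_cse_move2add actually converted an add into a move, since
    | // that conversion is what can expose the new reg+reg addressing
    | // opportunities reload_combine looks for.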
75 |
76 | /* Try to simplify INSN.  Return true if the CFG may have changed.  */
77 | static bool
78 | reload_cse_simplify (rtx_insn *insn, rtx testreg)
79 | {
80 |   rtx body = PATTERN (insn);
81 |   basic_block insn_bb = BLOCK_FOR_INSN (insn);
82 |   unsigned insn_bb_succs = EDGE_COUNT (insn_bb->succs);
83 |
84 |   /* If NO_FUNCTION_CSE has been set by the target, then we should not try
85 |      to cse function calls.  */
86 |   if (NO_FUNCTION_CSE && CALL_P (insn))
87 |     return false;
88 |
89 |   /* Remember if this insn has been sp += const_int.  */
90 |   rtx sp_set = set_for_reg_notes (insn);
91 |   rtx sp_addend = NULL_RTX;
92 |   if (sp_set
93 |       && SET_DEST (sp_set) == stack_pointer_rtx
94 |       && GET_CODE (SET_SRC (sp_set)) == PLUS
95 |       && XEXP (SET_SRC (sp_set), 0) == stack_pointer_rtx
96 |       && CONST_INT_P (XEXP (SET_SRC (sp_set), 1)))
97 |     sp_addend = XEXP (SET_SRC (sp_set), 1);
98 |
99 |   if (GET_CODE (body) == SET)
100 |     {
101 |       int count = 0;
102 |
103 |       /* Simplify even if we may think it is a no-op.
104 |          We may think a memory load of a value smaller than WORD_SIZE
105 |          is redundant because we haven't taken into account possible
106 |          implicit extension.  reload_cse_simplify_set () will bring
107 |          this out, so it's safer to simplify before we delete.  */
108 |       count += reload_cse_simplify_set (body, insn);
109 |
110 |       if (!count && cselib_redundant_set_p (body))
111 |         {
112 |           if (check_for_inc_dec (insn))
113 |             delete_insn_and_edges (insn);
114 |           /* We're done with this insn.  */
115 |           goto done;
116 |         }
117 |
118 |       if (count > 0)
119 |         apply_change_group ();
120 |       else
121 |         reload_cse_simplify_operands (insn, testreg);
122 |     }
123 |   else if (GET_CODE (body) == PARALLEL)
124 |     {
125 |       int i;
126 |       int count = 0;
127 |       rtx value = NULL_RTX;
128 |
129 |       /* Registers mentioned in the clobber list for an asm cannot be reused
130 |          within the body of the asm.  Invalidate those registers now so that
131 |          we don't try to substitute values for them.  */
132 |       if (asm_noperands (body) >= 0)
133 |         {
134 |           for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
135 |             {
136 |               rtx part = XVECEXP (body, 0, i);
137 |               if (GET_CODE (part) == CLOBBER && REG_P (XEXP (part, 0)))
138 |                 cselib_invalidate_rtx (XEXP (part, 0));
139 |             }
140 |         }
141 |
142 |       /* If every action in a PARALLEL is a noop, we can delete
143 |          the entire PARALLEL.  */
144 |       for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
145 |         {
146 |           rtx part = XVECEXP (body, 0, i);
147 |           if (GET_CODE (part) == SET)
148 |             {
149 |               if (! cselib_redundant_set_p (part))
150 |                 break;
151 |               if (REG_P (SET_DEST (part))
152 |                   && REG_FUNCTION_VALUE_P (SET_DEST (part)))
153 |                 {
154 |                   if (value)
155 |                     break;
156 |                   value = SET_DEST (part);
157 |                 }
158 |             }
159 |           else if (GET_CODE (part) != CLOBBER && GET_CODE (part) != USE)
160 |             break;
161 |         }
162 |
163 |       if (i < 0)
164 |         {
165 |           if (check_for_inc_dec (insn))
166 |             delete_insn_and_edges (insn);
167 |           /* We're done with this insn.  */
168 |           goto done;
169 |         }
170 |
171 |       /* It's not a no-op, but we can try to simplify it.  */
172 |       for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
173 |         if (GET_CODE (XVECEXP (body, 0, i)) == SET)
174 |           count += reload_cse_simplify_set (XVECEXP (body, 0, i), insn);
175 |
176 |       if (count > 0)
177 |         apply_change_group ();
178 |       else
179 |         reload_cse_simplify_operands (insn, testreg);
180 |     }
181 |
182 |   /* If sp += const_int insn is changed into sp = reg;, add REG_EQUAL
183 |      note so that the stack_adjustments pass can undo it if beneficial.  */
184 |   if (sp_addend
185 |       && SET_DEST (sp_set) == stack_pointer_rtx
186 |       && REG_P (SET_SRC (sp_set)))
187 |     set_dst_reg_note (insn, REG_EQUAL,
188 |                       gen_rtx_PLUS (Pmode, stack_pointer_rtx,
189 |                                     sp_addend), stack_pointer_rtx);
190 |
191 | done:
192 |   return (EDGE_COUNT (insn_bb->succs) != insn_bb_succs);
193 | }
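    | // note: the successor-edge count of INSN's block was captured on entry
    | // (line 82); comparing it here is how the caller learns whether
    | // delete_insn_and_edges removed an edge and thus changed the CFG.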
194 |
195 | /* Do a very simple CSE pass over the hard registers.
196 |
197 |    This function detects no-op moves where we happened to assign two
198 |    different pseudo-registers to the same hard register, and then
199 |    copied one to the other.  Reload will generate a useless
200 |    instruction copying a register to itself.
201 |
202 |    This function also detects cases where we load a value from memory
203 |    into two different registers, and (if memory is more expensive than
204 |    registers) changes it to simply copy the first register into the
205 |    second register.
206 |
207 |    Another optimization is performed that scans the operands of each
208 |    instruction to see whether the value is already available in a
209 |    hard register.  It then replaces the operand with the hard register
210 |    if possible, much like an optional reload would.  */
211 |
212 | static void
213 | reload_cse_regs_1 (void)
214 | {
215 |   bool cfg_changed = false;
216 |   basic_block bb;
217 |   rtx_insn *insn;
218 |   rtx testreg = gen_rtx_REG (word_mode, LAST_VIRTUAL_REGISTER + 1);
219 |
220 |   cselib_init (CSELIB_RECORD_MEMORY);
221 |   init_alias_analysis ();
222 |
223 |   FOR_EACH_BB_FN (bb, cfun)
224 |     FOR_BB_INSNS (bb, insn)
225 |       {
226 |         if (INSN_P (insn))
227 |           cfg_changed |= reload_cse_simplify (insn, testreg);
228 |
229 |         cselib_process_insn (insn);
230 |       }
231 |
232 |   /* Clean up.  */
233 |   end_alias_analysis ();
234 |   cselib_finish ();
235 |   if (cfg_changed)
236 |     cleanup_cfg (0);
237 | }
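    | // note: TESTREG is a scratch REG rtx built with a register number past
    | // LAST_VIRTUAL_REGISTER; reload_cse_simplify_operands later retargets it
    | // via set_mode_and_regno so reg_fits_class_p can be queried for each
    | // candidate hard register without allocating a fresh rtx every time.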
238 |
239 | /* Try to simplify a single SET instruction.  SET is the set pattern.
240 |    INSN is the instruction it came from.
241 |    This function only handles one case: if we set a register to a value
242 |    which is not a register, we try to find that value in some other register
243 |    and change the set into a register copy.  */
244 |
245 | static int
246 | reload_cse_simplify_set (rtx set, rtx_insn *insn)
247 | {
248 |   int did_change = 0;
249 |   int dreg;
250 |   rtx src;
251 |   reg_class_t dclass;
252 |   int old_cost;
253 |   cselib_val *val;
254 |   struct elt_loc_list *l;
255 |   enum rtx_code extend_op = UNKNOWN;
256 |   bool speed = optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn));
257 |
258 |   dreg = true_regnum (SET_DEST (set));
259 |   if (dreg < 0)
260 |     return 0;
261 |
262 |   src = SET_SRC (set);
263 |   if (side_effects_p (src) || true_regnum (src) >= 0)
264 |     return 0;
265 |
266 |   dclass = REGNO_REG_CLASS (dreg);
267 |
268 |   /* When replacing a memory with a register, we need to honor assumptions
269 |      that combine made wrt the contents of sign bits.  We'll do this by
270 |      generating an extend instruction instead of a reg->reg copy.  Thus
271 |      the destination must be a register that we can widen.  */
272 |   if (MEM_P (src)
273 |       && (extend_op = load_extend_op (GET_MODE (src))) != UNKNOWN
274 |       && !REG_P (SET_DEST (set)))
275 |     return 0;
276 |
277 |   val = cselib_lookup (src, GET_MODE (SET_DEST (set)), 0, VOIDmode);
278 |   if (! val)
279 |     return 0;
280 |
281 |   /* If memory loads are cheaper than register copies, don't change them.  */
282 |   if (MEM_P (src))
283 |     old_cost = memory_move_cost (GET_MODE (src), dclass, true);
284 |   else if (REG_P (src))
285 |     old_cost = register_move_cost (GET_MODE (src),
286 |                                    REGNO_REG_CLASS (REGNO (src)), dclass);
287 |   else
288 |     old_cost = set_src_cost (src, GET_MODE (SET_DEST (set)), speed);
289 |
290 |   for (l = val->locs; l; l = l->next)
291 |     {
292 |       rtx this_rtx = l->loc;
293 |       int this_cost;
294 |
295 |       if (CONSTANT_P (this_rtx) && ! references_value_p (this_rtx, 0))
296 |         {
297 |           if (extend_op != UNKNOWN)
298 |             {
299 |               wide_int result;
300 |
301 |               if (!CONST_SCALAR_INT_P (this_rtx))
302 |                 continue;
303 |
304 |               switch (extend_op)
305 |                 {
306 |                 case ZERO_EXTEND:
307 |                   result = wide_int::from (rtx_mode_t (this_rtx,
308 |                                                        GET_MODE (src)),
309 |                                            BITS_PER_WORD, UNSIGNED);
310 |                   break;
311 |                 case SIGN_EXTEND:
312 |                   result = wide_int::from (rtx_mode_t (this_rtx,
313 |                                                        GET_MODE (src)),
314 |                                            BITS_PER_WORD, SIGNED);
315 |                   break;
316 |                 default:
317 |                   gcc_unreachable ();
318 |                 }
319 |               this_rtx = immed_wide_int_const (result, word_mode);
320 |             }
321 |
322 |           this_cost = set_src_cost (this_rtx, GET_MODE (SET_DEST (set)), speed);
323 |         }
324 |       else if (REG_P (this_rtx))
325 |         {
326 |           if (extend_op != UNKNOWN)
327 |             {
328 |               this_rtx = gen_rtx_fmt_e (extend_op, word_mode, this_rtx);
329 |               this_cost = set_src_cost (this_rtx, word_mode, speed);
330 |             }
331 |           else
332 |             this_cost = register_move_cost (GET_MODE (this_rtx),
333 |                                             REGNO_REG_CLASS (REGNO (this_rtx)),
334 |                                             dclass);
335 |         }
336 |       else
337 |         continue;
338 |
339 |       /* If equal costs, prefer registers over anything else.  That
340 |          tends to lead to smaller instructions on some machines.  */
341 |       if (this_cost < old_cost
342 |           || (this_cost == old_cost
343 |               && REG_P (this_rtx)
344 |               && !REG_P (SET_SRC (set))))
345 |         {
346 |           if (extend_op != UNKNOWN
347 |               && REG_CAN_CHANGE_MODE_P (REGNO (SET_DEST (set)),
348 |                                         GET_MODE (SET_DEST (set)), word_mode))
349 |             {
350 |               rtx wide_dest = gen_rtx_REG (word_mode, REGNO (SET_DEST (set)));
351 |               ORIGINAL_REGNO (wide_dest) = ORIGINAL_REGNO (SET_DEST (set));
352 |               validate_change (insn, &SET_DEST (set), wide_dest, 1);
353 |             }
354 |
355 |           validate_unshare_change (insn, &SET_SRC (set), this_rtx, 1);
356 |           old_cost = this_cost, did_change = 1;
357 |         }
358 |     }
359 |
360 |   return did_change;
361 | }
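    | // note: when extend_op is set, a constant location is widened here to
    | // word_mode (sign- or zero-extended to BITS_PER_WORD) before costing, so
    | // the comparison against old_cost is made on the value the register
    | // would really hold after the target's implicit load extension.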
362 |
363 | /* Try to replace operands in INSN with equivalent values that are already
364 |    in registers.  This can be viewed as optional reloading.
365 |
366 |    For each non-register operand in the insn, see if any hard regs are
367 |    known to be equivalent to that operand.  Record the alternatives which
368 |    can accept these hard registers.  Among all alternatives, select the
369 |    ones which are better or equal to the one currently matching, where
370 |    "better" is in terms of '?' and '!' constraints.  Among the remaining
371 |    alternatives, select the one which replaces most operands with
372 |    hard registers.  */
373 |
374 | static int
375 | reload_cse_simplify_operands (rtx_insn *insn, rtx testreg)
376 | {
377 |   int i, j;
378 |
379 |   /* For each operand, all registers that are equivalent to it.  */
380 |   HARD_REG_SET equiv_regs[MAX_RECOG_OPERANDS];
381 |
382 |   const char *constraints[MAX_RECOG_OPERANDS];
383 |
384 |   /* Vector recording how bad an alternative is.  */
385 |   int *alternative_reject;
386 |   /* Vector recording how many registers can be introduced by choosing
387 |      this alternative.  */
388 |   int *alternative_nregs;
389 |   /* Array of vectors recording, for each operand and each alternative,
390 |      which hard register to substitute, or -1 if the operand should be
391 |      left as it is.  */
392 |   int *op_alt_regno[MAX_RECOG_OPERANDS];
393 |   /* Array of alternatives, sorted in order of decreasing desirability.  */
394 |   int *alternative_order;
395 |
396 |   extract_constrain_insn (insn);
397 |
398 |   if (recog_data.n_alternatives == 0 || recog_data.n_operands == 0)
399 |     return 0;
400 |
401 |   alternative_reject = XALLOCAVEC (int, recog_data.n_alternatives);
402 |   alternative_nregs = XALLOCAVEC (int, recog_data.n_alternatives);
403 |   alternative_order = XALLOCAVEC (int, recog_data.n_alternatives);
404 |   memset (alternative_reject, 0, recog_data.n_alternatives * sizeof (int));
405 |   memset (alternative_nregs, 0, recog_data.n_alternatives * sizeof (int));
406 |
407 |   /* For each operand, find out which regs are equivalent.  */
408 |   for (i = 0; i < recog_data.n_operands; i++)
409 |     {
410 |       cselib_val *v;
411 |       struct elt_loc_list *l;
412 |       rtx op;
413 |
414 |       CLEAR_HARD_REG_SET (equiv_regs[i]);
415 |
416 |       /* cselib blows up on CODE_LABELs.  Trying to fix that doesn't seem
417 |          right, so avoid the problem here.  Similarly NOTE_INSN_DELETED_LABEL.
418 |          Likewise if we have a constant and the insn pattern doesn't tell us
419 |          the mode we need.  */
420 |       if (LABEL_P (recog_data.operand[i])
421 |           || (NOTE_P (recog_data.operand[i])
422 |               && NOTE_KIND (recog_data.operand[i]) == NOTE_INSN_DELETED_LABEL)
423 |           || (CONSTANT_P (recog_data.operand[i])
424 |               && recog_data.operand_mode[i] == VOIDmode))
425 |         continue;
426 |
427 |       op = recog_data.operand[i];
428 |       if (MEM_P (op) && load_extend_op (GET_MODE (op)) != UNKNOWN)
429 |         {
430 |           rtx set = single_set (insn);
431 |
432 |           /* We might have multiple sets, some of which do implicit
433 |              extension.  Punt on this for now.  */
434 |           if (! set)
435 |             continue;
436 |           /* If the destination is also a MEM or a STRICT_LOW_PART, no
437 |              extension applies.
438 |              Also, if there is an explicit extension, we don't have to
439 |              worry about an implicit one.  */
440 |           else if (MEM_P (SET_DEST (set))
441 |                    || GET_CODE (SET_DEST (set)) == STRICT_LOW_PART
442 |                    || GET_CODE (SET_SRC (set)) == ZERO_EXTEND
443 |                    || GET_CODE (SET_SRC (set)) == SIGN_EXTEND)
444 |             ; /* Continue ordinary processing.  */
445 |           /* If the register cannot change mode to word_mode, it follows that
446 |              it cannot have been used in word_mode.  */
447 |           else if (REG_P (SET_DEST (set))
448 |                    && !REG_CAN_CHANGE_MODE_P (REGNO (SET_DEST (set)),
449 |                                               GET_MODE (SET_DEST (set)),
450 |                                               word_mode))
451 |             ; /* Continue ordinary processing.  */
452 |           /* If this is a straight load, make the extension explicit.  */
453 |           else if (REG_P (SET_DEST (set))
454 |                    && recog_data.n_operands == 2
455 |                    && SET_SRC (set) == op
456 |                    && SET_DEST (set) == recog_data.operand[1-i])
457 |             {
458 |               validate_change (insn, recog_data.operand_loc[i],
459 |                                gen_rtx_fmt_e (load_extend_op (GET_MODE (op)),
460 |                                               word_mode, op),
461 |                                1);
462 |               validate_change (insn, recog_data.operand_loc[1-i],
463 |                                gen_rtx_REG (word_mode, REGNO (SET_DEST (set))),
464 |                                1);
465 |               if (! apply_change_group ())
466 |                 return 0;
467 |               return reload_cse_simplify_operands (insn, testreg);
468 |             }
469 |           else
470 |             /* ??? There might be arithmetic operations with memory that are
471 |                safe to optimize, but is it worth the trouble?  */
472 |             continue;
473 |         }
474 |
475 |       if (side_effects_p (op))
476 |         continue;
477 |       v = cselib_lookup (op, recog_data.operand_mode[i], 0, VOIDmode);
478 |       if (! v)
479 |         continue;
480 |
481 |       for (l = v->locs; l; l = l->next)
482 |         if (REG_P (l->loc))
483 |           SET_HARD_REG_BIT (equiv_regs[i], REGNO (l->loc));
484 |     }
485 |
486 |   alternative_mask preferred = get_preferred_alternatives (insn);
487 |   for (i = 0; i < recog_data.n_operands; i++)
488 |     {
489 |       machine_mode mode;
490 |       int regno;
491 |       const char *p;
492 |
493 |       op_alt_regno[i] = XALLOCAVEC (int, recog_data.n_alternatives);
494 |       for (j = 0; j < recog_data.n_alternatives; j++)
495 |         op_alt_regno[i][j] = -1;
496 |
497 |       p = constraints[i] = recog_data.constraints[i];
498 |       mode = recog_data.operand_mode[i];
499 |
500 |       /* Add the reject values for each alternative given by the constraints
501 |          for this operand.  */
502 |       j = 0;
503 |       while (*p != '\0')
504 |         {
505 |           char c = *p++;
506 |           if (c == ',')
507 |             j++;
508 |           else if (c == '?')
509 |             alternative_reject[j] += 3;
510 |           else if (c == '!')
511 |             alternative_reject[j] += 300;
512 |         }
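    | // note: each '?' in a constraint mildly disparages the alternative it
    | // appears in and each '!' all but rules it out; the accumulated reject
    | // totals drive the filtering and sorting of alternatives further down.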
513 |
514 |       /* We won't change operands which are already registers.  We
515 |          also don't want to modify output operands.  */
516 |       regno = true_regnum (recog_data.operand[i]);
517 |       if (regno >= 0
518 |           || constraints[i][0] == '='
519 |           || constraints[i][0] == '+')
520 |         continue;
521 |
522 |       for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
523 |         {
524 |           enum reg_class rclass = NO_REGS;
525 |
526 |           if (! TEST_HARD_REG_BIT (equiv_regs[i], regno))
527 |             continue;
528 |
529 |           set_mode_and_regno (testreg, mode, regno);
530 |
531 |           /* We found a register equal to this operand.  Now look for all
532 |              alternatives that can accept this register and have not been
533 |              assigned a register they can use yet.  */
534 |           j = 0;
535 |           p = constraints[i];
536 |           for (;;)
537 |             {
538 |               char c = *p;
539 |
540 |               switch (c)
541 |                 {
542 |                 case 'g':
543 |                   rclass = reg_class_subunion[rclass][GENERAL_REGS];
544 |                   break;
545 |
546 |                 default:
547 |                   rclass
548 |                     = (reg_class_subunion
549 |                        [rclass]
550 |                        [reg_class_for_constraint (lookup_constraint (p))]);
551 |                   break;
552 |
553 |                 case ',': case '\0':
554 |                   /* See if REGNO fits this alternative, and set it up as the
555 |                      replacement register if we don't have one for this
556 |                      alternative yet and the operand being replaced is not
557 |                      a cheap CONST_INT.  */
558 |                   if (op_alt_regno[i][j] == -1
    |                   // ^ Warning (line 558, column 28): The left operand of
    |                   //   '==' is a garbage value
559 |                       && TEST_BIT (preferred, j)
560 |                       && reg_fits_class_p (testreg, rclass, 0, mode)
561 |                       && (!CONST_INT_P (recog_data.operand[i])
562 |                           || (set_src_cost (recog_data.operand[i], mode,
563 |                                             optimize_bb_for_speed_p
564 |                                              (BLOCK_FOR_INSN (insn)))
565 |                               > set_src_cost (testreg, mode,
566 |                                               optimize_bb_for_speed_p
567 |                                                (BLOCK_FOR_INSN (insn))))))
568 |                     {
569 |                       alternative_nregs[j]++;
570 |                       op_alt_regno[i][j] = regno;
571 |                     }
572 |                   j++;
573 |                   rclass = NO_REGS;
574 |                   break;
575 |                 }
576 |               p += CONSTRAINT_LEN (c, p);
577 |
578 |               if (c == '\0')
579 |                 break;
580 |             }
581 |         }
582 |     }
583 |
584 |   /* The loop below sets alternative_order[0] but -Wmaybe-uninitialized
585 |      can't know that.  Clear it here to avoid the warning.  */
586 |   alternative_order[0] = 0;
587 |   gcc_assert (!recog_data.n_alternatives
588 |               || (which_alternative >= 0
589 |                   && which_alternative < recog_data.n_alternatives));
590 |
591 |   /* Record all alternatives which are better or equal to the currently
592 |      matching one in the alternative_order array.  */
593 |   for (i = j = 0; i < recog_data.n_alternatives; i++)
594 |     if (alternative_reject[i] <= alternative_reject[which_alternative])
595 |       alternative_order[j++] = i;
596 |   recog_data.n_alternatives = j;
597 |
598 |   /* Sort it.  Given a small number of alternatives, a dumb algorithm
599 |      won't hurt too much.  */
600 |   for (i = 0; i < recog_data.n_alternatives - 1; i++)
601 |     {
602 |       int best = i;
603 |       int best_reject = alternative_reject[alternative_order[i]];
604 |       int best_nregs = alternative_nregs[alternative_order[i]];
605 |
606 |       for (j = i + 1; j < recog_data.n_alternatives; j++)
607 |         {
608 |           int this_reject = alternative_reject[alternative_order[j]];
609 |           int this_nregs = alternative_nregs[alternative_order[j]];
610 |
611 |           if (this_reject < best_reject
612 |               || (this_reject == best_reject && this_nregs > best_nregs))
613 |             {
614 |               best = j;
615 |               best_reject = this_reject;
616 |               best_nregs = this_nregs;
617 |             }
618 |         }
619 |
620 |       std::swap (alternative_order[best], alternative_order[i]);
621 |     }
622 |
623 |   /* Substitute the operands as determined by op_alt_regno for the best
624 |      alternative.  */
625 |   j = alternative_order[0];
626 |
627 |   for (i = 0; i < recog_data.n_operands; i++)
628 |     {
629 |       machine_mode mode = recog_data.operand_mode[i];
630 |       if (op_alt_regno[i][j] == -1)
631 |         continue;
632 |
633 |       validate_change (insn, recog_data.operand_loc[i],
634 |                        gen_rtx_REG (mode, op_alt_regno[i][j]), 1);
635 |     }
636 |
637 |   for (i = recog_data.n_dups - 1; i >= 0; i--)
638 |     {
639 |       int op = recog_data.dup_num[i];
640 |       machine_mode mode = recog_data.operand_mode[op];
641 |
642 |       if (op_alt_regno[op][j] == -1)
643 |         continue;
644 |
645 |       validate_change (insn, recog_data.dup_loc[i],
646 |                        gen_rtx_REG (mode, op_alt_regno[op][j]), 1);
647 |     }
648 |
649 |   return apply_change_group ();
650 | }
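    | // note: all substitutions above were queued with validate_change's
    | // in-group flag set to 1; apply_change_group re-recognizes the insn and
    | // either commits every queued change or cancels them all, so the return
    | // value reports whether the replacement actually stuck.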
651 |
652 | /* If reload couldn't use reg+reg+offset addressing, try to use reg+reg
653 |    addressing now.
654 |    This code might also be useful when reload gave up on reg+reg addressing
655 |    because of clashes between the return register and INDEX_REG_CLASS.  */
656 |
657 | /* The maximum number of uses of a register we can keep track of to
658 |    replace them with reg+reg addressing.  */
659 | #define RELOAD_COMBINE_MAX_USES 16
660 |
661 | /* Describes a recorded use of a register.  */
662 | struct reg_use
663 | {
664 |   /* The insn where a register has been used.  */
665 |   rtx_insn *insn;
666 |   /* Points to the memory reference enclosing the use, if any, NULL_RTX
667 |      otherwise.  */
668 |   rtx containing_mem;
669 |   /* Location of the register within INSN.  */
670 |   rtx *usep;
671 |   /* The reverse uid of the insn.  */
672 |   int ruid;
673 | };
674 |
675 | /* If the register is used in some unknown fashion, USE_INDEX is negative.
676 |    If it is dead, USE_INDEX is RELOAD_COMBINE_MAX_USES, and STORE_RUID
677 |    indicates where it is first set or clobbered.
678 |    Otherwise, USE_INDEX is the index of the last encountered use of the
679 |    register (which is first among these we have seen since we scan backwards).
680 |    USE_RUID indicates the first encountered, i.e. last, of these uses.
681 |    If ALL_OFFSETS_MATCH is true, all encountered uses were inside a PLUS
682 |    with a constant offset; OFFSET contains this constant in that case.
683 |    STORE_RUID is always meaningful if we only want to use a value in a
684 |    register in a different place: it denotes the next insn in the insn
685 |    stream (i.e. the last encountered) that sets or clobbers the register.
686 |    REAL_STORE_RUID is similar, but clobbers are ignored when updating it.
687 |    EXPR is the expression used when storing the register.  */
688 | static struct
689 |   {
690 |     struct reg_use reg_use[RELOAD_COMBINE_MAX_USES];
691 |     rtx offset;
692 |     int use_index;
693 |     int store_ruid;
694 |     int real_store_ruid;
695 |     int use_ruid;
696 |     bool all_offsets_match;
697 |     rtx expr;
698 |   } reg_state[FIRST_PSEUDO_REGISTER];
699 |
700 | /* Reverse linear uid.  This is increased in reload_combine while scanning
701 |    the instructions from last to first.  It is used to set last_label_ruid
702 |    and the store_ruid / use_ruid fields in reg_state.  */
703 | static int reload_combine_ruid;
704 |
705 | /* The RUID of the last label we encountered in reload_combine.  */
706 | static int last_label_ruid;
707 |
708 | /* The RUID of the last jump we encountered in reload_combine.  */
709 | static int last_jump_ruid;
710 |
711 | /* The register numbers of the first and last index register.  A value of
712 |    -1 in LAST_INDEX_REG indicates that we've previously computed these
713 |    values and found no suitable index registers.  */
714 | static int first_index_reg = -1;
715 | static int last_index_reg;
716 |
717 | #define LABEL_LIVE(LABEL) \
718 |   (label_live[CODE_LABEL_NUMBER (LABEL) - min_labelno])
719 |
720 | /* Subroutine of reload_combine_split_ruids, called to fix up a single
721 |    ruid pointed to by *PRUID if it is higher than SPLIT_RUID.  */
722 |
723 | static inline void
724 | reload_combine_split_one_ruid (int *pruid, int split_ruid)
725 | {
726 |   if (*pruid > split_ruid)
727 |     (*pruid)++;
728 | }
729 |
730 | /* Called when we insert a new insn in a position we've already passed in
731 |    the scan.  Examine all our state, increasing all ruids that are higher
732 |    than SPLIT_RUID by one in order to make room for a new insn.  */
733 |
734 | static void
735 | reload_combine_split_ruids (int split_ruid)
736 | {
737 |   unsigned i;
738 |
739 |   reload_combine_split_one_ruid (&reload_combine_ruid, split_ruid);
740 |   reload_combine_split_one_ruid (&last_label_ruid, split_ruid);
741 |   reload_combine_split_one_ruid (&last_jump_ruid, split_ruid);
742 |
743 |   for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
744 |     {
745 |       int j, idx = reg_state[i].use_index;
746 |       reload_combine_split_one_ruid (&reg_state[i].use_ruid, split_ruid);
747 |       reload_combine_split_one_ruid (&reg_state[i].store_ruid, split_ruid);
748 |       reload_combine_split_one_ruid (&reg_state[i].real_store_ruid,
749 |                                      split_ruid);
750 |       if (idx < 0)
751 |         continue;
752 |       for (j = idx; j < RELOAD_COMBINE_MAX_USES; j++)
753 |         {
754 |           reload_combine_split_one_ruid (&reg_state[i].reg_use[j].ruid,
755 |                                          split_ruid);
756 |         }
757 |     }
758 | }
759 |
760 | /* Called when we are about to rescan a previously encountered insn with
761 |    reload_combine_note_use after modifying some part of it.  This clears all
762 |    information about uses in that particular insn.  */
763 |
764 | static void
765 | reload_combine_purge_insn_uses (rtx_insn *insn)
766 | {
767 |   unsigned i;
768 |
769 |   for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
770 |     {
771 |       int j, k, idx = reg_state[i].use_index;
772 |       if (idx < 0)
773 |         continue;
774 |       j = k = RELOAD_COMBINE_MAX_USES;
775 |       while (j-- > idx)
776 |         {
777 |           if (reg_state[i].reg_use[j].insn != insn)
778 |             {
779 |               k--;
780 |               if (k != j)
781 |                 reg_state[i].reg_use[k] = reg_state[i].reg_use[j];
782 |             }
783 |         }
784 |       reg_state[i].use_index = k;
785 |     }
786 | }
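    | // note: reg_use entries are stored from the top of the array downward
    | // (use_index counts down from RELOAD_COMBINE_MAX_USES), so this loop
    | // compacts the surviving entries toward the end of the array and then
    | // publishes the new lower bound in use_index.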
787 |
788 | /* Called when we need to forget about all uses of REGNO after an insn
789 |    which is identified by RUID.  */
790 |
791 | static void
792 | reload_combine_purge_reg_uses_after_ruid (unsigned regno, int ruid)
793 | {
794 |   int j, k, idx = reg_state[regno].use_index;
795 |   if (idx < 0)
796 |     return;
797 |   j = k = RELOAD_COMBINE_MAX_USES;
798 |   while (j-- > idx)
799 |     {
800 |       if (reg_state[regno].reg_use[j].ruid >= ruid)
801 |         {
802 |           k--;
803 |           if (k != j)
804 |             reg_state[regno].reg_use[k] = reg_state[regno].reg_use[j];
805 |         }
806 |     }
807 |   reg_state[regno].use_index = k;
808 | }
809 |
810 | /* Find the use of REGNO with the ruid that is highest among those
811 |    lower than RUID_LIMIT, and return it if it is the only use of this
812 |    reg in the insn.  Return NULL otherwise.  */
813 |
814 | static struct reg_use *
815 | reload_combine_closest_single_use (unsigned regno, int ruid_limit)
816 | {
817 |   int i, best_ruid = 0;
818 |   int use_idx = reg_state[regno].use_index;
819 |   struct reg_use *retval;
820 |
821 |   if (use_idx < 0)
822 |     return NULL;
823 |   retval = NULL;
824 |   for (i = use_idx; i < RELOAD_COMBINE_MAX_USES; i++)
825 |     {
826 |       struct reg_use *use = reg_state[regno].reg_use + i;
827 |       int this_ruid = use->ruid;
828 |       if (this_ruid >= ruid_limit)
829 |         continue;
830 |       if (this_ruid > best_ruid)
831 |         {
832 |           best_ruid = this_ruid;
833 |           retval = use;
834 |         }
835 |       else if (this_ruid == best_ruid)
836 |         retval = NULL;
837 |     }
838 |   if (last_label_ruid >= best_ruid)
839 |     return NULL;
840 |   return retval;
841 | }
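    | // note: two uses sharing the best ruid mean the register appears more
    | // than once in the same insn, so the answer would be ambiguous and NULL
    | // is returned; likewise when a label (last_label_ruid) intervenes between
    | // the candidate use and the point we are combining from.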
842 |
843 | /* After we've moved an add insn, fix up any debug insns that occur
844 |    between the old location of the add and the new location.  REG is
845 |    the destination register of the add insn; REPLACEMENT is the
846 |    SET_SRC of the add.  FROM and TO specify the range in which we
847 |    should make this change on debug insns.  */
848 |
849 | static void
850 | fixup_debug_insns (rtx reg, rtx replacement, rtx_insn *from, rtx_insn *to)
851 | {
852 |   rtx_insn *insn;
853 |   for (insn = from; insn != to; insn = NEXT_INSN (insn))
854 |     {
855 |       rtx t;
856 |
857 |       if (!DEBUG_BIND_INSN_P (insn))
858 |         continue;
859 |
860 |       t = INSN_VAR_LOCATION_LOC (insn);
861 |       t = simplify_replace_rtx (t, reg, replacement);
862 |       validate_change (insn, &INSN_VAR_LOCATION_LOC (insn), t, 0);
863 |     }
864 | }
865 |
866 | /* Subroutine of reload_combine_recognize_const_pattern.  Try to replace REG
867 |    with SRC in the insn described by USE, taking costs into account.  Return
868 |    true if we made the replacement.  */
869 |
870 | static bool
871 | try_replace_in_use (struct reg_use *use, rtx reg, rtx src)
872 | {
873 |   rtx_insn *use_insn = use->insn;
874 |   rtx mem = use->containing_mem;
875 |   bool speed = optimize_bb_for_speed_p (BLOCK_FOR_INSN (use_insn));
876 |
877 |   if (mem != NULL_RTX)
878 |     {
879 |       addr_space_t as = MEM_ADDR_SPACE (mem);
880 |       rtx oldaddr = XEXP (mem, 0);
881 |       rtx newaddr = NULL_RTX;
882 |       int old_cost = address_cost (oldaddr, GET_MODE (mem), as, speed);
883 |       int new_cost;
884 |
885 |       newaddr = simplify_replace_rtx (oldaddr, reg, src);
886 |       if (memory_address_addr_space_p (GET_MODE (mem), newaddr, as))
887 |         {
888 |           XEXP (mem, 0) = newaddr;
889 |           new_cost = address_cost (newaddr, GET_MODE (mem), as, speed);
890 |           XEXP (mem, 0) = oldaddr;
891 |           if (new_cost <= old_cost
892 |               && validate_change (use_insn,
893 |                                   &XEXP (mem, 0), newaddr, 0))
894 |             return true;
895 |         }
896 |     }
897 |   else
898 |     {
899 |       rtx new_set = single_set (use_insn);
900 |       if (new_set
901 |           && REG_P (SET_DEST (new_set))
902 |           && GET_CODE (SET_SRC (new_set)) == PLUS
903 |           && REG_P (XEXP (SET_SRC (new_set), 0))
904 |           && CONSTANT_P (XEXP (SET_SRC (new_set), 1)))
905 |         {
906 |           rtx new_src;
907 |           machine_mode mode = GET_MODE (SET_DEST (new_set));
908 |           int old_cost = set_src_cost (SET_SRC (new_set), mode, speed);
909 |
910 |           gcc_assert (rtx_equal_p (XEXP (SET_SRC (new_set), 0), reg));
911 |           new_src = simplify_replace_rtx (SET_SRC (new_set), reg, src);
912 |
913 |           if (set_src_cost (new_src, mode, speed) <= old_cost
914 |               && validate_change (use_insn, &SET_SRC (new_set),
915 |                                   new_src, 0))
916 |             return true;
917 |         }
918 |     }
919 |   return false;
920 | }
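    | // note: the replacement address is spliced into the MEM (line 888) only
    | // long enough to cost it, then the old address is restored (line 890)
    | // before validate_change is asked to make the change for real.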
921 |
922 | /* Called by reload_combine when scanning INSN.  This function tries to detect
923 |    patterns where a constant is added to a register, and the result is used
924 |    in an address.
925 |    Return true if no further processing is needed on INSN; false if it wasn't
926 |    recognized and should be handled normally.  */
927 |
928 | static bool
929 | reload_combine_recognize_const_pattern (rtx_insn *insn)
930 | {
931 |   int from_ruid = reload_combine_ruid;
932 |   rtx set, pat, reg, src, addreg;
933 |   unsigned int regno;
934 |   struct reg_use *use;
935 |   bool must_move_add;
936 |   rtx_insn *add_moved_after_insn = NULL;
937 |   int add_moved_after_ruid = 0;
938 |   int clobbered_regno = -1;
939 |
940 |   set = single_set (insn);
941 |   if (set == NULL_RTX)
942 |     return false;
943 |
944 |   reg = SET_DEST (set);
945 |   src = SET_SRC (set);
946 |   if (!REG_P (reg)
947 |       || REG_NREGS (reg) != 1
948 |       || GET_MODE (reg) != Pmode
949 |       || reg == stack_pointer_rtx)
950 |     return false;
951 |
952 |   regno = REGNO (reg);
953 |
954 |   /* We look for a REG1 = REG2 + CONSTANT insn, followed by either
955 |      uses of REG1 inside an address, or inside another add insn.  If
956 |      possible and profitable, merge the addition into subsequent
957 |      uses.  */
958 |   if (GET_CODE (src) != PLUS
959 |       || !REG_P (XEXP (src, 0))
960 |       || !CONSTANT_P (XEXP (src, 1)))
961 |     return false;
962 |
963 |   addreg = XEXP (src, 0);
964 |   must_move_add = rtx_equal_p (reg, addreg);
965 |
966 |   pat = PATTERN (insn);
967 |   if (must_move_add && set != pat)
968 |     {
969 |       /* We have to be careful when moving the add; apart from the
970 |          single_set there may also be clobbers.  Recognize one special
971 |          case, that of one clobber alongside the set (likely a clobber
972 |          of the CC register).  */
973 |       gcc_assert (GET_CODE (PATTERN (insn)) == PARALLEL);
974 |       if (XVECLEN (pat, 0) != 2 || XVECEXP (pat, 0, 0) != set
975 |           || GET_CODE (XVECEXP (pat, 0, 1)) != CLOBBER
976 |           || !REG_P (XEXP (XVECEXP (pat, 0, 1), 0)))
977 |         return false;
978 |       clobbered_regno = REGNO (XEXP (XVECEXP (pat, 0, 1), 0));
979 |     }
980 |
981 |   do
982 |     {
983 |       use = reload_combine_closest_single_use (regno, from_ruid);
984 |
985 |       if (use)
986 |         /* Start the search for the next use from here.  */
987 |         from_ruid = use->ruid;
988 |
989 |       if (use && GET_MODE (*use->usep) == Pmode)
990 |         {
991 |           bool delete_add = false;
992 |           rtx_insn *use_insn = use->insn;
993 |           int use_ruid = use->ruid;
994 |
995 |           /* Avoid moving the add insn past a jump.  */
996 |           if (must_move_add && use_ruid <= last_jump_ruid)
997 |             break;
998 |
999 |           /* If the add clobbers another hard reg in parallel, don't move
1000 |              it past a real set of this hard reg.  */
1001 |           if (must_move_add && clobbered_regno >= 0
1002 |               && reg_state[clobbered_regno].real_store_ruid >= use_ruid)
1003 |             break;
1004 |
1005 |           gcc_assert (reg_state[regno].store_ruid <= use_ruid);
1006 |           /* Avoid moving a use of ADDREG past a point where it is stored.  */
1007 |           if (reg_state[REGNO (addreg)].store_ruid > use_ruid)
1008 |             break;
1009 |
1010 |           /* We also must not move the addition past an insn that sets
1011 |              the same register, unless we can combine two add insns.  */
1012 |           if (must_move_add && reg_state[regno].store_ruid == use_ruid)
1013 |             {
1014 |               if (use->containing_mem == NULL_RTX)
1015 |                 delete_add = true;
1016 |               else
1017 |                 break;
1018 |             }
1019 |
1020 |           if (try_replace_in_use (use, reg, src))
1021 |             {
1022 |               reload_combine_purge_insn_uses (use_insn);
1023 |               reload_combine_note_use (&PATTERN (use_insn), use_insn,
1024 |                                        use_ruid, NULL_RTX);
1025 |
1026 |               if (delete_add)
1027 |                 {
1028 |                   fixup_debug_insns (reg, src, insn, use_insn);
1029 |                   delete_insn (insn);
1030 |                   return true;
1031 |                 }
1032 |               if (must_move_add)
1033 |                 {
1034 |                   add_moved_after_insn = use_insn;
1035 |                   add_moved_after_ruid = use_ruid;
1036 |                 }
1037 |               continue;
1038 |             }
1039 |         }
1040 |       /* If we get here, we couldn't handle this use.  */
1041 |       if (must_move_add)
1042 |         break;
1043 |     }
1044 |   while (use);
1045 |
1046 |   if (!must_move_add || add_moved_after_insn == NULL_RTX)
1047 |     /* Process the add normally.  */
1048 |     return false;
1049 |
1050 |   fixup_debug_insns (reg, src, insn, add_moved_after_insn);
1051 |
1052 |   reorder_insns (insn, insn, add_moved_after_insn);
1053 |   reload_combine_purge_reg_uses_after_ruid (regno, add_moved_after_ruid);
1054 |   reload_combine_split_ruids (add_moved_after_ruid - 1);
1055 |   reload_combine_note_use (&PATTERN (insn), insn,
1056 |                            add_moved_after_ruid, NULL_RTX);
1057 |   reg_state[regno].store_ruid = add_moved_after_ruid;
1058 |
1059 |   return true;
1060 | }
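    | // note: must_move_add covers the "reg = reg + const" form: because the
    | // add overwrites its own addend register, it cannot simply be deleted
    | // after folding it into a use; instead it is reordered to sit after the
    | // last use it was folded into (reorder_insns above).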
1061 | ||||
1062 | /* Called by reload_combine when scanning INSN. Try to detect a pattern we | |||
1063 | can handle and improve. Return true if no further processing is needed on | |||
1064 | INSN; false if it wasn't recognized and should be handled normally. */ | |||
1065 | ||||
1066 | static bool | |||
1067 | reload_combine_recognize_pattern (rtx_insn *insn) | |||
1068 | { | |||
1069 | rtx set, reg, src; | |||
1070 | ||||
1071 | set = single_set (insn); | |||
1072 | if (set == NULL_RTX(rtx) 0) | |||
1073 | return false; | |||
1074 | ||||
1075 | reg = SET_DEST (set)(((set)->u.fld[0]).rt_rtx); | |||
1076 | src = SET_SRC (set)(((set)->u.fld[1]).rt_rtx); | |||
1077 | if (!REG_P (reg)(((enum rtx_code) (reg)->code) == REG) || REG_NREGS (reg)((&(reg)->u.reg)->nregs) != 1) | |||
1078 | return false; | |||
1079 | ||||
1080 | unsigned int regno = REGNO (reg)(rhs_regno(reg)); | |||
1081 | machine_mode mode = GET_MODE (reg)((machine_mode) (reg)->mode); | |||
1082 | ||||
1083 | if (reg_state[regno].use_index < 0 | |||
1084 | || reg_state[regno].use_index >= RELOAD_COMBINE_MAX_USES16) | |||
1085 | return false; | |||
1086 | ||||
1087 | for (int i = reg_state[regno].use_index; | |||
1088 | i < RELOAD_COMBINE_MAX_USES16; i++) | |||
1089 | { | |||
1090 | struct reg_use *use = reg_state[regno].reg_use + i; | |||
1091 | if (GET_MODE (*use->usep)((machine_mode) (*use->usep)->mode) != mode) | |||
1092 | return false; | |||
1093 | /* Don't try to adjust (use (REGX)). */ | |||
1094 | if (GET_CODE (PATTERN (use->insn))((enum rtx_code) (PATTERN (use->insn))->code) == USE | |||
1095 | && &XEXP (PATTERN (use->insn), 0)(((PATTERN (use->insn))->u.fld[0]).rt_rtx) == use->usep) | |||
1096 | return false; | |||
1097 | } | |||

  /* Look for (set (REGX) (CONST_INT))
     (set (REGX) (PLUS (REGX) (REGY)))
     ...
     ... (MEM (REGX)) ...
     and convert it to
     (set (REGZ) (CONST_INT))
     ...
     ... (MEM (PLUS (REGZ) (REGY))) ... .

     First, check that we have (set (REGX) (PLUS (REGX) (REGY)))
     and that we know all uses of REGX before it dies.
     Also, explicitly check that REGX != REGY; our life information
     does not yet show whether REGY changes in this insn.  */

  if (GET_CODE (src) == PLUS
      && reg_state[regno].all_offsets_match
      && last_index_reg != -1
      && REG_P (XEXP (src, 1))
      && rtx_equal_p (XEXP (src, 0), reg)
      && !rtx_equal_p (XEXP (src, 1), reg)
      && last_label_ruid < reg_state[regno].use_ruid)
    {
      rtx base = XEXP (src, 1);
      rtx_insn *prev = prev_nonnote_nondebug_insn (insn);
      rtx prev_set = prev ? single_set (prev) : NULL_RTX;
      rtx index_reg = NULL_RTX;
      rtx reg_sum = NULL_RTX;
      int i;

      /* Now we need to set INDEX_REG to an index register (denoted as
         REGZ in the illustration above) and REG_SUM to the expression
         register+register that we want to use to substitute uses of REG
         (typically in MEMs) with.  First check REG and BASE for being
         index registers; we can use them even if they are not dead.  */
      if (TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS], regno)
          || TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS],
                                REGNO (base)))
        {
          index_reg = reg;
          reg_sum = src;
        }
      else
        {
          /* Otherwise, look for a free index register.  Since we have
             checked above that neither REG nor BASE are index registers,
             if we find anything at all, it will be different from these
             two registers.  */
          for (i = first_index_reg; i <= last_index_reg; i++)
            {
              if (TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS], i)
                  && reg_state[i].use_index == RELOAD_COMBINE_MAX_USES
                  && reg_state[i].store_ruid <= reg_state[regno].use_ruid
                  && (crtl->abi->clobbers_full_reg_p (i)
                      || df_regs_ever_live_p (i))
                  && (!frame_pointer_needed || i != HARD_FRAME_POINTER_REGNUM)
                  && !fixed_regs[i] && !global_regs[i]
                  && hard_regno_nregs (i, GET_MODE (reg)) == 1
                  && targetm.hard_regno_scratch_ok (i))
                {
                  index_reg = gen_rtx_REG (GET_MODE (reg), i);
                  reg_sum = gen_rtx_PLUS (GET_MODE (reg), index_reg, base);
                  break;
                }
            }
        }

      /* Check that PREV_SET is indeed (set (REGX) (CONST_INT)) and that
         (REGY), i.e. BASE, is not clobbered before the last use we'll
         create.  */
      if (reg_sum
          && prev_set
          && CONST_INT_P (SET_SRC (prev_set))
          && rtx_equal_p (SET_DEST (prev_set), reg)
          && (reg_state[REGNO (base)].store_ruid
              <= reg_state[regno].use_ruid))
        {
          /* Change destination register and, if necessary, the constant
             value in PREV, the constant loading instruction.  */
          validate_change (prev, &SET_DEST (prev_set), index_reg, 1);
          if (reg_state[regno].offset != const0_rtx)
            {
              HOST_WIDE_INT c
                = trunc_int_for_mode (UINTVAL (SET_SRC (prev_set))
                                      + UINTVAL (reg_state[regno].offset),
                                      GET_MODE (index_reg));
              validate_change (prev, &SET_SRC (prev_set), GEN_INT (c), 1);
            }

          /* Now for every use of REG that we have recorded, replace REG
             with REG_SUM.  */
          for (i = reg_state[regno].use_index;
               i < RELOAD_COMBINE_MAX_USES; i++)
            validate_unshare_change (reg_state[regno].reg_use[i].insn,
                                     reg_state[regno].reg_use[i].usep,
                                     /* Each change must have its own
                                        replacement.  */
                                     reg_sum, 1);

          if (apply_change_group ())
            {
              struct reg_use *lowest_ruid = NULL;

              /* For every new use of REG_SUM, we have to record the use
                 of BASE therein, i.e. operand 1.  */
              for (i = reg_state[regno].use_index;
                   i < RELOAD_COMBINE_MAX_USES; i++)
                {
                  struct reg_use *use = reg_state[regno].reg_use + i;
                  reload_combine_note_use (&XEXP (*use->usep, 1), use->insn,
                                           use->ruid, use->containing_mem);
                  if (lowest_ruid == NULL || use->ruid < lowest_ruid->ruid)
                    lowest_ruid = use;
                }

              fixup_debug_insns (reg, reg_sum, insn, lowest_ruid->insn);

              /* Delete the reg-reg addition.  */
              delete_insn (insn);

              if (reg_state[regno].offset != const0_rtx)
                /* Previous REG_EQUIV / REG_EQUAL notes for PREV
                   are now invalid.  */
                remove_reg_equal_equiv_notes (prev);

              reg_state[regno].use_index = RELOAD_COMBINE_MAX_USES;
              return true;
            }
        }
    }
  return false;
}

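/* Scan all insns of the current function from last to first, tracking
   register uses in reg_state, and try the two pattern recognizers above
   on every suitable insn.  */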
static void
reload_combine (void)
{
  rtx_insn *insn, *prev;
  basic_block bb;
  unsigned int r;
  int min_labelno, n_labels;
  HARD_REG_SET ever_live_at_start, *label_live;

  /* To avoid wasting too much time later searching for an index register,
     determine the minimum and maximum index register numbers.  */
  if (INDEX_REG_CLASS == NO_REGS)
    last_index_reg = -1;
  else if (first_index_reg == -1 && last_index_reg == 0)
    {
      for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
        if (TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS], r))
          {
            if (first_index_reg == -1)
              first_index_reg = r;

            last_index_reg = r;
          }

      /* If no index register is available, we can quit now.  Set LAST_INDEX_REG
         to -1 so we'll know to quit early the next time we get here.  */
      if (first_index_reg == -1)
        {
          last_index_reg = -1;
          return;
        }
    }

  /* Set up LABEL_LIVE and EVER_LIVE_AT_START.  The register lifetime
     information is a bit fuzzy immediately after reload, but it's
     still good enough to determine which registers are live at a jump
     destination.  */
  min_labelno = get_first_label_num ();
  n_labels = max_label_num () - min_labelno;
  label_live = XNEWVEC (HARD_REG_SET, n_labels);
  CLEAR_HARD_REG_SET (ever_live_at_start);

  FOR_EACH_BB_REVERSE_FN (bb, cfun)
    {
      insn = BB_HEAD (bb);
      if (LABEL_P (insn))
        {
          HARD_REG_SET live;
          bitmap live_in = df_get_live_in (bb);

          REG_SET_TO_HARD_REG_SET (live, live_in);
          compute_use_by_pseudos (&live, live_in);
          LABEL_LIVE (insn) = live;
          ever_live_at_start |= live;
        }
    }

  /* Initialize last_label_ruid, reload_combine_ruid and reg_state.  */
  last_label_ruid = last_jump_ruid = reload_combine_ruid = 0;
  for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
    {
      reg_state[r].store_ruid = 0;
      reg_state[r].real_store_ruid = 0;
      if (fixed_regs[r])
        reg_state[r].use_index = -1;
      else
        reg_state[r].use_index = RELOAD_COMBINE_MAX_USES;
    }

  for (insn = get_last_insn (); insn; insn = prev)
    {
      bool control_flow_insn;
      rtx note;

      prev = PREV_INSN (insn);

      /* We cannot do our optimization across labels.  Invalidating all the use
         information we have would be costly, so we just note where the label
         is and then later disable any optimization that would cross it.  */
      if (LABEL_P (insn))
        last_label_ruid = reload_combine_ruid;
      else if (BARRIER_P (insn))
        {
          /* Crossing a barrier resets all the use information.  */
          for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
            if (! fixed_regs[r])
              reg_state[r].use_index = RELOAD_COMBINE_MAX_USES;
        }
      else if (INSN_P (insn) && volatile_insn_p (PATTERN (insn)))
        /* Optimizations across insns being marked as volatile must be
           prevented.  All the usage information is invalidated
           here.  */
        for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
          if (! fixed_regs[r]
              && reg_state[r].use_index != RELOAD_COMBINE_MAX_USES)
            reg_state[r].use_index = -1;

      if (! NONDEBUG_INSN_P (insn))
        continue;

      reload_combine_ruid++;

      control_flow_insn = control_flow_insn_p (insn);
      if (control_flow_insn)
        last_jump_ruid = reload_combine_ruid;

      if (reload_combine_recognize_const_pattern (insn)
          || reload_combine_recognize_pattern (insn))
        continue;

      note_stores (insn, reload_combine_note_store, NULL);

      if (CALL_P (insn))
        {
          rtx link;
          HARD_REG_SET used_regs = insn_callee_abi (insn).full_reg_clobbers ();

          for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
            if (TEST_HARD_REG_BIT (used_regs, r))
              {
                reg_state[r].use_index = RELOAD_COMBINE_MAX_USES;
                reg_state[r].store_ruid = reload_combine_ruid;
              }

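          /* Registers mentioned in the call's USE chain are read by the
             call in ways we cannot track, so mark them as used in an
             unknown fashion.  */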
          for (link = CALL_INSN_FUNCTION_USAGE (insn); link;
               link = XEXP (link, 1))
            {
              rtx setuse = XEXP (link, 0);
              rtx usage_rtx = XEXP (setuse, 0);

              if (GET_CODE (setuse) == USE && REG_P (usage_rtx))
                {
                  unsigned int end_regno = END_REGNO (usage_rtx);
                  for (unsigned int i = REGNO (usage_rtx); i < end_regno; ++i)
                    reg_state[i].use_index = -1;
                }
            }
        }

      if (control_flow_insn && !ANY_RETURN_P (PATTERN (insn)))
        {
          /* Non-spill registers might be used at the jump destination in
             some unknown fashion, so we have to mark the unknown use.  */
          HARD_REG_SET *live;

          if ((condjump_p (insn) || condjump_in_parallel_p (insn))
              && JUMP_LABEL (insn))
            {
              if (ANY_RETURN_P (JUMP_LABEL (insn)))
                live = NULL;
              else
                live = &LABEL_LIVE (JUMP_LABEL (insn));
            }
          else
            live = &ever_live_at_start;

          if (live)
            for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
              if (TEST_HARD_REG_BIT (*live, r))
                reg_state[r].use_index = -1;
        }

      reload_combine_note_use (&PATTERN (insn), insn, reload_combine_ruid,
                               NULL_RTX);

      for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
        {
          if (REG_NOTE_KIND (note) == REG_INC && REG_P (XEXP (note, 0)))
            {
              int regno = REGNO (XEXP (note, 0));
              reg_state[regno].store_ruid = reload_combine_ruid;
              reg_state[regno].real_store_ruid = reload_combine_ruid;
              reg_state[regno].use_index = -1;
            }
        }
    }

  free (label_live);
}

/* Check if DST is a register or a subreg of a register; if it is,
   update store_ruid, real_store_ruid and use_index in the reg_state
   structure accordingly.  Called via note_stores from reload_combine.  */

static void
reload_combine_note_store (rtx dst, const_rtx set, void *data ATTRIBUTE_UNUSED)
{
  int regno = 0;
  int i;
  machine_mode mode = GET_MODE (dst);

  if (GET_CODE (dst) == SUBREG)
    {
      regno = subreg_regno_offset (REGNO (SUBREG_REG (dst)),
                                   GET_MODE (SUBREG_REG (dst)),
                                   SUBREG_BYTE (dst),
                                   GET_MODE (dst));
      dst = SUBREG_REG (dst);
    }
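  /* At this point REGNO holds only the subreg's word offset within a
     multi-register value; the hard register number itself is added in
     below, once DST is known to be a REG.  */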

  /* Some targets do argument pushes without adding REG_INC notes.  */

  if (MEM_P (dst))
    {
      dst = XEXP (dst, 0);
      if (GET_CODE (dst) == PRE_INC || GET_CODE (dst) == POST_INC
          || GET_CODE (dst) == PRE_DEC || GET_CODE (dst) == POST_DEC
          || GET_CODE (dst) == PRE_MODIFY || GET_CODE (dst) == POST_MODIFY)
        {
          unsigned int end_regno = END_REGNO (XEXP (dst, 0));
          for (unsigned int i = REGNO (XEXP (dst, 0)); i < end_regno; ++i)
            {
              /* We could probably do better, but for now mark the register
                 as used in an unknown fashion and set/clobbered at this
                 insn.  */
              reg_state[i].use_index = -1;
              reg_state[i].store_ruid = reload_combine_ruid;
              reg_state[i].real_store_ruid = reload_combine_ruid;
            }
        }
      else
        return;
    }

  if (!REG_P (dst))
    return;
  regno += REGNO (dst);

  /* note_stores might have stripped a STRICT_LOW_PART, so we have to be
     careful with registers / register parts that are not full words.
     Similarly for ZERO_EXTRACT.  */
  if (GET_CODE (SET_DEST (set)) == ZERO_EXTRACT
      || GET_CODE (SET_DEST (set)) == STRICT_LOW_PART)
    {
      for (i = end_hard_regno (mode, regno) - 1; i >= regno; i--)
        {
          reg_state[i].use_index = -1;
          reg_state[i].store_ruid = reload_combine_ruid;
          reg_state[i].real_store_ruid = reload_combine_ruid;
        }
    }
  else
    {
      for (i = end_hard_regno (mode, regno) - 1; i >= regno; i--)
        {
          reg_state[i].store_ruid = reload_combine_ruid;
          if (GET_CODE (set) == SET)
            reg_state[i].real_store_ruid = reload_combine_ruid;
          reg_state[i].use_index = RELOAD_COMBINE_MAX_USES;
        }
    }
}

/* XP points to a piece of rtl that has to be checked for any uses of
   registers.
   *XP is the pattern of INSN, or a part of it.
   Called from reload_combine, and recursively by itself.  */
static void
reload_combine_note_use (rtx *xp, rtx_insn *insn, int ruid, rtx containing_mem)
{
  rtx x = *xp;
  enum rtx_code code = x->code;
  const char *fmt;
  int i, j;
  rtx offset = const0_rtx; /* For the REG case below.  */

  switch (code)
    {
    case SET:
      if (REG_P (SET_DEST (x)))
        {
          reload_combine_note_use (&SET_SRC (x), insn, ruid, NULL_RTX);
          return;
        }
      break;

    case USE:
      /* If this is the USE of a return value, we can't change it.  */
      if (REG_P (XEXP (x, 0)) && REG_FUNCTION_VALUE_P (XEXP (x, 0)))
        {
          /* Mark the return register as used in an unknown fashion.  */
          rtx reg = XEXP (x, 0);
          unsigned int end_regno = END_REGNO (reg);
          for (unsigned int regno = REGNO (reg); regno < end_regno; ++regno)
            reg_state[regno].use_index = -1;
          return;
        }
      break;

    case CLOBBER:
      if (REG_P (SET_DEST (x)))
        {
          /* No spurious CLOBBERs of pseudo registers may remain.  */
          gcc_assert (REGNO (SET_DEST (x)) < FIRST_PSEUDO_REGISTER);
          return;
        }
      break;

    case PLUS:
      /* We are interested in (plus (reg) (const_int)).  */
      if (!REG_P (XEXP (x, 0))
          || !CONST_INT_P (XEXP (x, 1)))
        break;
      offset = XEXP (x, 1);
      x = XEXP (x, 0);
      /* Fall through.  */
    case REG:
      {
        int regno = REGNO (x);
        int use_index;
        int nregs;

        /* No spurious USEs of pseudo registers may remain.  */
        gcc_assert (regno < FIRST_PSEUDO_REGISTER);

        nregs = REG_NREGS (x);

        /* We can't substitute into multi-hard-reg uses.  */
        if (nregs > 1)
          {
            while (--nregs >= 0)
              reg_state[regno + nregs].use_index = -1;
            return;
          }

        /* We may be called to update uses in previously seen insns.
           Don't add uses beyond the last store we saw.  */
        if (ruid < reg_state[regno].store_ruid)
          return;

        /* If this register is already used in some unknown fashion, we
           can't do anything.
           If we decrement the index from zero to -1, we can't store more
           uses, so this register becomes used in an unknown fashion.  */
        use_index = --reg_state[regno].use_index;
        if (use_index < 0)
          return;

        if (use_index == RELOAD_COMBINE_MAX_USES - 1)
          {
            /* This is the first use of this register we have seen since we
               marked it as dead.  */
            reg_state[regno].offset = offset;
            reg_state[regno].all_offsets_match = true;
            reg_state[regno].use_ruid = ruid;
          }
        else
          {
            if (reg_state[regno].use_ruid > ruid)
              reg_state[regno].use_ruid = ruid;

            if (! rtx_equal_p (offset, reg_state[regno].offset))
              reg_state[regno].all_offsets_match = false;
          }

        reg_state[regno].reg_use[use_index].insn = insn;
        reg_state[regno].reg_use[use_index].ruid = ruid;
        reg_state[regno].reg_use[use_index].containing_mem = containing_mem;
        reg_state[regno].reg_use[use_index].usep = xp;
        return;
      }

    case MEM:
      containing_mem = x;
      break;

    default:
      break;
    }

  /* Recursively process the components of X.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        reload_combine_note_use (&XEXP (x, i), insn, ruid, containing_mem);
      else if (fmt[i] == 'E')
        {
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            reload_combine_note_use (&XVECEXP (x, i, j), insn, ruid,
                                     containing_mem);
        }
    }
}

/* See if we can reduce the cost of a constant by replacing a move
   with an add.  We track situations in which a register is set to a
   constant or to a register plus a constant.  */
/* We cannot do our optimization across labels.  Invalidating all the
   information about register contents we have would be costly, so we
   use move2add_last_label_luid to note where the label is and then
   later disable any optimization that would cross it.
   reg_offset[n] / reg_base_reg[n] / reg_symbol_ref[n] / reg_mode[n]
   are only valid if reg_set_luid[n] is greater than
   move2add_last_label_luid.
   For a set that established a new (potential) base register with
   non-constant value, we use move2add_luid from the place where the
   setting insn is encountered; registers based off that base then
   get the same reg_set_luid.  Constants all get
   move2add_last_label_luid + 1 as their reg_set_luid.  */
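/* For example, after a label recorded at luid 5, a register loaded with
   a constant gets reg_set_luid 6, which is still greater than
   move2add_last_label_luid and hence valid, while anything recorded
   before the label keeps a luid <= 5 and is treated as unknown.  */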
static int reg_set_luid[FIRST_PSEUDO_REGISTER];

/* If reg_base_reg[n] is negative, register n has been set to
   reg_offset[n] or reg_symbol_ref[n] + reg_offset[n] in mode reg_mode[n].
   If reg_base_reg[n] is non-negative, register n has been set to the
   sum of reg_offset[n] and the value of register reg_base_reg[n]
   before reg_set_luid[n], calculated in mode reg_mode[n].
   For multi-hard-register registers, all but the first one are
   recorded as BLKmode in reg_mode.  Setting reg_mode to VOIDmode
   marks it as invalid.  */
static HOST_WIDE_INT reg_offset[FIRST_PSEUDO_REGISTER];
static int reg_base_reg[FIRST_PSEUDO_REGISTER];
static rtx reg_symbol_ref[FIRST_PSEUDO_REGISTER];
static machine_mode reg_mode[FIRST_PSEUDO_REGISTER];

/* move2add_luid is linearly increased while scanning the instructions
   from first to last.  It is used to set reg_set_luid in
   reload_cse_move2add and move2add_note_store.  */
static int move2add_luid;

/* move2add_last_label_luid is set whenever a label is found.  Labels
   invalidate all previously collected reg_offset data.  */
static int move2add_last_label_luid;

/* ??? We don't know how zero / sign extension is handled, hence we
   can't go from a narrower to a wider mode.  */
#define MODES_OK_FOR_MOVE2ADD(OUTMODE, INMODE) \
  (GET_MODE_SIZE (OUTMODE) == GET_MODE_SIZE (INMODE) \
   || (GET_MODE_SIZE (OUTMODE) <= GET_MODE_SIZE (INMODE) \
       && TRULY_NOOP_TRUNCATION_MODES_P (OUTMODE, INMODE)))
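/* For example, a value known in SImode may also be used in QImode on
   targets where the truncation is a no-op, while a QImode value never
   validates a use in the wider SImode.  */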

/* Record that REG is being set to a value with the mode of REG.  */

static void
move2add_record_mode (rtx reg)
{
  int regno, nregs;
  machine_mode mode = GET_MODE (reg);

  if (GET_CODE (reg) == SUBREG)
    {
      regno = subreg_regno (reg);
      nregs = subreg_nregs (reg);
    }
  else if (REG_P (reg))
    {
      regno = REGNO (reg);
      nregs = REG_NREGS (reg);
    }
  else
    gcc_unreachable ();
  for (int i = nregs - 1; i > 0; i--)
    reg_mode[regno + i] = BLKmode;
  reg_mode[regno] = mode;
}

/* Record that REG is being set to the sum of SYM and OFF.  */

static void
move2add_record_sym_value (rtx reg, rtx sym, rtx off)
{
  int regno = REGNO (reg);

  move2add_record_mode (reg);
  reg_set_luid[regno] = move2add_luid;
  reg_base_reg[regno] = -1;
  reg_symbol_ref[regno] = sym;
  reg_offset[regno] = INTVAL (off);
}

/* Check if REGNO contains a valid value in MODE.  */

static bool
move2add_valid_value_p (int regno, scalar_int_mode mode)
{
  if (reg_set_luid[regno] <= move2add_last_label_luid)
    return false;

  if (mode != reg_mode[regno])
    {
      scalar_int_mode old_mode;
      if (!is_a <scalar_int_mode> (reg_mode[regno], &old_mode)
          || !MODES_OK_FOR_MOVE2ADD (mode, old_mode)
          || !REG_CAN_CHANGE_MODE_P (regno, old_mode, mode))
        return false;
      /* The value loaded into regno in reg_mode[regno] is also valid in
         mode after truncation only if (REG:mode regno) is the lowpart of
         (REG:reg_mode[regno] regno).  Now, for big endian, the starting
         regno of the lowpart might be different.  */
      poly_int64 s_off = subreg_lowpart_offset (mode, old_mode);
      s_off = subreg_regno_offset (regno, old_mode, s_off, mode);
      if (maybe_ne (s_off, 0))
        /* We could in principle adjust regno, check reg_mode[regno] to be
           BLKmode, and return s_off to the caller (vs. -1 for failure),
           but we currently have no callers that could make use of this
           information.  */
        return false;
    }

  for (int i = end_hard_regno (mode, regno) - 1; i > regno; i--)
    if (reg_mode[i] != BLKmode)
      return false;
  return true;
}

/* This function is called with INSN that sets REG (of mode MODE)
   to (SYM + OFF), while REG is known to already have value (SYM + offset).
   This function tries to change INSN into an add instruction
   (set (REG) (plus (REG) (OFF - offset))) using the known value.
   It also updates the information about REG's known value.
   Return true if we made a change.  */
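/* For example, if REG is known to hold (SYM + 4) and INSN sets REG to
   (SYM + 12), INSN can become (set (REG) (plus (REG) (const_int 8))).  */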

static bool
move2add_use_add2_insn (scalar_int_mode mode, rtx reg, rtx sym, rtx off,
                        rtx_insn *insn)
{
  rtx pat = PATTERN (insn);
  rtx src = SET_SRC (pat);
  int regno = REGNO (reg);
  rtx new_src = gen_int_mode (UINTVAL (off) - reg_offset[regno], mode);
  bool speed = optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn));
  bool changed = false;

  /* (set (reg) (plus (reg) (const_int 0))) is not canonical;
     use (set (reg) (reg)) instead.
     We don't delete this insn, nor do we convert it into a
     note, to avoid losing register notes or the return
     value flag.  jump2 already knows how to get rid of
     no-op moves.  */
  if (new_src == const0_rtx)
    {
      /* If the constants are different, this is a truncation that, if
         turned into (set (reg) (reg)), would be discarded.  Maybe we
         should try a truncMN pattern?  */
      if (INTVAL (off) == reg_offset[regno])
        changed = validate_change (insn, &SET_SRC (pat), reg, 0);
    }
  else
    {
      struct full_rtx_costs oldcst, newcst;
      rtx tem = gen_rtx_PLUS (mode, reg, new_src);

      get_full_set_rtx_cost (pat, &oldcst);
      SET_SRC (pat) = tem;
      get_full_set_rtx_cost (pat, &newcst);
      SET_SRC (pat) = src;

      if (costs_lt_p (&newcst, &oldcst, speed)
          && have_add2_insn (reg, new_src))
        changed = validate_change (insn, &SET_SRC (pat), tem, 0);
      else if (sym == NULL_RTX && mode != BImode)
        {
          scalar_int_mode narrow_mode;
          FOR_EACH_MODE_UNTIL (narrow_mode, mode)
            {
              if (have_insn_for (STRICT_LOW_PART, narrow_mode)
                  && ((reg_offset[regno] & ~GET_MODE_MASK (narrow_mode))
                      == (INTVAL (off) & ~GET_MODE_MASK (narrow_mode))))
                {
                  rtx narrow_reg = gen_lowpart_common (narrow_mode, reg);
                  rtx narrow_src = gen_int_mode (INTVAL (off),
                                                 narrow_mode);
                  rtx new_set
                    = gen_rtx_SET (gen_rtx_STRICT_LOW_PART (VOIDmode,
                                                            narrow_reg),
                                   narrow_src);
                  get_full_set_rtx_cost (new_set, &newcst);
                  if (costs_lt_p (&newcst, &oldcst, speed))
                    {
                      changed = validate_change (insn, &PATTERN (insn),
                                                 new_set, 0);
                      if (changed)
                        break;
                    }
                }
            }
        }
    }
  move2add_record_sym_value (reg, sym, off);
  return changed;
}


/* This function is called with INSN that sets REG (of mode MODE) to
   (SYM + OFF), but REG doesn't have known value (SYM + offset).  This
   function tries to find another register which is known to already have
   value (SYM + offset) and change INSN into an add instruction
   (set (REG) (plus (the found register) (OFF - offset))) if such
   a register is found.  It also updates the information about
   REG's known value.
   Return true iff we made a change.  */
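/* For example, if some register R already holds (SYM + 8) and INSN sets
   REG to (SYM + 20), INSN can become
   (set (REG) (plus (R) (const_int 12))).  */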

static bool
move2add_use_add3_insn (scalar_int_mode mode, rtx reg, rtx sym, rtx off,
                        rtx_insn *insn)
{
  rtx pat = PATTERN (insn);
  rtx src = SET_SRC (pat);
  int regno = REGNO (reg);
  int min_regno = 0;
  bool speed = optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn));
  int i;
  bool changed = false;
  struct full_rtx_costs oldcst, newcst, mincst;
  rtx plus_expr;

  init_costs_to_max (&mincst);
  get_full_set_rtx_cost (pat, &oldcst);

  plus_expr = gen_rtx_PLUS (GET_MODE (reg), reg, const0_rtx);
  SET_SRC (pat) = plus_expr;

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (move2add_valid_value_p (i, mode)
        && reg_base_reg[i] < 0
        && reg_symbol_ref[i] != NULL_RTX
        && rtx_equal_p (sym, reg_symbol_ref[i]))
      {
        rtx new_src = gen_int_mode (UINTVAL (off) - reg_offset[i],
                                    GET_MODE (reg));
        /* (set (reg) (plus (reg) (const_int 0))) is not canonical;
           use (set (reg) (reg)) instead.
           We don't delete this insn, nor do we convert it into a
           note, to avoid losing register notes or the return
           value flag.  jump2 already knows how to get rid of
           no-op moves.  */
        if (new_src == const0_rtx)
          {
            init_costs_to_zero (&mincst);
            min_regno = i;
            break;
          }
        else
          {
            XEXP (plus_expr, 1) = new_src;
            get_full_set_rtx_cost (pat, &newcst);

            if (costs_lt_p (&newcst, &mincst, speed))
              {
                mincst = newcst;
                min_regno = i;
              }
          }
      }
  SET_SRC (pat) = src;

  if (costs_lt_p (&mincst, &oldcst, speed))
    {
      rtx tem;

      tem = gen_rtx_REG (GET_MODE (reg), min_regno);
      if (i != min_regno)
        {
          rtx new_src = gen_int_mode (UINTVAL (off) - reg_offset[min_regno],
                                      GET_MODE (reg));
          tem = gen_rtx_PLUS (GET_MODE (reg), tem, new_src);
        }
      if (validate_change (insn, &SET_SRC (pat), tem, 0))
        changed = true;
    }
  reg_set_luid[regno] = move2add_luid;
  move2add_record_sym_value (reg, sym, off);
  return changed;
}

/* Convert move insns with constant inputs to additions if they are cheaper.
   Return true if any changes were made.  */
static bool
reload_cse_move2add (rtx_insn *first)
{
  int i;
  rtx_insn *insn;
  bool changed = false;

  for (i = FIRST_PSEUDO_REGISTER - 1; i >= 0; i--)
    {
      reg_set_luid[i] = 0;
      reg_offset[i] = 0;
      reg_base_reg[i] = 0;
      reg_symbol_ref[i] = NULL_RTX;
      reg_mode[i] = VOIDmode;
    }

  move2add_last_label_luid = 0;
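  /* Insn luids start at 2; constants recorded before the first label get
     move2add_last_label_luid + 1 == 1, so constant and insn luids stay
     distinct.  */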
  move2add_luid = 2;
  for (insn = first; insn; insn = NEXT_INSN (insn), move2add_luid++)
    {
      rtx pat, note;

      if (LABEL_P (insn))
        {
          move2add_last_label_luid = move2add_luid;
          /* We're going to increment move2add_luid twice after a
             label, so that we can use move2add_last_label_luid + 1 as
             the luid for constants.  */
          move2add_luid++;
          continue;
        }
      if (! INSN_P (insn))
        continue;
      pat = PATTERN (insn);
      /* For simplicity, we only perform this optimization on
         straightforward SETs.  */
      scalar_int_mode mode;
      if (GET_CODE (pat) == SET
          && REG_P (SET_DEST (pat))
          && is_a <scalar_int_mode> (GET_MODE (SET_DEST (pat)), &mode))
        {
          rtx reg = SET_DEST (pat);
          int regno = REGNO (reg);
          rtx src = SET_SRC (pat);

          /* Check if we have valid information on the contents of this
             register in the mode of REG.  */
          if (move2add_valid_value_p (regno, mode)
              && dbg_cnt (cse2_move2add))
            {
              /* Try to transform (set (REGX) (CONST_INT A))
                                  ...
                                  (set (REGX) (CONST_INT B))
                 to
                                  (set (REGX) (CONST_INT A))
                                  ...
                                  (set (REGX) (plus (REGX) (CONST_INT B-A)))
                 or
                                  (set (REGX) (CONST_INT A))
                                  ...
                                  (set (STRICT_LOW_PART (REGX)) (CONST_INT B))
              */

              if (CONST_INT_P (src)
                  && reg_base_reg[regno] < 0
                  && reg_symbol_ref[regno] == NULL_RTX)
                {
                  changed |= move2add_use_add2_insn (mode, reg, NULL_RTX,
                                                     src, insn);
                  continue;
                }

              /* Try to transform (set (REGX) (REGY))
                                  (set (REGX) (PLUS (REGX) (CONST_INT A)))
                                  ...
                                  (set (REGX) (REGY))
                                  (set (REGX) (PLUS (REGX) (CONST_INT B)))
                 to
                                  (set (REGX) (REGY))
                                  (set (REGX) (PLUS (REGX) (CONST_INT A)))
                                  ...
                                  (set (REGX) (plus (REGX) (CONST_INT B-A)))  */
              else if (REG_P (src)
                       && reg_set_luid[regno] == reg_set_luid[REGNO (src)]
                       && reg_base_reg[regno] == reg_base_reg[REGNO (src)]
                       && move2add_valid_value_p (REGNO (src), mode))
                {
                  rtx_insn *next = next_nonnote_nondebug_insn (insn);
                  rtx set = NULL_RTX;
                  if (next)
                    set = single_set (next);
                  if (set
                      && SET_DEST (set) == reg
                      && GET_CODE (SET_SRC (set)) == PLUS
                      && XEXP (SET_SRC (set), 0) == reg
                      && CONST_INT_P (XEXP (SET_SRC (set), 1)))
                    {
                      rtx src3 = XEXP (SET_SRC (set), 1);
                      unsigned HOST_WIDE_INT added_offset = UINTVAL (src3);
                      HOST_WIDE_INT base_offset = reg_offset[REGNO (src)];
                      HOST_WIDE_INT regno_offset = reg_offset[regno];
                      rtx new_src =
                        gen_int_mode (added_offset
                                      + base_offset
                                      - regno_offset,
                                      mode);
                      bool success = false;
                      bool speed = optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn));

                      if (new_src == const0_rtx)
                        /* See above why we create (set (reg) (reg)) here.  */
                        success
                          = validate_change (next, &SET_SRC (set), reg, 0);
                      else
                        {
                          rtx old_src = SET_SRC (set);
                          struct full_rtx_costs oldcst, newcst;
                          rtx tem = gen_rtx_PLUS (mode, reg, new_src);

                          get_full_set_rtx_cost (set, &oldcst);
                          SET_SRC (set) = tem;
                          get_full_set_src_cost (tem, mode, &newcst);
                          SET_SRC (set) = old_src;
                          costs_add_n_insns (&oldcst, 1);

                          if (costs_lt_p (&newcst, &oldcst, speed)
                              && have_add2_insn (reg, new_src))
                            {
                              rtx newpat = gen_rtx_SET (reg, tem);
                              success
                                = validate_change (next, &PATTERN (next),
                                                   newpat, 0);
                            }
                        }
                      if (success)
                        delete_insn (insn);
                      changed |= success;
                      insn = next;
                      move2add_record_mode (reg);
                      reg_offset[regno]
                        = trunc_int_for_mode (added_offset + base_offset,
                                              mode);
                      continue;
                    }
                }
            }

          /* Try to transform
             (set (REGX) (CONST (PLUS (SYMBOL_REF) (CONST_INT A))))
             ...
             (set (REGY) (CONST (PLUS (SYMBOL_REF) (CONST_INT B))))
             to
             (set (REGX) (CONST (PLUS (SYMBOL_REF) (CONST_INT A))))
             ...
             (set (REGY) (CONST (PLUS (REGX) (CONST_INT B-A))))  */
          if ((GET_CODE (src) == SYMBOL_REF
               || (GET_CODE (src) == CONST
                   && GET_CODE (XEXP (src, 0)) == PLUS
                   && GET_CODE (XEXP (XEXP (src, 0), 0)) == SYMBOL_REF
                   && CONST_INT_P (XEXP (XEXP (src, 0), 1))))
              && dbg_cnt (cse2_move2add))
            {
              rtx sym, off;

              if (GET_CODE (src) == SYMBOL_REF)
                {
                  sym = src;
                  off = const0_rtx;
                }
              else
                {
                  sym = XEXP (XEXP (src, 0), 0);
                  off = XEXP (XEXP (src, 0), 1);
                }

              /* If the reg already contains the value which is the sum of
                 SYM and some constant value, we can use an add2 insn.  */
              if (move2add_valid_value_p (regno, mode)
                  && reg_base_reg[regno] < 0
                  && reg_symbol_ref[regno] != NULL_RTX
                  && rtx_equal_p (sym, reg_symbol_ref[regno]))
                changed |= move2add_use_add2_insn (mode, reg, sym, off, insn);

              /* Otherwise, we have to find a register whose value is the
                 sum of SYM and some constant value.  */
              else
                changed |= move2add_use_add3_insn (mode, reg, sym, off, insn);

              continue;
            }
        }

      for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
        {
          if (REG_NOTE_KIND (note) == REG_INC
              && REG_P (XEXP (note, 0)))
            {
              /* Reset the information about this register.  */
              int regno = REGNO (XEXP (note, 0));
              if (regno < FIRST_PSEUDO_REGISTER)
                {
                  move2add_record_mode (XEXP (note, 0));
                  reg_mode[regno] = VOIDmode;
                }
            }
        }

      /* There are no REG_INC notes for SP autoinc.  */
      subrtx_var_iterator::array_type array;
      FOR_EACH_SUBRTX_VAR (iter, array, PATTERN (insn), NONCONST)
        {
          rtx mem = *iter;
          if (mem
              && MEM_P (mem)
              && GET_RTX_CLASS (GET_CODE (XEXP (mem, 0))) == RTX_AUTOINC)
            {
              if (XEXP (XEXP (mem, 0), 0) == stack_pointer_rtx)
                reg_mode[STACK_POINTER_REGNUM] = VOIDmode;
            }
        }

      note_stores (insn, move2add_note_store, insn);

      /* If INSN is a conditional branch, we try to extract an
         implicit set out of it.  */
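      /* If the branch is taken when (ne (reg) (const_int)) holds, the
         register equals that constant on the fall-through path; the
         linear scan follows that path only until the next label
         invalidates all tracked values, so recording the equality as a
         set is safe.  */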
      if (any_condjump_p (insn))
        {
          rtx cnd = fis_get_condition (insn);

          if (cnd != NULL_RTX
              && GET_CODE (cnd) == NE
              && REG_P (XEXP (cnd, 0))
              && !reg_set_p (XEXP (cnd, 0), insn)
              /* The following two checks, which are also in
                 move2add_note_store, are intended to reduce the
                 number of calls to gen_rtx_SET to avoid memory
                 allocation if possible.  */
              && SCALAR_INT_MODE_P (GET_MODE (XEXP (cnd, 0)))
              && REG_NREGS (XEXP (cnd, 0)) == 1
              && CONST_INT_P (XEXP (cnd, 1)))
            {
              rtx implicit_set =
                gen_rtx_SET (XEXP (cnd, 0), XEXP (cnd, 1));
              move2add_note_store (SET_DEST (implicit_set), implicit_set, insn);
            }
        }

      /* If this is a CALL_INSN, all call used registers are stored with
         unknown values.  */
      if (CALL_P (insn))
        {
          function_abi callee_abi = insn_callee_abi (insn);
          for (i = FIRST_PSEUDO_REGISTER - 1; i >= 0; i--)
            if (reg_mode[i] != VOIDmode
                && reg_mode[i] != BLKmode
                && callee_abi.clobbers_reg_p (reg_mode[i], i))
              /* Reset the information about this register.  */
              reg_mode[i] = VOIDmode;
        }
    }
  return changed;
}

/* SET is a SET or CLOBBER that sets DST.  DATA is the insn which
   contains SET.
   Update reg_set_luid, reg_offset and reg_base_reg accordingly.
   Called from reload_cse_move2add via note_stores.  */

static void
move2add_note_store (rtx dst, const_rtx set, void *data)
{
  rtx_insn *insn = (rtx_insn *) data;
  unsigned int regno = 0;
  scalar_int_mode mode;

  if (GET_CODE (dst) == SUBREG)
    regno = subreg_regno (dst);
  else if (REG_P (dst))
    regno = REGNO (dst);
  else
    return;

  if (!is_a <scalar_int_mode> (GET_MODE (dst), &mode))
    goto invalidate;

  if (GET_CODE (set) == SET)
    {
      rtx note, sym = NULL_RTX;
      rtx off;

      note = find_reg_equal_equiv_note (insn);
      if (note && GET_CODE (XEXP (note, 0)) == SYMBOL_REF)
        {
          sym = XEXP (note, 0);
          off = const0_rtx;
        }
      else if (note && GET_CODE (XEXP (note, 0)) == CONST
               && GET_CODE (XEXP (XEXP (note, 0), 0)) == PLUS
               && GET_CODE (XEXP (XEXP (XEXP (note, 0), 0), 0)) == SYMBOL_REF
               && CONST_INT_P (XEXP (XEXP (XEXP (note, 0), 0), 1)))
        {
          sym = XEXP (XEXP (XEXP (note, 0), 0), 0);
          off = XEXP (XEXP (XEXP (note, 0), 0), 1);
        }

      if (sym != NULL_RTX)
        {
          move2add_record_sym_value (dst, sym, off);
          return;
        }
    }

  if (GET_CODE (set) == SET
      && GET_CODE (SET_DEST (set)) != ZERO_EXTRACT
      && GET_CODE (SET_DEST (set)) != STRICT_LOW_PART)
    {
      rtx src = SET_SRC (set);
      rtx base_reg;
      unsigned HOST_WIDE_INT offset;
      int base_regno;

      switch (GET_CODE (src))
        {
        case PLUS:
          if (REG_P (XEXP (src, 0)))
            {
              base_reg = XEXP (src, 0);

              if (CONST_INT_P (XEXP (src, 1)))
                offset = UINTVAL (XEXP (src, 1));
              else if (REG_P (XEXP (src, 1))
                       && move2add_valid_value_p (REGNO (XEXP (src, 1)), mode))
                {
                  if (reg_base_reg[REGNO (XEXP (src, 1))] < 0
                      && reg_symbol_ref[REGNO (XEXP (src, 1))] == NULL_RTX)
                    offset = reg_offset[REGNO (XEXP (src, 1))];
                  /* Maybe the first register is known to be a
                     constant.  */
                  else if (move2add_valid_value_p (REGNO (base_reg), mode)
                           && reg_base_reg[REGNO (base_reg)] < 0
                           && reg_symbol_ref[REGNO (base_reg)] == NULL_RTX)
                    {
                      offset = reg_offset[REGNO (base_reg)];
                      base_reg = XEXP (src, 1);
                    }
                  else
                    goto invalidate;
                }
              else
                goto invalidate;

              break;
            }

          goto invalidate;

        case REG:
          base_reg = src;
          offset = 0;
          break;

        case CONST_INT:
          /* Start tracking the register as a constant.  */
          reg_base_reg[regno] = -1;
          reg_symbol_ref[regno] = NULL_RTX;
          reg_offset[regno] = INTVAL (SET_SRC (set));
          /* We assign the same luid to all registers set to constants.  */
          reg_set_luid[regno] = move2add_last_label_luid + 1;
          move2add_record_mode (dst);
          return;

        default:
          goto invalidate;
        }

      base_regno = REGNO (base_reg);
      /* If information about the base register is not valid, set it
         up as a new base register, pretending its value is known
         starting from the current insn.  */
      if (!move2add_valid_value_p (base_regno, mode))
        {
          reg_base_reg[base_regno] = base_regno;
          reg_symbol_ref[base_regno] = NULL_RTX;
          reg_offset[base_regno] = 0;
          reg_set_luid[base_regno] = move2add_luid;
          gcc_assert (GET_MODE (base_reg) == mode);
          move2add_record_mode (base_reg);
        }

      /* Copy base information from our base register.  */
      reg_set_luid[regno] = reg_set_luid[base_regno];
      reg_base_reg[regno] = reg_base_reg[base_regno];
      reg_symbol_ref[regno] = reg_symbol_ref[base_regno];

      /* Compute the sum of the offsets or constants.  */
      reg_offset[regno]
        = trunc_int_for_mode (offset + reg_offset[base_regno], mode);

      move2add_record_mode (dst);
    }
  else
    {
    invalidate:
      /* Invalidate the contents of the register.  */
      move2add_record_mode (dst);
      reg_mode[regno] = VOIDmode;
    }
}

namespace {

const pass_data pass_data_postreload_cse =
{
  RTL_PASS, /* type */
  "postreload", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_RELOAD_CSE_REGS, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_df_finish, /* todo_flags_finish */
};

class pass_postreload_cse : public rtl_opt_pass
{
public:
  pass_postreload_cse (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_postreload_cse, ctxt)
  {}

  /* opt_pass methods: */
  bool gate (function *) final override
  {
    return (optimize > 0 && reload_completed);
  }

  unsigned int execute (function *) final override;

}; // class pass_postreload_cse

unsigned int
pass_postreload_cse::execute (function *fun)
{
  if (!dbg_cnt (postreload_cse))
    return 0;

  /* Do a very simple CSE pass over just the hard registers.  */
  reload_cse_regs (get_insns ());
  /* Reload_cse_regs can eliminate potentially-trapping MEMs.
     Remove any EH edges associated with them.  */
  if (fun->can_throw_non_call_exceptions
      && purge_all_dead_edges ())
    cleanup_cfg (0);

  return 0;
}

} // anon namespace

rtl_opt_pass *
make_pass_postreload_cse (gcc::context *ctxt)
{
  return new pass_postreload_cse (ctxt);
}