File: | build/gcc/cprop.cc |
Warning: | line 296, column 24: Although the value stored to 'set' is used in the enclosing expression, the value is never actually read from 'set' |
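Note: | The flagged statement is a comma expression whose second operand stores a freshly built SET rtx into the local variable 'set'; nothing reads 'set' after that point (insert_set_in_table is called with 'dest' and 'src' directly), so the store is dead. Below is a minimal standalone sketch of the same shape -- illustrative C only, not GCC code, with make_value as a hypothetical stand-in for gen_rtx_SET: |

    #include <stdio.h>

    /* Hypothetical stand-in for gen_rtx_SET.  */
    static int
    make_value (int dest, int src)
    {
      return dest * 100 + src;
    }

    int
    main (void)
    {
      int dest = 1, src = 2, set = 0;

      /* Same shape as source line 296: the comma expression stores to
         'set', but 'set' is never read afterwards, so the analyzer
         reports a dead store.  Dropping ", set = make_value (dest, src)"
         (and then the unused variable) silences it.  */
      src = 3, set = make_value (dest, src);

      /* Only dest and src are consumed from here on.  */
      printf ("%d := %d\n", dest, src);
      return 0;
    }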
1 | /* Global constant/copy propagation for RTL. |
2 | Copyright (C) 1997-2023 Free Software Foundation, Inc. |
3 | |
4 | This file is part of GCC. |
5 | |
6 | GCC is free software; you can redistribute it and/or modify it under |
7 | the terms of the GNU General Public License as published by the Free |
8 | Software Foundation; either version 3, or (at your option) any later |
9 | version. |
10 | |
11 | GCC is distributed in the hope that it will be useful, but WITHOUT ANY |
12 | WARRANTY; without even the implied warranty of MERCHANTABILITY or |
13 | FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License |
14 | for more details. |
15 | |
16 | You should have received a copy of the GNU General Public License |
17 | along with GCC; see the file COPYING3. If not see |
18 | <http://www.gnu.org/licenses/>. */ |
19 | |
20 | #include "config.h" |
21 | #include "system.h" |
22 | #include "coretypes.h" |
23 | #include "backend.h" |
24 | #include "rtl.h" |
25 | #include "cfghooks.h" |
26 | #include "df.h" |
27 | #include "insn-config.h" |
28 | #include "memmodel.h" |
29 | #include "emit-rtl.h" |
30 | #include "recog.h" |
31 | #include "diagnostic-core.h" |
32 | #include "toplev.h" |
33 | #include "cfgrtl.h" |
34 | #include "cfganal.h" |
35 | #include "lcm.h" |
36 | #include "cfgcleanup.h" |
37 | #include "cselib.h" |
38 | #include "intl.h" |
39 | #include "tree-pass.h" |
40 | #include "dbgcnt.h" |
41 | #include "cfgloop.h" |
42 | #include "gcse.h" |
43 | |
44 | |
45 | /* An obstack for our working variables. */ |
46 | static struct obstack cprop_obstack; |
47 | |
48 | /* Occurrence of an expression. |
49 | There is one per basic block. If a pattern appears more than once the |
50 | last appearance is used. */ |
51 | |
52 | struct cprop_occr |
53 | { |
54 | /* Next occurrence of this expression. */ |
55 | struct cprop_occr *next; |
56 | /* The insn that computes the expression. */ |
57 | rtx_insn *insn; |
58 | }; |
59 | |
60 | /* Hash table entry for assignment expressions. */ |
61 | |
62 | struct cprop_expr |
63 | { |
64 | /* The expression (DEST := SRC). */ |
65 | rtx dest; |
66 | rtx src; |
67 | |
68 | /* Index in the available expression bitmaps. */ |
69 | int bitmap_index; |
70 | /* Next entry with the same hash. */ |
71 | struct cprop_expr *next_same_hash; |
72 | /* List of available occurrences in basic blocks in the function. |
73 | An "available occurrence" is one that is the last occurrence in the |
74 | basic block and whose operands are not modified by following statements |
75 | in the basic block [including this insn]. */ |
76 | struct cprop_occr *avail_occr; |
77 | }; |
78 | |
79 | /* Hash table for copy propagation expressions. |
80 | Each hash table is an array of buckets. |
81 | ??? It is known that if it were an array of entries, structure elements |
82 | `next_same_hash' and `bitmap_index' wouldn't be necessary. However, it is |
83 | not clear whether in the final analysis a sufficient amount of memory would |
84 | be saved as the size of the available expression bitmaps would be larger |
85 | [one could build a mapping table without holes afterwards though]. |
86 | Someday I'll perform the computation and figure it out. */ |
87 | |
88 | struct hash_table_d |
89 | { |
90 | /* The table itself. |
91 | This is an array of `set_hash_table_size' elements. */ |
92 | struct cprop_expr **table; |
93 | |
94 | /* Size of the hash table, in elements. */ |
95 | unsigned int size; |
96 | |
97 | /* Number of hash table elements. */ |
98 | unsigned int n_elems; |
99 | }; |
100 | |
101 | /* Copy propagation hash table. */ |
102 | static struct hash_table_d set_hash_table; |
103 | |
104 | /* Array of implicit set patterns indexed by basic block index. */ |
105 | static rtx *implicit_sets; |
106 | |
107 | /* Array of indexes of expressions for implicit set patterns indexed by basic |
108 | block index. In other words, implicit_set_indexes[i] is the bitmap_index |
109 | of the expression whose RTX is implicit_sets[i]. */ |
110 | static int *implicit_set_indexes; |
111 | |
112 | /* Bitmap containing one bit for each register in the program. |
113 | Used when performing GCSE to track which registers have been set since |
114 | the start or end of the basic block while traversing that block. */ |
115 | static regset reg_set_bitmap; |
116 | |
117 | /* Various variables for statistics gathering. */ |
118 | |
119 | /* Memory used in a pass. |
120 | This isn't intended to be absolutely precise. Its intent is only |
121 | to keep an eye on memory usage. */ |
122 | static int bytes_used; |
123 | |
124 | /* Number of local constants propagated. */ |
125 | static int local_const_prop_count; |
126 | /* Number of local copies propagated. */ |
127 | static int local_copy_prop_count; |
128 | /* Number of global constants propagated. */ |
129 | static int global_const_prop_count; |
130 | /* Number of global copies propagated. */ |
131 | static int global_copy_prop_count; |
132 | |
133 | #define GOBNEW(T) ((T *) cprop_alloc (sizeof (T))) |
134 | #define GOBNEWVAR(T, S) ((T *) cprop_alloc ((S))) |
135 | |
136 | /* Cover function to obstack_alloc. */ |
137 | |
138 | static void * |
139 | cprop_alloc (unsigned long size) |
140 | { |
141 | bytes_used += size; |
142 | return obstack_alloc (&cprop_obstack, size); |
143 | } |
144 | |
145 | /* Return nonzero if register X is unchanged from INSN to the end |
146 | of INSN's basic block. */ |
147 | |
148 | static int |
149 | reg_available_p (const_rtx x, const rtx_insn *insn ATTRIBUTE_UNUSED) |
150 | { |
151 | return ! REGNO_REG_SET_P (reg_set_bitmap, REGNO (x)); |
152 | } |
153 | |
154 | /* Hash a set of register REGNO. |
155 | |
156 | Sets are hashed on the register that is set. This simplifies the PRE copy |
157 | propagation code. |
158 | |
159 | ??? May need to make things more elaborate. Later, as necessary. */ |
160 | |
161 | static unsigned int |
162 | hash_mod (int regno, int hash_table_size) |
163 | { |
164 | return (unsigned) regno % hash_table_size; |
165 | } |
166 | |
167 | /* Insert assignment DEST:=SRC from INSN in the hash table. |
168 | DEST is a register and SRC is a register or a suitable constant. |
169 | If the assignment is already present in the table, record it as |
170 | the last occurrence in INSN's basic block. |
171 | IMPLICIT is true if it's an implicit set, false otherwise. */ |
172 | |
173 | static void |
174 | insert_set_in_table (rtx dest, rtx src, rtx_insn *insn, |
175 | struct hash_table_d *table, bool implicit) |
176 | { |
177 | bool found = false; |
178 | unsigned int hash; |
179 | struct cprop_expr *cur_expr, *last_expr = NULL; |
180 | struct cprop_occr *cur_occr; |
181 | |
182 | hash = hash_mod (REGNO (dest), table->size); |
183 | |
184 | for (cur_expr = table->table[hash]; cur_expr; |
185 | cur_expr = cur_expr->next_same_hash) |
186 | { |
187 | if (dest == cur_expr->dest |
188 | && src == cur_expr->src) |
189 | { |
190 | found = true; |
191 | break; |
192 | } |
193 | last_expr = cur_expr; |
194 | } |
195 | |
196 | if (! found) |
197 | { |
198 | cur_expr = GOBNEW (struct cprop_expr); |
199 | bytes_used += sizeof (struct cprop_expr); |
200 | if (table->table[hash] == NULL) |
201 | /* This is the first pattern that hashed to this index. */ |
202 | table->table[hash] = cur_expr; |
203 | else |
204 | /* Add EXPR to end of this hash chain. */ |
205 | last_expr->next_same_hash = cur_expr; |
206 | |
207 | /* Set the fields of the expr element. |
208 | We must copy DEST and SRC because they can be modified when copy |
209 | propagation is performed on their operands. */ |
210 | cur_expr->dest = copy_rtx (dest); |
211 | cur_expr->src = copy_rtx (src); |
212 | cur_expr->bitmap_index = table->n_elems++; |
213 | cur_expr->next_same_hash = NULL; |
214 | cur_expr->avail_occr = NULL; |
215 | } |
216 | |
217 | /* Now record the occurrence. */ |
218 | cur_occr = cur_expr->avail_occr; |
219 | |
220 | if (cur_occr |
221 | && BLOCK_FOR_INSN (cur_occr->insn) == BLOCK_FOR_INSN (insn)) |
222 | { |
223 | /* Found another instance of the expression in the same basic block. |
224 | Prefer this occurrence to the currently recorded one. We want |
225 | the last one in the block and the block is scanned from start |
226 | to end. */ |
227 | cur_occr->insn = insn; |
228 | } |
229 | else |
230 | { |
231 | /* First occurrence of this expression in this basic block. */ |
232 | cur_occr = GOBNEW (struct cprop_occr); |
233 | bytes_used += sizeof (struct cprop_occr); |
234 | cur_occr->insn = insn; |
235 | cur_occr->next = cur_expr->avail_occr; |
236 | cur_expr->avail_occr = cur_occr; |
237 | } |
238 | |
239 | /* Record bitmap_index of the implicit set in implicit_set_indexes. */ |
240 | if (implicit) |
241 | implicit_set_indexes[BLOCK_FOR_INSN (insn)->index] |
242 | = cur_expr->bitmap_index; |
243 | } |
244 | |
245 | /* Determine whether the rtx X should be treated as a constant for CPROP. |
246 | Since X might be inserted more than once we have to take care that it |
247 | is sharable. */ |
248 | |
249 | static bool |
250 | cprop_constant_p (const_rtx x) |
251 | { |
252 | return CONSTANT_P (x) && (GET_CODE (x) != CONST || shared_const_p (x)); |
253 | } |
254 | |
255 | /* Determine whether the rtx X should be treated as a register that can |
256 | be propagated. Any pseudo-register is fine. */ |
257 | |
258 | static bool |
259 | cprop_reg_p (const_rtx x) |
260 | { |
261 | return REG_P (x) && !HARD_REGISTER_P (x); |
262 | } |
263 | |
264 | /* Scan SET present in INSN and add an entry to the hash TABLE. |
265 | IMPLICIT is true if it's an implicit set, false otherwise. */ |
266 | |
267 | static void |
268 | hash_scan_set (rtx set, rtx_insn *insn, struct hash_table_d *table, |
269 | bool implicit) |
270 | { |
271 | rtx src = SET_SRC (set); |
272 | rtx dest = SET_DEST (set); |
273 | |
274 | if (cprop_reg_p (dest) |
275 | && reg_available_p (dest, insn) |
276 | && can_copy_p (GET_MODE (dest))) |
277 | { |
278 | /* See if a REG_EQUAL note shows this equivalent to a simpler expression. |
279 | |
280 | This allows us to do a single CPROP pass and still eliminate |
281 | redundant constants, addresses or other expressions that are |
282 | constructed with multiple instructions. |
283 | |
284 | However, keep the original SRC if INSN is a simple reg-reg move. In |
285 | this case, there will almost always be a REG_EQUAL note on the |
286 | insn that sets SRC. By recording the REG_EQUAL value here as SRC |
287 | for INSN, we miss copy propagation opportunities. |
288 | |
289 | Note that this does not impede profitable constant propagations. We |
290 | "look through" reg-reg sets in lookup_set. */ |
291 | rtx note = find_reg_equal_equiv_note (insn); |
292 | if (note != 0 |
293 | && REG_NOTE_KIND (note) == REG_EQUAL |
294 | && !REG_P (src) |
295 | && cprop_constant_p (XEXP (note, 0))) |
296 | src = XEXP (note, 0), set = gen_rtx_SET (dest, src); |
Although the value stored to 'set' is used in the enclosing expression, the value is never actually read from 'set' | |
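The store is dead because 'set' is not read after this statement: the code below passes 'dest' and 'src' directly to insert_set_in_table. Dropping the ", set = gen_rtx_SET (dest, src)" operand -- a possible fix sketched here, not necessarily the upstream change -- would eliminate the dead store. |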
297 | |
298 | /* Record sets for constant/copy propagation. */ |
299 | if ((cprop_reg_p (src) |
300 | && src != dest |
301 | && reg_available_p (src, insn)) |
302 | || cprop_constant_p (src)) |
303 | insert_set_in_table (dest, src, insn, table, implicit); |
304 | } |
305 | } |
306 | |
307 | /* Process INSN and add hash table entries as appropriate. */ |
308 | |
309 | static void |
310 | hash_scan_insn (rtx_insn *insn, struct hash_table_d *table) |
311 | { |
312 | rtx pat = PATTERN (insn); |
313 | int i; |
314 | |
315 | /* Pick out the sets of INSN and for other forms of instructions record |
316 | what's been modified. */ |
317 | |
318 | if (GET_CODE (pat) == SET) |
319 | hash_scan_set (pat, insn, table, false); |
320 | else if (GET_CODE (pat) == PARALLEL) |
321 | for (i = 0; i < XVECLEN (pat, 0); i++) |
322 | { |
323 | rtx x = XVECEXP (pat, 0, i); |
324 | |
325 | if (GET_CODE (x) == SET) |
326 | hash_scan_set (x, insn, table, false); |
327 | } |
328 | } |
329 | |
330 | /* Dump the hash table TABLE to file FILE under the name NAME. */ |
331 | |
332 | static void |
333 | dump_hash_table (FILE *file, const char *name, struct hash_table_d *table) |
334 | { |
335 | int i; |
336 | /* Flattened out table, so it's printed in proper order. */ |
337 | struct cprop_expr **flat_table; |
338 | unsigned int *hash_val; |
339 | struct cprop_expr *expr; |
340 | |
341 | flat_table = XCNEWVEC (struct cprop_expr *, table->n_elems); |
342 | hash_val = XNEWVEC (unsigned int, table->n_elems); |
343 | |
344 | for (i = 0; i < (int) table->size; i++) |
345 | for (expr = table->table[i]; expr != NULL; expr = expr->next_same_hash) |
346 | { |
347 | flat_table[expr->bitmap_index] = expr; |
348 | hash_val[expr->bitmap_index] = i; |
349 | } |
350 | |
351 | fprintf (file, "%s hash table (%d buckets, %d entries)\n", |
352 | name, table->size, table->n_elems); |
353 | |
354 | for (i = 0; i < (int) table->n_elems; i++) |
355 | if (flat_table[i] != 0) |
356 | { |
357 | expr = flat_table[i]; |
358 | fprintf (file, "Index %d (hash value %d)\n ", |
359 | expr->bitmap_index, hash_val[i]); |
360 | print_rtl (file, expr->dest); |
361 | fprintf (file, " := "); |
362 | print_rtl (file, expr->src); |
363 | fprintf (file, "\n"); |
364 | } |
365 | |
366 | fprintf (file, "\n"); |
367 | |
368 | free (flat_table); |
369 | free (hash_val); |
370 | } |
371 | |
372 | /* Record as unavailable all registers that are DEF operands of INSN. */ |
373 | |
374 | static void |
375 | make_set_regs_unavailable (rtx_insn *insn) |
376 | { |
377 | df_ref def; |
378 | |
379 | FOR_EACH_INSN_DEF (def, insn) |
380 | SET_REGNO_REG_SET (reg_set_bitmap, DF_REF_REGNO (def)); |
381 | } |
382 | |
383 | /* Top level function to create an assignment hash table. |
384 | |
385 | Assignment entries are placed in the hash table if |
386 | - they are of the form (set (pseudo-reg) src), |
387 | - src is something we want to perform const/copy propagation on, |
388 | - none of the operands or target are subsequently modified in the block |
389 | |
390 | Currently src must be a pseudo-reg or a const_int. |
391 | |
392 | TABLE is the table computed. */ |
393 | |
394 | static void |
395 | compute_hash_table_work (struct hash_table_d *table) |
396 | { |
397 | basic_block bb; |
398 | |
399 | /* Allocate vars to track sets of regs. */ |
400 | reg_set_bitmap = ALLOC_REG_SET (NULL); |
401 | |
402 | FOR_EACH_BB_FN (bb, cfun) |
403 | { |
404 | rtx_insn *insn; |
405 | |
406 | /* Reset tables used to keep track of what's not yet invalid [since |
407 | the end of the block]. */ |
408 | CLEAR_REG_SET (reg_set_bitmap); |
409 | |
410 | /* Go over all insns from the last to the first. This is convenient |
411 | for tracking available registers, i.e. not set between INSN and |
412 | the end of the basic block BB. */ |
413 | FOR_BB_INSNS_REVERSE (bb, insn) |
414 | { |
415 | /* Only real insns are interesting. */ |
416 | if (!NONDEBUG_INSN_P (insn)) |
417 | continue; |
418 | |
419 | /* Record interesting sets from INSN in the hash table. */ |
420 | hash_scan_insn (insn, table); |
421 | |
422 | /* Any registers set in INSN will make SETs above it not AVAIL. */ |
423 | make_set_regs_unavailable (insn); |
424 | } |
425 | |
426 | /* Insert implicit sets in the hash table, pretending they appear as |
427 | insns at the head of the basic block. */ |
428 | if (implicit_sets[bb->index] != NULL_RTX) |
429 | hash_scan_set (implicit_sets[bb->index], BB_HEAD (bb), table, true); |
430 | } |
431 | |
432 | FREE_REG_SET (reg_set_bitmap); |
433 | } |
434 | |
435 | /* Allocate space for the set/expr hash TABLE. |
436 | The function's insn count is used to determine the number of buckets. */ |
437 | |
438 | static void |
439 | alloc_hash_table (struct hash_table_d *table) |
440 | { |
441 | int n; |
442 | |
443 | n = get_max_insn_count (); |
444 | |
445 | table->size = n / 4; |
446 | if (table->size < 11) |
447 | table->size = 11; |
448 | |
449 | /* Attempt to maintain efficient use of hash table. |
450 | Making it an odd number is simplest for now. |
451 | ??? Later take some measurements. */ |
452 | table->size |= 1; |
453 | n = table->size * sizeof (struct cprop_expr *); |
454 | table->table = XNEWVAR (struct cprop_expr *, n); |
455 | } |
456 | |
457 | /* Free things allocated by alloc_hash_table. */ |
458 | |
459 | static void |
460 | free_hash_table (struct hash_table_d *table) |
461 | { |
462 | free (table->table); |
463 | } |
464 | |
465 | /* Compute the hash TABLE used for doing copy/const propagation or as an |
466 | expression hash table. */ |
467 | |
468 | static void |
469 | compute_hash_table (struct hash_table_d *table) |
470 | { |
471 | /* Initialize count of number of entries in hash table. */ |
472 | table->n_elems = 0; |
473 | memset (table->table, 0, table->size * sizeof (struct cprop_expr *)); |
474 | |
475 | compute_hash_table_work (table); |
476 | } |
477 | |
478 | /* Expression tracking support. */ |
479 | |
480 | /* Lookup REGNO in the set TABLE. The result is a pointer to the |
481 | table entry, or NULL if not found. */ |
482 | |
483 | static struct cprop_expr * |
484 | lookup_set (unsigned int regno, struct hash_table_d *table) |
485 | { |
486 | unsigned int hash = hash_mod (regno, table->size); |
487 | struct cprop_expr *expr; |
488 | |
489 | expr = table->table[hash]; |
490 | |
491 | while (expr && REGNO (expr->dest) != regno) |
492 | expr = expr->next_same_hash; |
493 | |
494 | return expr; |
495 | } |
496 | |
497 | /* Return the next entry for REGNO in list EXPR. */ |
498 | |
499 | static struct cprop_expr * |
500 | next_set (unsigned int regno, struct cprop_expr *expr) |
501 | { |
502 | do |
503 | expr = expr->next_same_hash; |
504 | while (expr && REGNO (expr->dest) != regno); |
505 | |
506 | return expr; |
507 | } |
508 | |
509 | /* Reset tables used to keep track of what's still available [since the |
510 | start of the block]. */ |
511 | |
512 | static void |
513 | reset_opr_set_tables (void) |
514 | { |
515 | /* Maintain a bitmap of which regs have been set since beginning of |
516 | the block. */ |
517 | CLEAR_REG_SET (reg_set_bitmap); |
518 | } |
519 | |
520 | /* Return nonzero if the register X has not been set yet [since the |
521 | start of the basic block containing INSN]. */ |
522 | |
523 | static int |
524 | reg_not_set_p (const_rtx x, const rtx_insn *insn ATTRIBUTE_UNUSED) |
525 | { |
526 | return ! REGNO_REG_SET_P (reg_set_bitmap, REGNO (x)); |
527 | } |
528 | |
529 | /* Record things set by INSN. |
530 | This data is used by reg_not_set_p. */ |
531 | |
532 | static void |
533 | mark_oprs_set (rtx_insn *insn) |
534 | { |
535 | df_ref def; |
536 | |
537 | FOR_EACH_INSN_DEF (def, insn) |
538 | SET_REGNO_REG_SET (reg_set_bitmap, DF_REF_REGNO (def)); |
539 | } |
540 | |
541 | /* Compute copy/constant propagation working variables. */ |
542 | |
543 | /* Local properties of assignments. */ |
544 | static sbitmap *cprop_avloc; |
545 | static sbitmap *cprop_kill; |
546 | |
547 | /* Global properties of assignments (computed from the local properties). */ |
548 | static sbitmap *cprop_avin; |
549 | static sbitmap *cprop_avout; |
550 | |
551 | /* Allocate vars used for copy/const propagation. N_BLOCKS is the number of |
552 | basic blocks. N_SETS is the number of sets. */ |
553 | |
554 | static void |
555 | alloc_cprop_mem (int n_blocks, int n_sets) |
556 | { |
557 | cprop_avloc = sbitmap_vector_alloc (n_blocks, n_sets); |
558 | cprop_kill = sbitmap_vector_alloc (n_blocks, n_sets); |
559 | |
560 | cprop_avin = sbitmap_vector_alloc (n_blocks, n_sets); |
561 | cprop_avout = sbitmap_vector_alloc (n_blocks, n_sets); |
562 | } |
563 | |
564 | /* Free vars used by copy/const propagation. */ |
565 | |
566 | static void |
567 | free_cprop_mem (void) |
568 | { |
569 | sbitmap_vector_free (cprop_avloc); |
570 | sbitmap_vector_free (cprop_kill); |
571 | sbitmap_vector_free (cprop_avin); |
572 | sbitmap_vector_free (cprop_avout); |
573 | } |
574 | |
575 | /* Compute the local properties of each recorded expression. |
576 | |
577 | Local properties are those that are defined by the block, irrespective of |
578 | other blocks. |
579 | |
580 | An expression is killed in a block if its operands, either DEST or SRC, are |
581 | modified in the block. |
582 | |
583 | An expression is computed (locally available) in a block if it is computed |
584 | at least once and the expression would contain the same value if the |
585 | computation was moved to the end of the block. |
586 | |
587 | KILL and COMP are destination sbitmaps for recording local properties. */ |
588 | |
589 | static void |
590 | compute_local_properties (sbitmap *kill, sbitmap *comp, |
591 | struct hash_table_d *table) |
592 | { |
593 | unsigned int i; |
594 | |
595 | /* Initialize the bitmaps that were passed in. */ |
596 | bitmap_vector_clear (kill, last_basic_block_for_fn (cfun)); |
597 | bitmap_vector_clear (comp, last_basic_block_for_fn (cfun)); |
598 | |
599 | for (i = 0; i < table->size; i++) |
600 | { |
601 | struct cprop_expr *expr; |
602 | |
603 | for (expr = table->table[i]; expr != NULL; expr = expr->next_same_hash) |
604 | { |
605 | int indx = expr->bitmap_index; |
606 | df_ref def; |
607 | struct cprop_occr *occr; |
608 | |
609 | /* For each definition of the destination pseudo-reg, the expression |
610 | is killed in the block where the definition is. */ |
611 | for (def = DF_REG_DEF_CHAIN (REGNO (expr->dest)); |
612 | def; def = DF_REF_NEXT_REG (def)) |
613 | bitmap_set_bit (kill[DF_REF_BB (def)->index], indx); |
614 | |
615 | /* If the source is a pseudo-reg, for each definition of the source, |
616 | the expression is killed in the block where the definition is. */ |
617 | if (REG_P (expr->src)) |
618 | for (def = DF_REG_DEF_CHAIN (REGNO (expr->src)); |
619 | def; def = DF_REF_NEXT_REG (def)) |
620 | bitmap_set_bit (kill[DF_REF_BB (def)->index], indx); |
621 | |
622 | /* The occurrences recorded in avail_occr are exactly those that |
623 | are locally available in the block where they are. */ |
624 | for (occr = expr->avail_occr; occr != NULL; occr = occr->next) |
625 | { |
626 | bitmap_set_bit (comp[BLOCK_FOR_INSN (occr->insn)->index], indx); |
627 | } |
628 | } |
629 | } |
630 | } |
631 | |
632 | /* Hash table support. */ |
633 | |
634 | /* Top level routine to do the dataflow analysis needed by copy/const |
635 | propagation. */ |
636 | |
637 | static void |
638 | compute_cprop_data (void) |
639 | { |
640 | basic_block bb; |
641 | |
642 | compute_local_properties (cprop_kill, cprop_avloc, &set_hash_table); |
643 | compute_available (cprop_avloc, cprop_kill, cprop_avout, cprop_avin); |
644 | |
645 | /* Merge implicit sets into CPROP_AVIN. They are always available at the |
646 | entry of their basic block. We need to do this because 1) implicit sets |
647 | aren't recorded for the local pass so they cannot be propagated within |
648 | their basic block by this pass and 2) the global pass would otherwise |
649 | propagate them only in the successors of their basic block. */ |
650 | FOR_EACH_BB_FN (bb, cfun) |
651 | { |
652 | int index = implicit_set_indexes[bb->index]; |
653 | if (index != -1) |
654 | bitmap_set_bit (cprop_avin[bb->index], index); |
655 | } |
656 | } |
657 | |
658 | /* Copy/constant propagation. */ |
659 | |
660 | /* Maximum number of register uses in an insn that we handle. */ |
661 | #define MAX_USES 8 |
662 | |
663 | /* Table of uses (registers, both hard and pseudo) found in an insn. |
664 | Allocated statically to avoid alloc/free complexity and overhead. */ |
665 | static rtx reg_use_table[MAX_USES]; |
666 | |
667 | /* Index into `reg_use_table' while building it. */ |
668 | static unsigned reg_use_count; |
669 | |
670 | /* Set up a list of register numbers used in INSN. The found uses are stored |
671 | in `reg_use_table'. `reg_use_count' is initialized to zero before entry, |
672 | and contains the number of uses in the table upon exit. |
673 | |
674 | ??? If a register appears multiple times we will record it multiple times. |
675 | This doesn't hurt anything but it will slow things down. */ |
676 | |
677 | static void |
678 | find_used_regs (rtx *xptr, void *data ATTRIBUTE_UNUSED) |
679 | { |
680 | int i, j; |
681 | enum rtx_code code; |
682 | const char *fmt; |
683 | rtx x = *xptr; |
684 | |
685 | /* repeat is used to turn tail-recursion into iteration since GCC |
686 | can't do it when there's no return value. */ |
687 | repeat: |
688 | if (x == 0) |
689 | return; |
690 | |
691 | code = GET_CODE (x); |
692 | if (REG_P (x)) |
693 | { |
694 | if (reg_use_count == MAX_USES) |
695 | return; |
696 | |
697 | reg_use_table[reg_use_count] = x; |
698 | reg_use_count++; |
699 | } |
700 | |
701 | /* Recursively scan the operands of this expression. */ |
702 | |
703 | for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--) |
704 | { |
705 | if (fmt[i] == 'e') |
706 | { |
707 | /* If we are about to do the last recursive call |
708 | needed at this level, change it into iteration. |
709 | This function is called enough to be worth it. */ |
710 | if (i == 0) |
711 | { |
712 | x = XEXP (x, 0); |
713 | goto repeat; |
714 | } |
715 | |
716 | find_used_regs (&XEXP (x, i), data); |
717 | } |
718 | else if (fmt[i] == 'E') |
719 | for (j = 0; j < XVECLEN (x, i); j++) |
720 | find_used_regs (&XVECEXP (x, i, j), data); |
721 | } |
722 | } |
723 | |
724 | /* Try to replace all uses of FROM in INSN with TO. |
725 | Return nonzero if successful. */ |
726 | |
727 | static int |
728 | try_replace_reg (rtx from, rtx to, rtx_insn *insn) |
729 | { |
730 | rtx note = find_reg_equal_equiv_note (insn); |
731 | rtx src = 0; |
732 | int success = 0; |
733 | rtx set = single_set (insn); |
734 | |
735 | bool check_rtx_costs = true; |
736 | bool speed = optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn)); |
737 | int old_cost = set ? set_rtx_cost (set, speed) : 0; |
738 | |
739 | if (!set |
740 | || CONSTANT_P (SET_SRC (set)) |
741 | || (note != 0 |
742 | && REG_NOTE_KIND (note) == REG_EQUAL |
743 | && (GET_CODE (XEXP (note, 0)) == CONST |
744 | || CONSTANT_P (XEXP (note, 0))))) |
745 | check_rtx_costs = false; |
746 | |
747 | /* Usually we substitute easy stuff, so we won't copy everything. |
748 | However, we need to take care not to duplicate non-trivial CONST |
749 | expressions. */ |
750 | to = copy_rtx (to); |
751 | |
752 | validate_replace_src_group (from, to, insn); |
753 | |
754 | /* If TO is a constant, compare the cost of the set after propagation |
755 | to the cost of the set before the propagation. If the cost is |
756 | higher, then do not replace FROM with TO. */ |
757 | |
758 | if (check_rtx_costs |
759 | && CONSTANT_P (to) |
760 | && set_rtx_cost (set, speed) > old_cost) |
761 | { |
762 | cancel_changes (0); |
763 | return false; |
764 | } |
765 | |
766 | |
767 | if (num_changes_pending () && apply_change_group ()) |
768 | success = 1; |
769 | |
770 | /* Try to simplify SET_SRC if we have substituted a constant. */ |
771 | if (success && set && CONSTANT_P (to)) |
772 | { |
773 | src = simplify_rtx (SET_SRC (set)); |
774 | |
775 | if (src) |
776 | validate_change (insn, &SET_SRC (set), src, 0); |
777 | } |
778 | |
779 | /* If there is already a REG_EQUAL note, update the expression in it |
780 | with our replacement. */ |
781 | if (note != 0 && REG_NOTE_KIND (note) == REG_EQUAL) |
782 | set_unique_reg_note (insn, REG_EQUAL, |
783 | simplify_replace_rtx (XEXP (note, 0), from, to)); |
784 | if (!success && set && reg_mentioned_p (from, SET_SRC (set))) |
785 | { |
786 | /* If above failed and this is a single set, try to simplify the source |
787 | of the set given our substitution. We could perhaps try this for |
788 | multiple SETs, but it probably won't buy us anything. */ |
789 | src = simplify_replace_rtx (SET_SRC (set), from, to); |
790 | |
791 | if (!rtx_equal_p (src, SET_SRC (set)) |
792 | && validate_change (insn, &SET_SRC (set), src, 0)) |
793 | success = 1; |
794 | |
795 | /* If we've failed to perform the replacement, have a single SET to |
796 | a REG destination and don't yet have a note, add a REG_EQUAL note |
797 | to not lose information. */ |
798 | if (!success && note == 0 && set != 0 && REG_P (SET_DEST (set))) |
799 | note = set_unique_reg_note (insn, REG_EQUAL, copy_rtx (src)); |
800 | } |
801 | |
802 | if (set && MEM_P (SET_DEST (set)) && reg_mentioned_p (from, SET_DEST (set))) |
803 | { |
804 | /* Registers can also appear as uses in SET_DEST if it is a MEM. |
805 | We could perhaps try this for multiple SETs, but it probably |
806 | won't buy us anything. */ |
807 | rtx dest = simplify_replace_rtx (SET_DEST (set), from, to); |
808 | |
809 | if (!rtx_equal_p (dest, SET_DEST (set)) |
810 | && validate_change (insn, &SET_DEST (set), dest, 0)) |
811 | success = 1; |
812 | } |
813 | |
814 | /* A REG_EQUAL note may get simplified into a register. |
815 | We don't allow that, so remove the note. This ought |
816 | not to happen, because the code above ought to synthesize |
817 | a reg-reg move, but be on the safe side. */ |
818 | if (note && REG_NOTE_KIND (note) == REG_EQUAL && REG_P (XEXP (note, 0))) |
819 | remove_note (insn, note); |
820 | |
821 | return success; |
822 | } |
823 | |
824 | /* Find a set of REGNOs that are available on entry to INSN's block. If found, |
825 | SET_RET[0] will be assigned a set with a register source and SET_RET[1] a |
826 | set with a constant source. If not found the corresponding entry is set to |
827 | NULL. */ |
828 | |
829 | static void |
830 | find_avail_set (int regno, rtx_insn *insn, struct cprop_expr *set_ret[2]) |
831 | { |
832 | set_ret[0] = set_ret[1] = NULL; |
833 | |
834 | /* Loops are not possible here. To get a loop we would need two sets |
835 | available at the start of the block containing INSN. i.e. we would |
836 | need two sets like this available at the start of the block: |
837 | |
838 | (set (reg X) (reg Y)) |
839 | (set (reg Y) (reg X)) |
840 | |
841 | This cannot happen since the set of (reg Y) would have killed the |
842 | set of (reg X) making it unavailable at the start of this block. */ |
843 | while (1) |
844 | { |
845 | rtx src; |
846 | struct cprop_expr *set = lookup_set (regno, &set_hash_table); |
847 | |
848 | /* Find a set that is available at the start of the block |
849 | which contains INSN. */ |
850 | while (set) |
851 | { |
852 | if (bitmap_bit_p (cprop_avin[BLOCK_FOR_INSN (insn)->index], |
853 | set->bitmap_index)) |
854 | break; |
855 | set = next_set (regno, set); |
856 | } |
857 | |
858 | /* If no available set was found we've reached the end of the |
859 | (possibly empty) copy chain. */ |
860 | if (set == 0) |
861 | break; |
862 | |
863 | src = set->src; |
864 | |
865 | /* We know the set is available. |
866 | Now check that SRC is locally anticipatable (i.e. none of the |
867 | source operands have changed since the start of the block). |
868 | |
869 | If the source operand changed, we may still use it for the next |
870 | iteration of this loop, but we may not use it for substitutions. */ |
871 | |
872 | if (cprop_constant_p (src)) |
873 | set_ret[1] = set; |
874 | else if (reg_not_set_p (src, insn)) |
875 | set_ret[0] = set; |
876 | |
877 | /* If the source of the set is anything except a register, then |
878 | we have reached the end of the copy chain. */ |
879 | if (! REG_P (src)) |
880 | break; |
881 | |
882 | /* Follow the copy chain, i.e. start another iteration of the loop |
883 | and see if we have an available copy into SRC. */ |
884 | regno = REGNO (src); |
885 | } |
886 | } |
887 | |
888 | /* Subroutine of cprop_insn that tries to propagate constants into |
889 | JUMP_INSNS. JUMP must be a conditional jump. If SETCC is non-NULL |
890 | it is the instruction that immediately precedes JUMP, and must be a |
891 | single SET of a register. FROM is what we will try to replace, |
892 | SRC is the constant we will try to substitute for it. Return nonzero |
893 | if a change was made. */ |
894 | |
895 | static int |
896 | cprop_jump (basic_block bb, rtx_insn *setcc, rtx_insn *jump, rtx from, rtx src) |
897 | { |
898 | rtx new_rtx, set_src, note_src; |
899 | rtx set = pc_set (jump); |
900 | rtx note = find_reg_equal_equiv_note (jump); |
901 | |
902 | if (note) |
903 | { |
904 | note_src = XEXP (note, 0); |
905 | if (GET_CODE (note_src) == EXPR_LIST) |
906 | note_src = NULL_RTX; |
907 | } |
908 | else note_src = NULL_RTX; |
909 | |
910 | /* Prefer REG_EQUAL notes except those containing EXPR_LISTs. */ |
911 | set_src = note_src ? note_src : SET_SRC (set); |
912 | |
913 | /* First substitute the SETCC condition into the JUMP instruction, |
914 | then substitute the given values into this expanded JUMP. */ |
915 | if (setcc != NULL_RTX |
916 | && !modified_between_p (from, setcc, jump) |
917 | && !modified_between_p (src, setcc, jump)) |
918 | { |
919 | rtx setcc_src; |
920 | rtx setcc_set = single_set (setcc); |
921 | rtx setcc_note = find_reg_equal_equiv_note (setcc); |
922 | setcc_src = (setcc_note && GET_CODE (XEXP (setcc_note, 0)) != EXPR_LIST) |
923 | ? XEXP (setcc_note, 0) : SET_SRC (setcc_set); |
924 | set_src = simplify_replace_rtx (set_src, SET_DEST (setcc_set), |
925 | setcc_src); |
926 | } |
927 | else |
928 | setcc = NULL; |
929 | |
930 | new_rtx = simplify_replace_rtx (set_src, from, src); |
931 | |
932 | /* If no simplification can be made, then try the next register. */ |
933 | if (rtx_equal_p (new_rtx, SET_SRC (set))) |
934 | return 0; |
935 | |
936 | /* If this is now a no-op delete it, otherwise this must be a valid insn. */ |
937 | if (new_rtx == pc_rtx) |
938 | delete_insn (jump); |
939 | else |
940 | { |
941 | /* Ensure the value computed inside the jump insn is equivalent |
942 | to the one computed by setcc. */ |
943 | if (setcc && modified_in_p (new_rtx, setcc)) |
944 | return 0; |
945 | if (! validate_unshare_change (jump, &SET_SRC (set), new_rtx, 0)) |
946 | { |
947 | /* When (some) constants are not valid in a comparison, and there |
948 | are two registers to be replaced by constants before the entire |
949 | comparison can be folded into a constant, we need to keep |
950 | intermediate information in REG_EQUAL notes. For targets with |
951 | separate compare insns, such notes are added by try_replace_reg. |
952 | When we have a combined compare-and-branch instruction, however, |
953 | we need to attach a note to the branch itself to make this |
954 | optimization work. */ |
955 | |
956 | if (!rtx_equal_p (new_rtx, note_src)) |
957 | set_unique_reg_note (jump, REG_EQUAL, copy_rtx (new_rtx)); |
958 | return 0; |
959 | } |
960 | |
961 | /* Remove REG_EQUAL note after simplification. */ |
962 | if (note_src) |
963 | remove_note (jump, note); |
964 | } |
965 | |
966 | global_const_prop_count++; |
967 | if (dump_file != NULL) |
968 | { |
969 | fprintf (dump_file, |
970 | "GLOBAL CONST-PROP: Replacing reg %d in jump_insn %d with " |
971 | "constant ", REGNO (from)(rhs_regno(from)), INSN_UID (jump)); |
972 | print_rtl (dump_file, src); |
973 | fprintf (dump_file, "\n"); |
974 | } |
975 | purge_dead_edges (bb); |
976 | |
977 | /* If a conditional jump has been changed into unconditional jump, remove |
978 | the jump and make the edge fallthru - this is always called in |
979 | cfglayout mode. */ |
980 | if (new_rtx != pc_rtx && simplejump_p (jump)) |
981 | { |
982 | edge e; |
983 | edge_iterator ei; |
984 | |
985 | FOR_EACH_EDGE (e, ei, bb->succs) |
986 | if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun) |
987 | && BB_HEAD (e->dest) == JUMP_LABEL (jump)) |
988 | { |
989 | e->flags |= EDGE_FALLTHRU; |
990 | break; |
991 | } |
992 | delete_insn (jump); |
993 | } |
994 | |
995 | return 1; |
996 | } |
997 | |
998 | /* Subroutine of cprop_insn that tries to propagate constants. FROM is what |
999 | we will try to replace, SRC is the constant we will try to substitute for |
1000 | it and INSN is the instruction where this will be happening. */ |
1001 | |
1002 | static int |
1003 | constprop_register (rtx from, rtx src, rtx_insn *insn) |
1004 | { |
1005 | rtx sset; |
1006 | rtx_insn *next_insn; |
1007 | |
1008 | /* Check for reg setting instructions followed by conditional branch |
1009 | instructions first. */ |
1010 | if ((sset = single_set (insn)) != NULL |
1011 | && (next_insn = next_nondebug_insn (insn)) != NULL |
1012 | && any_condjump_p (next_insn) |
1013 | && onlyjump_p (next_insn)) |
1014 | { |
1015 | rtx dest = SET_DEST (sset); |
1016 | if (REG_P (dest) |
1017 | && cprop_jump (BLOCK_FOR_INSN (insn), insn, next_insn, |
1018 | from, src)) |
1019 | return 1; |
1020 | } |
1021 | |
1022 | /* Handle normal insns next. */ |
1023 | if (NONJUMP_INSN_P (insn) && try_replace_reg (from, src, insn)) |
1024 | return 1; |
1025 | |
1026 | /* Try to propagate a CONST_INT into a conditional jump. |
1027 | We're pretty specific about what we will handle in this |
1028 | code, we can extend this as necessary over time. |
1029 | |
1030 | Right now the insn in question must look like |
1031 | (set (pc) (if_then_else ...)) */ |
1032 | else if (any_condjump_p (insn) && onlyjump_p (insn)) |
1033 | return cprop_jump (BLOCK_FOR_INSN (insn), NULL, insn, from, src); |
1034 | return 0; |
1035 | } |
1036 | |
1037 | /* Perform constant and copy propagation on INSN. |
1038 | Return nonzero if a change was made. */ |
1039 | |
1040 | static int |
1041 | cprop_insn (rtx_insn *insn) |
1042 | { |
1043 | unsigned i; |
1044 | int changed = 0, changed_this_round; |
1045 | rtx note; |
1046 | |
1047 | do |
1048 | { |
1049 | changed_this_round = 0; |
1050 | reg_use_count = 0; |
1051 | note_uses (&PATTERN (insn), find_used_regs, NULL); |
1052 | |
1053 | /* We may win even when propagating constants into notes. */ |
1054 | note = find_reg_equal_equiv_note (insn); |
1055 | if (note) |
1056 | find_used_regs (&XEXP (note, 0), NULL); |
1057 | |
1058 | for (i = 0; i < reg_use_count; i++) |
1059 | { |
1060 | rtx reg_used = reg_use_table[i]; |
1061 | unsigned int regno = REGNO (reg_used); |
1062 | rtx src_cst = NULL, src_reg = NULL; |
1063 | struct cprop_expr *set[2]; |
1064 | |
1065 | /* If the register has already been set in this block, there's |
1066 | nothing we can do. */ |
1067 | if (! reg_not_set_p (reg_used, insn)) |
1068 | continue; |
1069 | |
1070 | /* Find an assignment that sets reg_used and is available |
1071 | at the start of the block. */ |
1072 | find_avail_set (regno, insn, set); |
1073 | if (set[0]) |
1074 | src_reg = set[0]->src; |
1075 | if (set[1]) |
1076 | src_cst = set[1]->src; |
1077 | |
1078 | /* Constant propagation. */ |
1079 | if (src_cst && cprop_constant_p (src_cst) |
1080 | && constprop_register (reg_used, src_cst, insn)) |
1081 | { |
1082 | changed_this_round = changed = 1; |
1083 | global_const_prop_count++; |
1084 | if (dump_file != NULL) |
1085 | { |
1086 | fprintf (dump_file, |
1087 | "GLOBAL CONST-PROP: Replacing reg %d in ", regno); |
1088 | fprintf (dump_file, "insn %d with constant ", |
1089 | INSN_UID (insn)); |
1090 | print_rtl (dump_file, src_cst); |
1091 | fprintf (dump_file, "\n"); |
1092 | } |
1093 | if (insn->deleted ()) |
1094 | return 1; |
1095 | } |
1096 | /* Copy propagation. */ |
1097 | else if (src_reg && cprop_reg_p (src_reg) |
1098 | && REGNO (src_reg) != regno |
1099 | && try_replace_reg (reg_used, src_reg, insn)) |
1100 | { |
1101 | changed_this_round = changed = 1; |
1102 | global_copy_prop_count++; |
1103 | if (dump_file != NULL) |
1104 | { |
1105 | fprintf (dump_file, |
1106 | "GLOBAL COPY-PROP: Replacing reg %d in insn %d", |
1107 | regno, INSN_UID (insn)); |
1108 | fprintf (dump_file, " with reg %d\n", REGNO (src_reg)); |
1109 | } |
1110 | |
1111 | /* The original insn setting reg_used may or may not now be |
1112 | deletable. We leave the deletion to DCE. */ |
1113 | /* FIXME: If it turns out that the insn isn't deletable, |
1114 | then we may have unnecessarily extended register lifetimes |
1115 | and made things worse. */ |
1116 | } |
1117 | } |
1118 | } |
1119 | /* If try_replace_reg simplified the insn, the regs found by find_used_regs |
1120 | may not be valid anymore. Start over. */ |
1121 | while (changed_this_round); |
1122 | |
1123 | if (changed && DEBUG_INSN_P (insn)) |
1124 | return 0; |
1125 | |
1126 | return changed; |
1127 | } |
1128 | |
1129 | /* Like find_used_regs, but avoid recording uses that appear in |
1130 | input-output contexts such as zero_extract or pre_dec. This |
1131 | restricts the cases we consider to those for which local cprop |
1132 | can legitimately make replacements. */ |
1133 | |
1134 | static void |
1135 | local_cprop_find_used_regs (rtx *xptr, void *data) |
1136 | { |
1137 | rtx x = *xptr; |
1138 | |
1139 | if (x == 0) |
1140 | return; |
1141 | |
1142 | switch (GET_CODE (x)) |
1143 | { |
1144 | case ZERO_EXTRACT: |
1145 | case SIGN_EXTRACT: |
1146 | case STRICT_LOW_PART: |
1147 | return; |
1148 | |
1149 | case PRE_DEC: |
1150 | case PRE_INC: |
1151 | case POST_DEC: |
1152 | case POST_INC: |
1153 | case PRE_MODIFY: |
1154 | case POST_MODIFY: |
1155 | /* Can only legitimately appear this early in the context of |
1156 | stack pushes for function arguments, but handle all of the |
1157 | codes nonetheless. */ |
1158 | return; |
1159 | |
1160 | case SUBREG: |
1161 | if (read_modify_subreg_p (x)) |
1162 | return; |
1163 | break; |
1164 | |
1165 | default: |
1166 | break; |
1167 | } |
1168 | |
1169 | find_used_regs (xptr, data); |
1170 | } |
1171 | |
1172 | /* Try to perform local const/copy propagation on X in INSN. */ |
1173 | |
1174 | static bool |
1175 | do_local_cprop (rtx x, rtx_insn *insn) |
1176 | { |
1177 | rtx newreg = NULL, newcnst = NULL; |
1178 | |
1179 | /* Rule out USE instructions and ASM statements as we don't want to |
1180 | change the hard registers mentioned. */ |
1181 | if (REG_P (x) |
1182 | && (cprop_reg_p (x) |
1183 | || (GET_CODE (PATTERN (insn)) != USE |
1184 | && asm_noperands (PATTERN (insn)) < 0))) |
1185 | { |
1186 | cselib_val *val = cselib_lookup (x, GET_MODE (x), 0, VOIDmode); |
1187 | struct elt_loc_list *l; |
1188 | |
1189 | if (!val) |
1190 | return false; |
1191 | for (l = val->locs; l; l = l->next) |
1192 | { |
1193 | rtx this_rtx = l->loc; |
1194 | rtx note; |
1195 | |
1196 | if (cprop_constant_p (this_rtx)) |
1197 | newcnst = this_rtx; |
1198 | if (cprop_reg_p (this_rtx) |
1199 | /* Don't copy propagate if it has attached REG_EQUIV note. |
1200 | At this point only function parameters should have |
1201 | REG_EQUIV notes and if the argument slot is used somewhere |
1202 | explicitly, it means address of parameter has been taken, |
1203 | so we should not extend the lifetime of the pseudo. */ |
1204 | && (!(note = find_reg_note (l->setting_insn, REG_EQUIV, NULL_RTX)) |
1205 | || ! MEM_P (XEXP (note, 0)))) |
1206 | newreg = this_rtx; |
1207 | } |
1208 | if (newcnst && constprop_register (x, newcnst, insn)) |
1209 | { |
1210 | if (dump_file != NULL) |
1211 | { |
1212 | fprintf (dump_file, "LOCAL CONST-PROP: Replacing reg %d in ", |
1213 | REGNO (x)); |
1214 | fprintf (dump_file, "insn %d with constant ", |
1215 | INSN_UID (insn)); |
1216 | print_rtl (dump_file, newcnst); |
1217 | fprintf (dump_file, "\n"); |
1218 | } |
1219 | local_const_prop_count++; |
1220 | return true; |
1221 | } |
1222 | else if (newreg && newreg != x && try_replace_reg (x, newreg, insn)) |
1223 | { |
1224 | if (dump_file != NULL) |
1225 | { |
1226 | fprintf (dump_file, |
1227 | "LOCAL COPY-PROP: Replacing reg %d in insn %d", |
1228 | REGNO (x), INSN_UID (insn)); |
1229 | fprintf (dump_file, " with reg %d\n", REGNO (newreg)); |
1230 | } |
1231 | local_copy_prop_count++; |
1232 | return true; |
1233 | } |
1234 | } |
1235 | return false; |
1236 | } |
1237 | |
1238 | /* Do local const/copy propagation (i.e. within each basic block). */ |
1239 | |
1240 | static int |
1241 | local_cprop_pass (void) |
1242 | { |
1243 | basic_block bb; |
1244 | rtx_insn *insn; |
1245 | bool changed = false; |
1246 | unsigned i; |
1247 | |
1248 | auto_vec<rtx_insn *> uncond_traps; |
1249 | |
1250 | cselib_init (0); |
1251 | FOR_EACH_BB_FN (bb, cfun) |
1252 | { |
1253 | FOR_BB_INSNS (bb, insn) |
1254 | { |
1255 | if (INSN_P (insn)) |
1256 | { |
1257 | bool was_uncond_trap |
1258 | = (GET_CODE (PATTERN (insn)) == TRAP_IF |
1259 | && XEXP (PATTERN (insn), 0) == const1_rtx); |
1260 | rtx note = find_reg_equal_equiv_note (insn); |
1261 | do |
1262 | { |
1263 | reg_use_count = 0; |
1264 | note_uses (&PATTERN (insn), local_cprop_find_used_regs, |
1265 | NULL); |
1266 | if (note) |
1267 | local_cprop_find_used_regs (&XEXP (note, 0), NULL); |
1268 | |
1269 | for (i = 0; i < reg_use_count; i++) |
1270 | { |
1271 | if (do_local_cprop (reg_use_table[i], insn)) |
1272 | { |
1273 | if (!DEBUG_INSN_P (insn)) |
1274 | changed = true; |
1275 | break; |
1276 | } |
1277 | } |
1278 | if (!was_uncond_trap |
1279 | && GET_CODE (PATTERN (insn)) == TRAP_IF |
1280 | && XEXP (PATTERN (insn), 0) == const1_rtx) |
1281 | { |
1282 | uncond_traps.safe_push (insn); |
1283 | break; |
1284 | } |
1285 | if (insn->deleted ()) |
1286 | break; |
1287 | } |
1288 | while (i < reg_use_count); |
1289 | } |
1290 | cselib_process_insn (insn); |
1291 | } |
1292 | |
1293 | /* Forget everything at the end of a basic block. */ |
1294 | cselib_clear_table (); |
1295 | } |
1296 | |
1297 | cselib_finish (); |
1298 | |
1299 | while (!uncond_traps.is_empty ()) |
1300 | { |
1301 | rtx_insn *insn = uncond_traps.pop (); |
1302 | basic_block to_split = BLOCK_FOR_INSN (insn); |
1303 | remove_edge (split_block (to_split, insn)); |
1304 | emit_barrier_after_bb (to_split); |
1305 | } |
1306 | |
1307 | return changed; |
1308 | } |
1309 | |
1310 | /* Similar to get_condition, only the resulting condition must be |
1311 | valid at JUMP, instead of at EARLIEST. |
1312 | |
1313 | This differs from noce_get_condition in ifcvt.cc in that we prefer not to |
1314 | settle for the condition variable in the jump instruction being integral. |
1315 | We prefer to be able to record the value of a user variable, rather than |
1316 | the value of a temporary used in a condition. This could be solved by |
1317 | recording the value of *every* register scanned by canonicalize_condition, |
1318 | but this would require some code reorganization. */ |
1319 | |
1320 | rtx |
1321 | fis_get_condition (rtx_insn *jump) |
1322 | { |
1323 | return get_condition (jump, NULL, false, true); |
1324 | } |
1325 | |
1326 | /* Check the comparison COND to see if we can safely form an implicit |
1327 | set from it. */ |
1328 | |
1329 | static bool |
1330 | implicit_set_cond_p (const_rtx cond) |
1331 | { |
1332 | machine_mode mode; |
1333 | rtx cst; |
1334 | |
1335 | /* COND must be either an EQ or NE comparison. */ |
1336 | if (GET_CODE (cond) != EQ && GET_CODE (cond) != NE) |
1337 | return false; |
1338 | |
1339 | /* The first operand of COND must be a register we can propagate. */ |
1340 | if (!cprop_reg_p (XEXP (cond, 0))) |
1341 | return false; |
1342 | |
1343 | /* The second operand of COND must be a suitable constant. */ |
1344 | mode = GET_MODE (XEXP (cond, 0)); |
1345 | cst = XEXP (cond, 1); |
1346 | |
1347 | /* We can't perform this optimization if either operand might be or might |
1348 | contain a signed zero. */ |
1349 | if (HONOR_SIGNED_ZEROS (mode)) |
1350 | { |
1351 | /* It is sufficient to check if CST is or contains a zero. We must |
1352 | handle float, complex, and vector. If any subpart is a zero, then |
1353 | the optimization can't be performed. */ |
1354 | /* ??? The complex and vector checks are not implemented yet. We just |
1355 | always return zero for them. */ |
1356 | if (CONST_DOUBLE_AS_FLOAT_P (cst) |
1357 | && real_equal (CONST_DOUBLE_REAL_VALUE (cst), &dconst0)) |
1358 | return 0; |
1359 | else |
1360 | return 0; |
1361 | } |
1362 | |
1363 | return cprop_constant_p (cst); |
1364 | } |
1365 | |
1366 | /* Find the implicit sets of a function. An "implicit set" is a constraint |
1367 | on the value of a variable, implied by a conditional jump. For example, |
1368 | following "if (x == 2)", the then branch may be optimized as though the |
1369 | conditional performed an "explicit set", in this example, "x = 2". This |
1370 | function records the set patterns that are implicit at the start of each |
1371 | basic block. |
1372 | |
1373 | If an implicit set is found but the set is implicit on a critical edge, |
1374 | this critical edge is split. |
1375 | |
1376 | Return true if the CFG was modified, false otherwise. */ |
1377 | |
1378 | static bool |
1379 | find_implicit_sets (void) |
1380 | { |
1381 | basic_block bb, dest; |
1382 | rtx cond, new_rtx; |
1383 | unsigned int count = 0; |
1384 | bool edges_split = false; |
1385 | size_t implicit_sets_size = last_basic_block_for_fn (cfun) + 10; |
1386 | |
1387 | implicit_sets = XCNEWVEC (rtx, implicit_sets_size); |
1388 | |
1389 | FOR_EACH_BB_FN (bb, cfun) |
1390 | { |
1391 | /* Check for more than one successor. */ |
1392 | if (EDGE_COUNT (bb->succs) <= 1) |
1393 | continue; |
1394 | |
1395 | cond = fis_get_condition (BB_END (bb)); |
1396 | |
1397 | /* If no condition is found or if it isn't of a suitable form, |
1398 | ignore it. */ |
1399 | if (! cond || ! implicit_set_cond_p (cond)) |
1400 | continue; |
1401 | |
1402 | dest = GET_CODE (cond) == EQ |
1403 | ? BRANCH_EDGE (bb)->dest : FALLTHRU_EDGE (bb)->dest; |
1404 | |
1405 | /* If DEST doesn't go anywhere, ignore it. */ |
1406 | if (! dest || dest == EXIT_BLOCK_PTR_FOR_FN (cfun)(((cfun + 0))->cfg->x_exit_block_ptr)) |
1407 | continue; |
1408 | |
1409 | /* We have found a suitable implicit set. Try to record it now as |
1410 | a SET in DEST. If DEST has more than one predecessor, the edge |
1411 | between BB and DEST is a critical edge and we must split it, |
1412 | because we can only record one implicit set per DEST basic block. */ |
1413 | if (! single_pred_p (dest)) |
1414 | { |
1415 | dest = split_edge (find_edge (bb, dest)); |
1416 | edges_split = true; |
1417 | } |
1418 | |
1419 | if (implicit_sets_size <= (size_t) dest->index) |
1420 | { |
1421 | size_t old_implicit_sets_size = implicit_sets_size; |
1422 | implicit_sets_size *= 2; |
1423 | implicit_sets = XRESIZEVEC (rtx, implicit_sets, implicit_sets_size)((rtx *) xrealloc ((void *) (implicit_sets), sizeof (rtx) * ( implicit_sets_size))); |
1424 | memset (implicit_sets + old_implicit_sets_size, 0, |
1425 | (implicit_sets_size - old_implicit_sets_size) * sizeof (rtx)); |
1426 | } |
1427 | |
1428 | new_rtx = gen_rtx_SET (XEXP (cond, 0), XEXP (cond, 1))gen_rtx_fmt_ee_stat ((SET), (((void) 0, E_VOIDmode)), (((((cond )->u.fld[0]).rt_rtx))), (((((cond)->u.fld[1]).rt_rtx))) ); |
1429 | implicit_sets[dest->index] = new_rtx; |
1430 | if (dump_file) |
1431 | { |
1432 | fprintf (dump_file, "Implicit set of reg %d in ", |
1433 | REGNO (XEXP (cond, 0))(rhs_regno((((cond)->u.fld[0]).rt_rtx)))); |
1434 | fprintf (dump_file, "basic block %d\n", dest->index); |
1435 | } |
1436 | count++; |
1437 | } |
1438 | |
1439 | if (dump_file) |
1440 | fprintf (dump_file, "Found %d implicit sets\n", count); |
1441 | |
1442 | /* Confess our sins. */ |
1443 | return edges_split; |
1444 | } |
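
/* An illustrative sketch (editorial note; block names are made up).  Given

     BB1:  if (x == 2) goto BB3;   ;; cond is (eq (reg x) (const_int 2))
     BB2:  ...                     ;; fall-through, x != 2 here
     BB3:  ...                     ;; also reached from some BB4

   the implicit set (set (reg x) (const_int 2)) holds on entry to BB3 only
   along the edge BB1->BB3.  Since BB1 has two successors and BB3 has two
   predecessors, that edge is critical; split_edge inserts a fresh block on
   it and the implicit set is recorded there instead, where it holds
   unconditionally.  */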

/* Bypass conditional jumps.  */

/* The value of last_basic_block at the beginning of the jump_bypass
   pass.  The use of redirect_edge_and_branch_force may introduce new
   basic blocks, but the data flow analysis is only valid for basic
   block indices less than bypass_last_basic_block.  */

static int bypass_last_basic_block;

/* Find a set of REGNO to a constant that is available at the end of basic
   block BB.  Return NULL if no such set is found.  Based heavily upon
   find_avail_set.  */

static struct cprop_expr *
find_bypass_set (int regno, int bb)
{
  struct cprop_expr *result = 0;

  for (;;)
    {
      rtx src;
      struct cprop_expr *set = lookup_set (regno, &set_hash_table);

      while (set)
        {
          if (bitmap_bit_p (cprop_avout[bb], set->bitmap_index))
            break;
          set = next_set (regno, set);
        }

      if (set == 0)
        break;

      src = set->src;
      if (cprop_constant_p (src))
        result = set;

      if (! REG_P (src))
        break;

      regno = REGNO (src);
    }
  return result;
}
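
/* An illustrative sketch (editorial note; register numbers are made up).
   The loop above walks copy chains.  With these sets available on exit
   from block BB

     (set (reg 3) (reg 2))
     (set (reg 2) (const_int 5))

   find_bypass_set (3, BB) first finds reg 3 := reg 2; the source is not a
   constant but is a register, so the walk continues with regno 2, finds
   reg 2 := 5, records it as the result, and stops because a const_int is
   not a register.  The caller only uses the constant SRC of the returned
   expression.  */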

/* Subroutine of bypass_block that checks whether a pseudo is killed by
   any of the instructions inserted on an edge.  Jump bypassing places
   condition code setters on CFG edges using insert_insn_on_edge.  This
   function is required to check that our data flow analysis is still
   valid prior to commit_edge_insertions.  */

static bool
reg_killed_on_edge (const_rtx reg, const_edge e)
{
  rtx_insn *insn;

  for (insn = e->insns.r; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn) && reg_set_p (reg, insn))
      return true;

  return false;
}

/* Subroutine of bypass_conditional_jumps that attempts to bypass the given
   basic block BB which has more than one predecessor.  If not NULL, SETCC
   is the first instruction of BB, which is immediately followed by JUMP_INSN
   JUMP.  Otherwise, SETCC is NULL, and JUMP is the first insn of BB.
   Returns nonzero if a change was made.

   During the jump bypassing pass, we may place copies of SETCC instructions
   on CFG edges.  The following routine must be careful to pay attention to
   these inserted insns when performing its transformations.  */

static int
bypass_block (basic_block bb, rtx_insn *setcc, rtx_insn *jump)
{
  rtx_insn *insn;
  rtx note;
  edge e, edest;
  int change;
  int may_be_loop_header = false;
  unsigned removed_p;
  unsigned i;
  edge_iterator ei;

  insn = (setcc != NULL) ? setcc : jump;

  /* Determine set of register uses in INSN.  */
  reg_use_count = 0;
  note_uses (&PATTERN (insn), find_used_regs, NULL);
  note = find_reg_equal_equiv_note (insn);
  if (note)
    find_used_regs (&XEXP (note, 0), NULL);

  if (current_loops)
    {
      /* If we are to preserve loop structure then do not bypass
         a loop header.  This will either rotate the loop, create
         multiple entry loops or even irreducible regions.  */
      if (bb == bb->loop_father->header)
        return 0;
    }
  else
    {
      FOR_EACH_EDGE (e, ei, bb->preds)
        if (e->flags & EDGE_DFS_BACK)
          {
            may_be_loop_header = true;
            break;
          }
    }

  change = 0;
  for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
    {
      removed_p = 0;

      if (e->flags & EDGE_COMPLEX)
        {
          ei_next (&ei);
          continue;
        }

      /* We can't redirect edges from new basic blocks.  */
      if (e->src->index >= bypass_last_basic_block)
        {
          ei_next (&ei);
          continue;
        }

      /* The irreducible loops created by redirecting of edges entering the
         loop from outside would decrease effectiveness of some of the
         following optimizations, so prevent this.  */
      if (may_be_loop_header
          && !(e->flags & EDGE_DFS_BACK))
        {
          ei_next (&ei);
          continue;
        }

      for (i = 0; i < reg_use_count; i++)
        {
          rtx reg_used = reg_use_table[i];
          unsigned int regno = REGNO (reg_used);
          basic_block dest, old_dest;
          struct cprop_expr *set;
          rtx src, new_rtx;

          set = find_bypass_set (regno, e->src->index);

          if (! set)
            continue;

          /* Check the data flow is valid after edge insertions.  */
          if (e->insns.r && reg_killed_on_edge (reg_used, e))
            continue;

          src = SET_SRC (pc_set (jump));

          if (setcc != NULL)
            src = simplify_replace_rtx (src,
                                        SET_DEST (PATTERN (setcc)),
                                        SET_SRC (PATTERN (setcc)));

          new_rtx = simplify_replace_rtx (src, reg_used, set->src);

          /* Jump bypassing may have already placed instructions on
             edges of the CFG.  We can't bypass an outgoing edge that
             has instructions associated with it, as these insns won't
             get executed if the incoming edge is redirected.  */
          if (new_rtx == pc_rtx)
            {
              edest = FALLTHRU_EDGE (bb);
              dest = edest->insns.r ? NULL : edest->dest;
            }
          else if (GET_CODE (new_rtx) == LABEL_REF)
            {
              dest = BLOCK_FOR_INSN (XEXP (new_rtx, 0));
              /* Don't bypass edges containing instructions.  */
              if (dest)
                {
                  edest = find_edge (bb, dest);
                  if (edest && edest->insns.r)
                    dest = NULL;
                }
            }
          else
            dest = NULL;

          /* Avoid unification of the edge with other edges from original
             branch.  We would end up emitting the instruction on "both"
             edges.  */
          if (dest && setcc && find_edge (e->src, dest))
            dest = NULL;

          old_dest = e->dest;
          if (dest != NULL
              && dest != old_dest
              && dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
            {
              redirect_edge_and_branch_force (e, dest);

              /* Copy the register setter to the redirected edge.  */
              if (setcc)
                {
                  rtx pat = PATTERN (setcc);
                  insert_insn_on_edge (copy_insn (pat), e);
                }

              if (dump_file != NULL)
                {
                  fprintf (dump_file, "JUMP-BYPASS: Proved reg %d "
                           "in jump_insn %d equals constant ",
                           regno, INSN_UID (jump));
                  print_rtl (dump_file, set->src);
                  fprintf (dump_file, "\n\t     when BB %d is entered from "
                           "BB %d.  Redirect edge %d->%d to %d.\n",
                           old_dest->index, e->src->index, e->src->index,
                           old_dest->index, dest->index);
                }
              change = 1;
              removed_p = 1;
              break;
            }
        }
      if (!removed_p)
        ei_next (&ei);
    }
  return change;
}
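
/* An illustrative sketch (editorial note; register and block numbers are
   made up).  Suppose BB consists of

     (set (reg 22) (compare (reg 2) (const_int 5)))          ;; SETCC
     (set (pc) (if_then_else (eq (reg 22) (const_int 0))
                             (label_ref L1) (pc)))           ;; JUMP

   and find_bypass_set proves reg 2 := 5 on the edge from BB0.  Replacing
   SETCC's destination by its source and then substituting the constant
   folds the branch condition, so NEW_RTX becomes either (label_ref L1) or
   pc_rtx, and the edge BB0->BB is redirected straight to L1 or to BB's
   fall-through block.  A copy of SETCC is placed on the redirected edge
   because reg 22 may still be used beyond the jump.  */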

/* Find basic blocks with more than one predecessor that only contain a
   single conditional jump.  If the result of the comparison is known at
   compile-time from any incoming edge, redirect that edge to the
   appropriate target.  Return nonzero if a change was made.

   This function is now mis-named, because we also handle indirect jumps.  */

static int
bypass_conditional_jumps (void)
{
  basic_block bb;
  int changed;
  rtx_insn *setcc;
  rtx_insn *insn;
  rtx dest;

  /* Note we start at block 1.  */
  if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return 0;

  mark_dfs_back_edges ();

  changed = 0;
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb->next_bb,
                  EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
    {
      /* Check for more than one predecessor.  */
      if (!single_pred_p (bb))
        {
          setcc = NULL;
          FOR_BB_INSNS (bb, insn)
            if (DEBUG_INSN_P (insn))
              continue;
            else if (NONJUMP_INSN_P (insn))
              {
                if (setcc)
                  break;
                if (GET_CODE (PATTERN (insn)) != SET)
                  break;

                dest = SET_DEST (PATTERN (insn));
                if (REG_P (dest))
                  setcc = insn;
                else
                  break;
              }
            else if (JUMP_P (insn))
              {
                if ((any_condjump_p (insn) || computed_jump_p (insn))
                    && onlyjump_p (insn))
                  changed |= bypass_block (bb, setcc, insn);
                break;
              }
            else if (INSN_P (insn))
              break;
        }
    }

  /* If we bypassed any register setting insns, we inserted a
     copy on the redirected edge.  These need to be committed.  */
  if (changed)
    commit_edge_insertions ();

  return changed;
}
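
/* An editorial note on the scan above: apart from debug insns, a candidate
   block may contain at most one register-setting insn followed by the jump,
   e.g.

     (set (reg 22) (compare ...))   ;; becomes SETCC
     (jump_insn ...)                ;; conditional or computed jump

   Any other insn shape breaks out of the walk without calling bypass_block,
   so only such near-empty blocks are candidates for bypassing.  */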

/* Main function for the CPROP pass.  */

static int
one_cprop_pass (void)
{
  int i;
  int changed = 0;

  /* Return if there's nothing to do, or it is too expensive.  */
  if (n_basic_blocks_for_fn (cfun) <= NUM_FIXED_BLOCKS + 1
      || gcse_or_cprop_is_too_expensive (_("const/copy propagation disabled")))
    return 0;

  global_const_prop_count = local_const_prop_count = 0;
  global_copy_prop_count = local_copy_prop_count = 0;

  bytes_used = 0;
  gcc_obstack_init (&cprop_obstack);

  /* Do a local const/copy propagation pass first.  The global pass
     only handles global opportunities.
     If the local pass changes something, remove any unreachable blocks
     because the CPROP global dataflow analysis may get into infinite
     loops for CFGs with unreachable blocks.

     FIXME: This local pass should not be necessary after CSE (but for
            some reason it still is).  It is also (proven) not necessary
            to run the local pass right after FWPROP.

     FIXME: The global analysis would not get into infinite loops if it
            would use the DF solver (via df_simple_dataflow) instead of
            the solver implemented in this file.  */
  changed |= local_cprop_pass ();
  if (changed)
    delete_unreachable_blocks ();

  /* Determine implicit sets.  This may change the CFG (split critical
     edges if that exposes an implicit set).
     Note that find_implicit_sets() does not rely on up-to-date DF caches
     so that we do not have to re-run df_analyze() even if local CPROP
     changed something.
     ??? This could run earlier so that any uncovered implicit sets
         could be exploited in local_cprop_pass() also.  Later.  */
  changed |= find_implicit_sets ();

  /* If local_cprop_pass() or find_implicit_sets() changed something,
     run df_analyze() to bring all insn caches up-to-date, and to take
     new basic blocks from edge splitting on the DF radar.
     NB: This also runs the fast DCE pass, because execute_rtl_cprop
     sets DF_LR_RUN_DCE.  */
  if (changed)
    df_analyze ();

  /* Initialize implicit_set_indexes array.  */
  implicit_set_indexes = XNEWVEC (int, last_basic_block_for_fn (cfun));
  for (i = 0; i < last_basic_block_for_fn (cfun); i++)
    implicit_set_indexes[i] = -1;

  alloc_hash_table (&set_hash_table);
  compute_hash_table (&set_hash_table);

  /* Free implicit_sets before peak usage.  */
  free (implicit_sets);
  implicit_sets = NULL;

  if (dump_file)
    dump_hash_table (dump_file, "SET", &set_hash_table);
  if (set_hash_table.n_elems > 0)
    {
      basic_block bb;
      auto_vec<rtx_insn *> uncond_traps;

      alloc_cprop_mem (last_basic_block_for_fn (cfun),
                       set_hash_table.n_elems);
      compute_cprop_data ();

      free (implicit_set_indexes);
      implicit_set_indexes = NULL;

      /* Allocate vars to track sets of regs.  */
      reg_set_bitmap = ALLOC_REG_SET (NULL);

      FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb->next_bb,
                      EXIT_BLOCK_PTR_FOR_FN (cfun),
                      next_bb)
        {
          bool seen_uncond_trap = false;
          rtx_insn *insn;

          /* Reset tables used to keep track of what's still valid [since
             the start of the block].  */
          reset_opr_set_tables ();

          FOR_BB_INSNS (bb, insn)
            if (INSN_P (insn))
              {
                bool was_uncond_trap
                  = (GET_CODE (PATTERN (insn)) == TRAP_IF
                     && XEXP (PATTERN (insn), 0) == const1_rtx);

                changed |= cprop_insn (insn);

                /* Keep track of everything modified by this insn.  */
                /* ??? Need to be careful w.r.t. mods done to INSN.
                       Don't call mark_oprs_set if we turned the
                       insn into a NOTE, or deleted the insn.  */
                if (! NOTE_P (insn) && ! insn->deleted ())
                  mark_oprs_set (insn);

                if (!was_uncond_trap
                    && GET_CODE (PATTERN (insn)) == TRAP_IF
                    && XEXP (PATTERN (insn), 0) == const1_rtx)
                  {
                    /* If we have already seen an unconditional trap
                       earlier, the rest of the bb is going to be removed
                       as unreachable.  Just turn it into a note, so that
                       RTL verification doesn't complain about it before
                       it is finally removed.  */
                    if (seen_uncond_trap)
                      set_insn_deleted (insn);
                    else
                      {
                        seen_uncond_trap = true;
                        uncond_traps.safe_push (insn);
                      }
                  }
              }
        }

      /* Make sure bypass_conditional_jumps will ignore not just its new
         basic blocks, but also the ones after unconditional traps (those are
         unreachable and will be eventually removed as such).  */
      bypass_last_basic_block = last_basic_block_for_fn (cfun);

      while (!uncond_traps.is_empty ())
        {
          rtx_insn *insn = uncond_traps.pop ();
          basic_block to_split = BLOCK_FOR_INSN (insn);
          remove_edge (split_block (to_split, insn));
          emit_barrier_after_bb (to_split);
        }

      changed |= bypass_conditional_jumps ();

      FREE_REG_SET (reg_set_bitmap);
      free_cprop_mem ();
    }
  else
    {
      free (implicit_set_indexes);
      implicit_set_indexes = NULL;
    }

  free_hash_table (&set_hash_table);
  obstack_free (&cprop_obstack, NULL);

  if (dump_file)
    {
      fprintf (dump_file, "CPROP of %s, %d basic blocks, %d bytes needed, ",
               current_function_name (), n_basic_blocks_for_fn (cfun),
               bytes_used);
      fprintf (dump_file, "%d local const props, %d local copy props, ",
               local_const_prop_count, local_copy_prop_count);
      fprintf (dump_file, "%d global const props, %d global copy props\n\n",
               global_const_prop_count, global_copy_prop_count);
    }

  return changed;
}

/* All the passes implemented in this file.  Each pass has its
   own gate and execute function, and at the end of the file a
   pass definition for passes.cc.

   We do not construct an accurate cfg in functions which call
   setjmp, so none of these passes runs if the function calls
   setjmp.
   FIXME: Should just handle setjmp via REG_SETJMP notes.  */

static unsigned int
execute_rtl_cprop (void)
{
  int changed;
  delete_unreachable_blocks ();
  df_set_flags (DF_LR_RUN_DCE);
  df_analyze ();
  changed = one_cprop_pass ();
  flag_rerun_cse_after_global_opts |= changed;
  if (changed)
    cleanup_cfg (CLEANUP_CFG_CHANGED);
  return 0;
}

namespace {

const pass_data pass_data_rtl_cprop =
{
  RTL_PASS, /* type */
  "cprop", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_CPROP, /* tv_id */
  PROP_cfglayout, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_df_finish, /* todo_flags_finish */
};

class pass_rtl_cprop : public rtl_opt_pass
{
public:
  pass_rtl_cprop (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_rtl_cprop, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () final override { return new pass_rtl_cprop (m_ctxt); }
  bool gate (function *fun) final override
    {
      return optimize > 0 && flag_gcse
        && !fun->calls_setjmp
        && dbg_cnt (cprop);
    }

  unsigned int execute (function *) final override
    {
      return execute_rtl_cprop ();
    }

}; // class pass_rtl_cprop

} // anon namespace

rtl_opt_pass *
make_pass_rtl_cprop (gcc::context *ctxt)
{
  return new pass_rtl_cprop (ctxt);
}