File: build/gcc/sel-sched-ir.cc
Warning: line 5822, column 3: Use of memory after it is freed
/* Instruction scheduling pass.  Selective scheduler and pipeliner.
   Copyright (C) 2006-2023 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "cfghooks.h"
#include "tree.h"
#include "rtl.h"
#include "df.h"
#include "memmodel.h"
#include "tm_p.h"
#include "cfgrtl.h"
#include "cfganal.h"
#include "cfgbuild.h"
#include "insn-config.h"
#include "insn-attr.h"
#include "recog.h"
#include "target.h"
#include "sched-int.h"
#include "emit-rtl.h"  /* FIXME: Can go away once crtl is moved to rtl.h.  */

#ifdef INSN_SCHEDULING
#include "regset.h"
#include "cfgloop.h"
#include "sel-sched-ir.h"
/* We don't have to use it except for sel_print_insn.  */
#include "sel-sched-dump.h"

/* A vector holding bb info for whole scheduling pass.  */
vec<sel_global_bb_info_def> sel_global_bb_info;

/* A vector holding bb info.  */
vec<sel_region_bb_info_def> sel_region_bb_info;

/* A pool for allocating all lists.  */
object_allocator<_list_node> sched_lists_pool ("sel-sched-lists");

/* This contains information about successors for compute_av_set.  */
struct succs_info current_succs;

/* Data structure to describe interaction with the generic scheduler utils.  */
static struct common_sched_info_def sel_common_sched_info;

/* The loop nest being pipelined.  */
class loop *current_loop_nest;

/* LOOP_NESTS is a vector containing the corresponding loop nest for
   each region.  */
static vec<loop_p> loop_nests;

/* Saves blocks already in loop regions, indexed by bb->index.  */
static sbitmap bbs_in_loop_rgns = NULL;

/* CFG hooks that are saved before changing create_basic_block hook.  */
static struct cfg_hooks orig_cfg_hooks;


/* Array containing reverse topological index of function basic blocks,
   indexed by BB->INDEX.  */
static int *rev_top_order_index = NULL;

/* Length of the above array.  */
static int rev_top_order_index_len = -1;

/* A regset pool structure.  */
static struct
{
  /* The stack to which regsets are returned.  */
  regset *v;

  /* Its pointer.  */
  int n;

  /* Its size.  */
  int s;

  /* In VV we save all generated regsets so that, when destructing the
     pool, we can compare it with V and check that every regset was returned
     back to pool.  */
  regset *vv;

  /* The pointer of VV stack.  */
  int nn;

  /* Its size.  */
  int ss;

  /* The difference between allocated and returned regsets.  */
  int diff;
} regset_pool = { NULL, 0, 0, NULL, 0, 0, 0 };

/* This represents the nop pool.  */
static struct
{
  /* The vector which holds previously emitted nops.  */
  insn_t *v;

  /* Its pointer.  */
  int n;

  /* Its size.  */
  int s;
} nop_pool = { NULL, 0, 0 };

/* The pool for basic block notes.  */
static vec<rtx_note *> bb_note_pool;

/* A NOP pattern used to emit placeholder insns.  */
rtx nop_pattern = NULL_RTX;
/* A special instruction that resides in EXIT_BLOCK.
   EXIT_INSN is successor of the insns that lead to EXIT_BLOCK.  */
rtx_insn *exit_insn = NULL;
/* TRUE if, while scheduling the current region, which is a loop, its
   preheader was removed.  */
bool preheader_removed = false;


/* Forward static declarations.  */
static void fence_clear (fence_t);

static void deps_init_id (idata_t, insn_t, bool);
static void init_id_from_df (idata_t, insn_t, bool);
static expr_t set_insn_init (expr_t, vinsn_t, int);

static void cfg_preds (basic_block, insn_t **, int *);
static void prepare_insn_expr (insn_t, int);
static void free_history_vect (vec<expr_history_def> &);

static void move_bb_info (basic_block, basic_block);
static void remove_empty_bb (basic_block, bool);
static void sel_merge_blocks (basic_block, basic_block);
static void sel_remove_loop_preheader (void);
static bool bb_has_removable_jump_to_p (basic_block, basic_block);

static bool insn_is_the_only_one_in_bb_p (insn_t);
static void create_initial_data_sets (basic_block);

static void free_av_set (basic_block);
static void invalidate_av_set (basic_block);
static void extend_insn_data (void);
static void sel_init_new_insn (insn_t, int, int = -1);
static void finish_insns (void);

/* Various list functions.  */

/* Copy an instruction list L.  */
ilist_t
ilist_copy (ilist_t l)
{
  ilist_t head = NULL, *tailp = &head;

  while (l)
    {
      ilist_add (tailp, ILIST_INSN (l));
      tailp = &ILIST_NEXT (*tailp);
      l = ILIST_NEXT (l);
    }

  return head;
}

/* Invert an instruction list L.  */
ilist_t
ilist_invert (ilist_t l)
{
  ilist_t res = NULL;

  while (l)
    {
      ilist_add (&res, ILIST_INSN (l));
      l = ILIST_NEXT (l);
    }

  return res;
}

/* Add a new boundary to the LP list with parameters TO, PTR, and DC.  */
void
blist_add (blist_t *lp, insn_t to, ilist_t ptr, deps_t dc)
{
  bnd_t bnd;

  _list_add (lp);
  bnd = BLIST_BND (*lp);

  BND_TO (bnd) = to;
  BND_PTR (bnd) = ptr;
  BND_AV (bnd) = NULL;
  BND_AV1 (bnd) = NULL;
  BND_DC (bnd) = dc;
}

/* Remove the list node pointed to by LP.  */
void
blist_remove (blist_t *lp)
{
  bnd_t b = BLIST_BND (*lp);

  av_set_clear (&BND_AV (b));
  av_set_clear (&BND_AV1 (b));
  ilist_clear (&BND_PTR (b));

  _list_remove (lp);
}

/* Init a fence tail L.  */
void
flist_tail_init (flist_tail_t l)
{
  FLIST_TAIL_HEAD (l) = NULL;
  FLIST_TAIL_TAILP (l) = &FLIST_TAIL_HEAD (l);
}

/* Try to find fence corresponding to INSN in L.  */
fence_t
flist_lookup (flist_t l, insn_t insn)
{
  while (l)
    {
      if (FENCE_INSN (FLIST_FENCE (l)) == insn)
        return FLIST_FENCE (l);

      l = FLIST_NEXT (l);
    }

  return NULL;
}

/* Init the fields of F before running fill_insns.  */
static void
init_fence_for_scheduling (fence_t f)
{
  FENCE_BNDS (f) = NULL;
  FENCE_PROCESSED_P (f) = false;
  FENCE_SCHEDULED_P (f) = false;
}

/* Add new fence consisting of INSN and STATE to the list pointed to by LP.  */
static void
flist_add (flist_t *lp, insn_t insn, state_t state, deps_t dc, void *tc,
           insn_t last_scheduled_insn, vec<rtx_insn *, va_gc> *executing_insns,
           int *ready_ticks, int ready_ticks_size, insn_t sched_next,
           int cycle, int cycle_issued_insns, int issue_more,
           bool starts_cycle_p, bool after_stall_p)
{
  fence_t f;

  _list_add (lp);
  f = FLIST_FENCE (*lp);

  FENCE_INSN (f) = insn;

  gcc_assert (state != NULL);
  FENCE_STATE (f) = state;

  FENCE_CYCLE (f) = cycle;
  FENCE_ISSUED_INSNS (f) = cycle_issued_insns;
  FENCE_STARTS_CYCLE_P (f) = starts_cycle_p;
  FENCE_AFTER_STALL_P (f) = after_stall_p;

  gcc_assert (dc != NULL);
  FENCE_DC (f) = dc;

  gcc_assert (tc != NULL || targetm.sched.alloc_sched_context == NULL);
  FENCE_TC (f) = tc;

  FENCE_LAST_SCHEDULED_INSN (f) = last_scheduled_insn;
  FENCE_ISSUE_MORE (f) = issue_more;
  FENCE_EXECUTING_INSNS (f) = executing_insns;
  FENCE_READY_TICKS (f) = ready_ticks;
  FENCE_READY_TICKS_SIZE (f) = ready_ticks_size;
  FENCE_SCHED_NEXT (f) = sched_next;

  init_fence_for_scheduling (f);
}

/* Remove the head node of the list pointed to by LP.  */
static void
flist_remove (flist_t *lp)
{
  if (FENCE_INSN (FLIST_FENCE (*lp)))
    fence_clear (FLIST_FENCE (*lp));
  _list_remove (lp);
}

/* Clear the fence list pointed to by LP.  */
void
flist_clear (flist_t *lp)
{
  while (*lp)
    flist_remove (lp);
}
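
/* An illustrative sketch (hypothetical helper, kept out of the build with
   #if 0): querying and tearing down a fence list with the helpers above.  */
#if 0
static void
fence_list_example (flist_t fences, insn_t insn)
{
  /* Find the fence whose scheduling point is INSN, if any.  */
  fence_t f = flist_lookup (fences, insn);

  if (f != NULL)
    gcc_assert (FENCE_INSN (f) == insn);

  /* flist_clear calls flist_remove until the list is empty; each
     removal runs fence_clear, releasing the fence's state, deps
     context, target context and ready-ticks array.  */
  flist_clear (&fences);
}
#endif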

/* Add ORIGINAL_INSN to the def list DL, honoring CROSSED_CALL_ABIS.  */
void
def_list_add (def_list_t *dl, insn_t original_insn,
              unsigned int crossed_call_abis)
{
  def_t d;

  _list_add (dl);
  d = DEF_LIST_DEF (*dl);

  d->orig_insn = original_insn;
  d->crossed_call_abis = crossed_call_abis;
}


/* Functions to work with target contexts.  */

/* Bulk target context.  It is convenient for debugging purposes to ensure
   that there are no uninitialized (null) target contexts.  */
static tc_t bulk_tc = (tc_t) 1;

/* Target hooks wrappers.  In the future we can provide some default
   implementations for them.  */

/* Allocate a store for the target context.  */
static tc_t
alloc_target_context (void)
{
  return (targetm.sched.alloc_sched_context
          ? targetm.sched.alloc_sched_context () : bulk_tc);
}

/* Init target context TC.
   If CLEAN_P is true, then make TC as it is at the beginning of the
   scheduler.  Otherwise, copy the current backend context to TC.  */
static void
init_target_context (tc_t tc, bool clean_p)
{
  if (targetm.sched.init_sched_context)
    targetm.sched.init_sched_context (tc, clean_p);
}

/* Allocate and initialize a target context.  Meaning of CLEAN_P is the same
   as in init_target_context ().  */
tc_t
create_target_context (bool clean_p)
{
  tc_t tc = alloc_target_context ();

  init_target_context (tc, clean_p);
  return tc;
}

/* Copy TC to the current backend context.  */
void
set_target_context (tc_t tc)
{
  if (targetm.sched.set_sched_context)
    targetm.sched.set_sched_context (tc);
}

/* TC is about to be destroyed.  Free any internal data.  */
static void
clear_target_context (tc_t tc)
{
  if (targetm.sched.clear_sched_context)
    targetm.sched.clear_sched_context (tc);
}

/* Clear and free it.  */
static void
delete_target_context (tc_t tc)
{
  clear_target_context (tc);

  if (targetm.sched.free_sched_context)
    targetm.sched.free_sched_context (tc);
}

/* Make a copy of FROM in TO.
   NB: Maybe this should be a hook.  */
static void
copy_target_context (tc_t to, tc_t from)
{
  /* Save the current backend context in TMP, ...  */
  tc_t tmp = create_target_context (false);

  /* ... make FROM the current backend context and capture it in TO, ...  */
  set_target_context (from);
  init_target_context (to, false);

  /* ... then restore the saved backend context and drop the copy.  */
  set_target_context (tmp);
  delete_target_context (tmp);
}

/* Create a copy of TC.  */
static tc_t
create_copy_of_target_context (tc_t tc)
{
  tc_t copy = alloc_target_context ();

  copy_target_context (copy, tc);

  return copy;
}

/* Clear TC and initialize it according to CLEAN_P.  The meaning of CLEAN_P
   is the same as in init_target_context ().  */
void
reset_target_context (tc_t tc, bool clean_p)
{
  clear_target_context (tc);
  init_target_context (tc, clean_p);
}
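
/* An illustrative sketch (hypothetical, kept out of the build with #if 0):
   the save/restore discipline the wrappers above support.  A context made
   with CLEAN_P == false snapshots the current backend state; CLEAN_P == true
   requests a pristine one.  */
#if 0
static void
target_context_example (void)
{
  /* Snapshot the current backend scheduling context.  */
  tc_t saved = create_target_context (false);

  /* ... scheduling proceeds and mutates the backend context ...  */

  /* Reinstall the snapshot and dispose of it.  */
  set_target_context (saved);
  delete_target_context (saved);
}
#endif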

/* Functions to work with dependence contexts.
   Dc (aka deps context, aka deps_t, aka class deps_desc *) is short for
   dependence context.  It accumulates information about processed insns to
   decide if current insn is dependent on the processed ones.  */

/* Make a copy of FROM in TO.  */
static void
copy_deps_context (deps_t to, deps_t from)
{
  init_deps (to, false);
  deps_join (to, from);
}

/* Allocate store for dep context.  */
static deps_t
alloc_deps_context (void)
{
  return XNEW (class deps_desc);
}

/* Allocate and initialize dep context.  */
static deps_t
create_deps_context (void)
{
  deps_t dc = alloc_deps_context ();

  init_deps (dc, false);
  return dc;
}

/* Create a copy of FROM.  */
static deps_t
create_copy_of_deps_context (deps_t from)
{
  deps_t to = alloc_deps_context ();

  copy_deps_context (to, from);
  return to;
}

/* Clean up internal data of DC.  */
static void
clear_deps_context (deps_t dc)
{
  free_deps (dc);
}

/* Clear and free DC.  */
static void
delete_deps_context (deps_t dc)
{
  clear_deps_context (dc);
  free (dc);
}

/* Clear and init DC.  */
static void
reset_deps_context (deps_t dc)
{
  clear_deps_context (dc);
  init_deps (dc, false);
}

/* This structure describes the dependence analysis hooks for advancing
   dependence context.  */
static struct sched_deps_info_def advance_deps_context_sched_deps_info =
  {
    NULL,

    NULL, /* start_insn */
    NULL, /* finish_insn */
    NULL, /* start_lhs */
    NULL, /* finish_lhs */
    NULL, /* start_rhs */
    NULL, /* finish_rhs */
    haifa_note_reg_set,
    haifa_note_reg_clobber,
    haifa_note_reg_use,
    NULL, /* note_mem_dep */
    NULL, /* note_dep */

    0, 0, 0
  };

/* Process INSN and add its impact on DC.  */
void
advance_deps_context (deps_t dc, insn_t insn)
{
  sched_deps_info = &advance_deps_context_sched_deps_info;
  deps_analyze_insn (dc, insn);
}


/* Functions to work with DFA states.  */

/* Allocate store for a DFA state.  */
static state_t
state_alloc (void)
{
  return xmalloc (dfa_state_size);
}

/* Allocate and initialize DFA state.  */
static state_t
state_create (void)
{
  state_t state = state_alloc ();

  state_reset (state);
  advance_state (state);
  return state;
}

/* Free DFA state.  */
static void
state_free (state_t state)
{
  free (state);
}

/* Make a copy of FROM in TO.  */
static void
state_copy (state_t to, state_t from)
{
  memcpy (to, from, dfa_state_size);
}

/* Create a copy of FROM.  */
static state_t
state_create_copy (state_t from)
{
  state_t to = state_alloc ();

  state_copy (to, from);
  return to;
}
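
/* An illustrative sketch (hypothetical, kept out of the build with #if 0):
   DFA states are opaque buffers of dfa_state_size bytes, so cloning is a
   plain memcpy and freeing needs no destructor.  A fence that splits
   across successors clones its state like this:  */
#if 0
static void
dfa_state_example (state_t parent)
{
  state_t child = state_create_copy (parent);

  /* ... advance CHILD independently of PARENT ...  */

  state_free (child);
}
#endif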


/* Functions to work with fences.  */

/* Clear the fence.  */
static void
fence_clear (fence_t f)
{
  state_t s = FENCE_STATE (f);
  deps_t dc = FENCE_DC (f);
  void *tc = FENCE_TC (f);

  ilist_clear (&FENCE_BNDS (f));

  gcc_assert ((s != NULL && dc != NULL && tc != NULL)
              || (s == NULL && dc == NULL && tc == NULL));

  free (s);

  if (dc != NULL)
    delete_deps_context (dc);

  if (tc != NULL)
    delete_target_context (tc);
  vec_free (FENCE_EXECUTING_INSNS (f));
  free (FENCE_READY_TICKS (f));
  FENCE_READY_TICKS (f) = NULL;
}

/* Init a list of fences with successors of OLD_FENCE.  */
void
init_fences (insn_t old_fence)
{
  insn_t succ;
  succ_iterator si;
  bool first = true;
  int ready_ticks_size = get_max_uid () + 1;

  FOR_EACH_SUCC_1 (succ, si, old_fence,
                   SUCCS_NORMAL | SUCCS_SKIP_TO_LOOP_EXITS)
    {
      if (first)
        first = false;
      else
        gcc_assert (flag_sel_sched_pipelining_outer_loops);

      flist_add (&fences, succ,
                 state_create (),
                 create_deps_context () /* dc */,
                 create_target_context (true) /* tc */,
                 NULL /* last_scheduled_insn */,
                 NULL, /* executing_insns */
                 XCNEWVEC (int, ready_ticks_size), /* ready_ticks */
                 ready_ticks_size,
                 NULL /* sched_next */,
                 1 /* cycle */, 0 /* cycle_issued_insns */,
                 issue_rate, /* issue_more */
                 1 /* starts_cycle_p */, 0 /* after_stall_p */);
    }
}

/* Merge two fences (filling fields of fence F with resulting values) by
   the following rules: 1) state, target context and last scheduled insn are
   propagated from the fallthrough edge if it is available;
   2) deps context and cycle are propagated from the more probable edge;
   3) all other fields are set to corresponding constant values.

   INSN, STATE, DC, TC, LAST_SCHEDULED_INSN, EXECUTING_INSNS,
   READY_TICKS, READY_TICKS_SIZE, SCHED_NEXT, CYCLE, ISSUE_MORE
   and AFTER_STALL_P are the corresponding fields of the second fence.  */
static void
merge_fences (fence_t f, insn_t insn,
              state_t state, deps_t dc, void *tc,
              rtx_insn *last_scheduled_insn,
              vec<rtx_insn *, va_gc> *executing_insns,
              int *ready_ticks, int ready_ticks_size,
              rtx sched_next, int cycle, int issue_more, bool after_stall_p)
{
  insn_t last_scheduled_insn_old = FENCE_LAST_SCHEDULED_INSN (f);

  gcc_assert (sel_bb_head_p (FENCE_INSN (f))
              && !sched_next && !FENCE_SCHED_NEXT (f));

  /* Check if we can decide from which path the fences came.
     If we can't (or don't want to), reset everything.  */
  if (last_scheduled_insn == NULL
      || last_scheduled_insn_old == NULL
      /* This is the case when INSN is reachable on several paths from
         one insn (this can happen when pipelining of outer loops is on and
         there are two edges: one going around the inner loop and the other
         right through it; in such a case just reset everything).  */
      || last_scheduled_insn == last_scheduled_insn_old)
    {
      state_reset (FENCE_STATE (f));
      state_free (state);

      reset_deps_context (FENCE_DC (f));
      delete_deps_context (dc);

      reset_target_context (FENCE_TC (f), true);
      delete_target_context (tc);

      if (cycle > FENCE_CYCLE (f))
        FENCE_CYCLE (f) = cycle;

      FENCE_LAST_SCHEDULED_INSN (f) = NULL;
      FENCE_ISSUE_MORE (f) = issue_rate;
      vec_free (executing_insns);
      free (ready_ticks);
      if (FENCE_EXECUTING_INSNS (f))
        FENCE_EXECUTING_INSNS (f)->block_remove (0,
                                  FENCE_EXECUTING_INSNS (f)->length ());
      if (FENCE_READY_TICKS (f))
        memset (FENCE_READY_TICKS (f), 0, FENCE_READY_TICKS_SIZE (f));
    }
  else
    {
      edge edge_old = NULL, edge_new = NULL;
      edge candidate;
      succ_iterator si;
      insn_t succ;

      /* Find fallthrough edge.  */
      gcc_assert (BLOCK_FOR_INSN (insn)->prev_bb);
      candidate = find_fallthru_edge_from (BLOCK_FOR_INSN (insn)->prev_bb);

      if (!candidate
          || (candidate->src != BLOCK_FOR_INSN (last_scheduled_insn)
              && candidate->src != BLOCK_FOR_INSN (last_scheduled_insn_old)))
        {
          /* No fallthrough edge leading to basic block of INSN.  */
          state_reset (FENCE_STATE (f));
          state_free (state);

          reset_target_context (FENCE_TC (f), true);
          delete_target_context (tc);

          FENCE_LAST_SCHEDULED_INSN (f) = NULL;
          FENCE_ISSUE_MORE (f) = issue_rate;
        }
      else
        if (candidate->src == BLOCK_FOR_INSN (last_scheduled_insn))
          {
            state_free (FENCE_STATE (f));
            FENCE_STATE (f) = state;

            delete_target_context (FENCE_TC (f));
            FENCE_TC (f) = tc;

            FENCE_LAST_SCHEDULED_INSN (f) = last_scheduled_insn;
            FENCE_ISSUE_MORE (f) = issue_more;
          }
        else
          {
            /* Leave STATE, TC and LAST_SCHEDULED_INSN fields untouched.  */
            state_free (state);
            delete_target_context (tc);

            gcc_assert (BLOCK_FOR_INSN (insn)->prev_bb
                        != BLOCK_FOR_INSN (last_scheduled_insn));
          }

      /* Find edge of first predecessor (last_scheduled_insn_old->insn).  */
      FOR_EACH_SUCC_1 (succ, si, last_scheduled_insn_old,
                       SUCCS_NORMAL | SUCCS_SKIP_TO_LOOP_EXITS)
        {
          if (succ == insn)
            {
              /* No same successor allowed from several edges.  */
              gcc_assert (!edge_old);
              edge_old = si.e1;
            }
        }
      /* Find edge of second predecessor (last_scheduled_insn->insn).  */
      FOR_EACH_SUCC_1 (succ, si, last_scheduled_insn,
                       SUCCS_NORMAL | SUCCS_SKIP_TO_LOOP_EXITS)
        {
          if (succ == insn)
            {
              /* No same successor allowed from several edges.  */
              gcc_assert (!edge_new);
              edge_new = si.e1;
            }
        }

      /* Check if we can choose most probable predecessor.  */
      if (edge_old == NULL || edge_new == NULL)
        {
          reset_deps_context (FENCE_DC (f));
          delete_deps_context (dc);
          vec_free (executing_insns);
          free (ready_ticks);

          FENCE_CYCLE (f) = MAX (FENCE_CYCLE (f), cycle);
          if (FENCE_EXECUTING_INSNS (f))
            FENCE_EXECUTING_INSNS (f)->block_remove (0,
                                      FENCE_EXECUTING_INSNS (f)->length ());
          if (FENCE_READY_TICKS (f))
            memset (FENCE_READY_TICKS (f), 0, FENCE_READY_TICKS_SIZE (f));
        }
      else
        if (edge_new->probability > edge_old->probability)
          {
            delete_deps_context (FENCE_DC (f));
            FENCE_DC (f) = dc;
            vec_free (FENCE_EXECUTING_INSNS (f));
            FENCE_EXECUTING_INSNS (f) = executing_insns;
            free (FENCE_READY_TICKS (f));
            FENCE_READY_TICKS (f) = ready_ticks;
            FENCE_READY_TICKS_SIZE (f) = ready_ticks_size;
            FENCE_CYCLE (f) = cycle;
          }
        else
          {
            /* Leave DC and CYCLE untouched.  */
            delete_deps_context (dc);
            vec_free (executing_insns);
            free (ready_ticks);
          }
    }

  /* Fill remaining invariant fields.  */
  if (after_stall_p)
    FENCE_AFTER_STALL_P (f) = 1;

  FENCE_ISSUED_INSNS (f) = 0;
  FENCE_STARTS_CYCLE_P (f) = 1;
  FENCE_SCHED_NEXT (f) = NULL;
}

/* Add a new fence to NEW_FENCES list, initializing it from all
   other parameters.  */
static void
add_to_fences (flist_tail_t new_fences, insn_t insn,
               state_t state, deps_t dc, void *tc,
               rtx_insn *last_scheduled_insn,
               vec<rtx_insn *, va_gc> *executing_insns, int *ready_ticks,
               int ready_ticks_size, rtx_insn *sched_next, int cycle,
               int cycle_issued_insns, int issue_rate,
               bool starts_cycle_p, bool after_stall_p)
{
  fence_t f = flist_lookup (FLIST_TAIL_HEAD (new_fences), insn);

  if (! f)
    {
      flist_add (FLIST_TAIL_TAILP (new_fences), insn, state, dc, tc,
                 last_scheduled_insn, executing_insns, ready_ticks,
                 ready_ticks_size, sched_next, cycle, cycle_issued_insns,
                 issue_rate, starts_cycle_p, after_stall_p);

      FLIST_TAIL_TAILP (new_fences)
        = &FLIST_NEXT (*FLIST_TAIL_TAILP (new_fences));
    }
  else
    {
      merge_fences (f, insn, state, dc, tc, last_scheduled_insn,
                    executing_insns, ready_ticks, ready_ticks_size,
                    sched_next, cycle, issue_rate, after_stall_p);
    }
}

/* Move the first fence in the OLD_FENCES list to NEW_FENCES.  */
void
move_fence_to_fences (flist_t old_fences, flist_tail_t new_fences)
{
  fence_t f, old;
  flist_t *tailp = FLIST_TAIL_TAILP (new_fences);

  old = FLIST_FENCE (old_fences);
  f = flist_lookup (FLIST_TAIL_HEAD (new_fences),
                    FENCE_INSN (FLIST_FENCE (old_fences)));
  if (f)
    {
      merge_fences (f, old->insn, old->state, old->dc, old->tc,
                    old->last_scheduled_insn, old->executing_insns,
                    old->ready_ticks, old->ready_ticks_size,
                    old->sched_next, old->cycle, old->issue_more,
                    old->after_stall_p);
    }
  else
    {
      _list_add (tailp);
      FLIST_TAIL_TAILP (new_fences) = &FLIST_NEXT (*tailp);
      *FLIST_FENCE (*tailp) = *old;
      init_fence_for_scheduling (FLIST_FENCE (*tailp));
    }
  FENCE_INSN (old) = NULL;
}

/* Add a new fence to NEW_FENCES list and initialize most of its data
   as a clean one.  */
void
add_clean_fence_to_fences (flist_tail_t new_fences, insn_t succ, fence_t fence)
{
  int ready_ticks_size = get_max_uid () + 1;

  add_to_fences (new_fences,
                 succ, state_create (), create_deps_context (),
                 create_target_context (true),
                 NULL, NULL,
                 XCNEWVEC (int, ready_ticks_size), ready_ticks_size,
                 NULL, FENCE_CYCLE (fence) + 1,
                 0, issue_rate, 1, FENCE_AFTER_STALL_P (fence));
}

/* Add a new fence to NEW_FENCES list and initialize all of its data
   from FENCE and SUCC.  */
void
add_dirty_fence_to_fences (flist_tail_t new_fences, insn_t succ, fence_t fence)
{
  int * new_ready_ticks
    = XNEWVEC (int, FENCE_READY_TICKS_SIZE (fence));

  memcpy (new_ready_ticks, FENCE_READY_TICKS (fence),
          FENCE_READY_TICKS_SIZE (fence) * sizeof (int));
  add_to_fences (new_fences,
                 succ, state_create_copy (FENCE_STATE (fence)),
                 create_copy_of_deps_context (FENCE_DC (fence)),
                 create_copy_of_target_context (FENCE_TC (fence)),
                 FENCE_LAST_SCHEDULED_INSN (fence),
                 vec_safe_copy (FENCE_EXECUTING_INSNS (fence)),
                 new_ready_ticks,
                 FENCE_READY_TICKS_SIZE (fence),
                 FENCE_SCHED_NEXT (fence),
                 FENCE_CYCLE (fence),
                 FENCE_ISSUED_INSNS (fence),
                 FENCE_ISSUE_MORE (fence),
                 FENCE_STARTS_CYCLE_P (fence),
                 FENCE_AFTER_STALL_P (fence));
}
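
/* An illustrative sketch (hypothetical, kept out of the build with #if 0):
   how a scheduling step typically spawns successor fences.  add_to_fences
   either creates a fresh fence for SUCC or, if one already exists for it,
   lets merge_fences reconcile the two according to the rules above.  */
#if 0
static void
advance_fence_example (flist_tail_t new_fences, fence_t fence)
{
  insn_t succ;
  succ_iterator si;

  FOR_EACH_SUCC (succ, si, FENCE_INSN (fence))
    add_dirty_fence_to_fences (new_fences, succ, fence);
}
#endif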


/* Functions to work with regset and nop pools.  */

/* Return a regset from the pool.  It might have some of the bits set
   from the previous usage.  */
regset
get_regset_from_pool (void)
{
  regset rs;

  if (regset_pool.n != 0)
    rs = regset_pool.v[--regset_pool.n];
  else
    /* We need to create the regset.  */
    {
      rs = ALLOC_REG_SET (&reg_obstack);

      if (regset_pool.nn == regset_pool.ss)
        regset_pool.vv = XRESIZEVEC (regset, regset_pool.vv,
                                     (regset_pool.ss = 2 * regset_pool.ss + 1));
      regset_pool.vv[regset_pool.nn++] = rs;
    }

  regset_pool.diff++;

  return rs;
}

/* Same as above, but returns the empty regset.  */
regset
get_clear_regset_from_pool (void)
{
  regset rs = get_regset_from_pool ();

  CLEAR_REG_SET (rs);
  return rs;
}

/* Return regset RS to the pool for future use.  */
void
return_regset_to_pool (regset rs)
{
  gcc_assert (rs);
  regset_pool.diff--;

  if (regset_pool.n == regset_pool.s)
    regset_pool.v = XRESIZEVEC (regset, regset_pool.v,
                                (regset_pool.s = 2 * regset_pool.s + 1));
  regset_pool.v[regset_pool.n++] = rs;
}
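
/* An illustrative sketch (hypothetical, kept out of the build with #if 0):
   the pool contract.  Each get_regset_from_pool must be balanced by a
   return_regset_to_pool; the pool's DIFF counter tracks the imbalance and
   free_regset_pool asserts that it is zero at the end of the pass.  */
#if 0
static void
regset_pool_example (void)
{
  regset used = get_clear_regset_from_pool ();  /* diff++  */

  /* ... accumulate register numbers into USED ...  */

  return_regset_to_pool (used);                 /* diff--  */
}
#endif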

/* This is used as a qsort callback for sorting regset pool stacks.
   X and XX are addresses of two regsets.  They are never equal.  */
static int
cmp_v_in_regset_pool (const void *x, const void *xx)
{
  uintptr_t r1 = (uintptr_t) *((const regset *) x);
  uintptr_t r2 = (uintptr_t) *((const regset *) xx);
  if (r1 > r2)
    return 1;
  else if (r1 < r2)
    return -1;
  gcc_unreachable ();
}

/* Free the regset pool possibly checking for memory leaks.  */
void
free_regset_pool (void)
{
  if (flag_checking)
    {
      regset *v = regset_pool.v;
      int i = 0;
      int n = regset_pool.n;

      regset *vv = regset_pool.vv;
      int ii = 0;
      int nn = regset_pool.nn;

      int diff = 0;

      gcc_assert (n <= nn);

      /* Sort both vectors so it will be possible to compare them.  */
      qsort (v, n, sizeof (*v), cmp_v_in_regset_pool);
      qsort (vv, nn, sizeof (*vv), cmp_v_in_regset_pool);

      while (ii < nn)
        {
          if (v[i] == vv[ii])
            i++;
          else
            /* VV[II] was lost.  */
            diff++;

          ii++;
        }

      gcc_assert (diff == regset_pool.diff);
    }

  /* If not true - we have a memory leak.  */
  gcc_assert (regset_pool.diff == 0);

  while (regset_pool.n)
    {
      --regset_pool.n;
      FREE_REG_SET (regset_pool.v[regset_pool.n]);
    }

  free (regset_pool.v);
  regset_pool.v = NULL;
  regset_pool.s = 0;

  free (regset_pool.vv);
  regset_pool.vv = NULL;
  regset_pool.nn = 0;
  regset_pool.ss = 0;

  regset_pool.diff = 0;
}


/* Functions to work with nop pools.  NOP insns are used as temporary
   placeholders of the insns being scheduled to allow correct update of
   the data sets.  When update is finished, NOPs are deleted.  */

/* A vinsn that is used to represent a nop.  This vinsn is shared among all
   nops sel-sched generates.  */
static vinsn_t nop_vinsn = NULL;

/* Emit a nop before INSN, taking it from pool.  */
insn_t
get_nop_from_pool (insn_t insn)
{
  rtx nop_pat;
  insn_t nop;
  bool old_p = nop_pool.n != 0;
  int flags;

  if (old_p)
    nop_pat = nop_pool.v[--nop_pool.n];
  else
    nop_pat = nop_pattern;

  nop = emit_insn_before (nop_pat, insn);

  if (old_p)
    flags = INSN_INIT_TODO_SSID;
  else
    flags = INSN_INIT_TODO_LUID | INSN_INIT_TODO_SSID;

  set_insn_init (INSN_EXPR (insn), nop_vinsn, INSN_SEQNO (insn));
  sel_init_new_insn (nop, flags);

  return nop;
}

/* Remove NOP from the instruction stream and return it to the pool.  */
void
return_nop_to_pool (insn_t nop, bool full_tidying)
{
  gcc_assert (INSN_IN_STREAM_P (nop));
  sel_remove_insn (nop, false, full_tidying);

  /* We'll recycle this nop.  */
  nop->set_undeleted ();

  if (nop_pool.n == nop_pool.s)
    nop_pool.v = XRESIZEVEC (rtx_insn *, nop_pool.v,
                             (nop_pool.s = 2 * nop_pool.s + 1));
  nop_pool.v[nop_pool.n++] = nop;
}

/* Free the nop pool.  */
void
free_nop_pool (void)
{
  nop_pool.n = 0;
  nop_pool.s = 0;
  free (nop_pool.v);
  nop_pool.v = NULL;
}
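
/* An illustrative sketch (hypothetical, kept out of the build with #if 0):
   the nop pool round trip.  A nop emitted before INSN stands in for an
   insn while the data sets are updated, then goes back to the pool.  */
#if 0
static void
nop_pool_example (insn_t insn)
{
  insn_t nop = get_nop_from_pool (insn);  /* Emitted before INSN.  */

  /* ... recompute availability / liveness with NOP as a placeholder ...  */

  return_nop_to_pool (nop, true);  /* Unlinked from the stream, recycled.  */
}
#endif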


/* Skip unspec to support ia64 speculation.  Called from rtx_equal_p_cb.
   The callback is given two rtxes XX and YY and writes the new rtxes
   to NX and NY in case some needs to be skipped.  */
static int
skip_unspecs_callback (const_rtx *xx, const_rtx *yy, rtx *nx, rtx* ny)
{
  const_rtx x = *xx;
  const_rtx y = *yy;

  if (GET_CODE (x) == UNSPEC
      && (targetm.sched.skip_rtx_p == NULL
          || targetm.sched.skip_rtx_p (x)))
    {
      *nx = XVECEXP (x, 0, 0);
      *ny = CONST_CAST_RTX (y);
      return 1;
    }

  if (GET_CODE (y) == UNSPEC
      && (targetm.sched.skip_rtx_p == NULL
          || targetm.sched.skip_rtx_p (y)))
    {
      *nx = CONST_CAST_RTX (x);
      *ny = XVECEXP (y, 0, 0);
      return 1;
    }

  return 0;
}

/* Callback, called from hash_rtx_cb.  Helps to hash UNSPEC rtx X in a correct
   way to support ia64 speculation.  When changes are needed, new rtx X and new
   mode NMODE are written, and the callback returns true.  */
static int
hash_with_unspec_callback (const_rtx x, machine_mode mode ATTRIBUTE_UNUSED,
                           rtx *nx, machine_mode* nmode)
{
  if (GET_CODE (x) == UNSPEC
      && targetm.sched.skip_rtx_p
      && targetm.sched.skip_rtx_p (x))
    {
      *nx = XVECEXP (x, 0, 0);
      *nmode = VOIDmode;
      return 1;
    }

  return 0;
}

/* Return true if LHS and RHS are ok to be scheduled separately.  */
static bool
lhs_and_rhs_separable_p (rtx lhs, rtx rhs)
{
  if (lhs == NULL || rhs == NULL)
    return false;

  /* Do not schedule constants as rhs: no point to use reg, if const
     can be used.  Moreover, scheduling const as rhs may lead to mode
     mismatch because consts don't have modes but they could be merged
     from branches where the same const is used in different modes.  */
  if (CONSTANT_P (rhs))
    return false;

  /* ??? Do not rename predicate registers to avoid ICEs in bundling.  */
  if (COMPARISON_P (rhs))
    return false;

  /* Do not allow single REG to be an rhs.  */
  if (REG_P (rhs))
    return false;

  /* See comment at find_used_regs_1 (*1) for explanation of this
     restriction.  */
  /* FIXME: remove this later.  */
  if (MEM_P (lhs))
    return false;

  /* This will filter all tricky things like ZERO_EXTRACT etc.
     For now we don't handle it.  */
  if (!REG_P (lhs) && !MEM_P (lhs))
    return false;

  return true;
}
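
/* For instance, under the checks above (register numbers arbitrary,
   for illustration only):

     (set (reg:SI 100) (plus:SI (reg:SI 101) (reg:SI 102)))

   is separable: the REG lhs and the non-trivial rhs can be scheduled
   separately.  By contrast,

     (set (reg:SI 100) (reg:SI 101))               <- bare REG as rhs
     (set (reg:SI 100) (const_int 42))             <- constant rhs
     (set (mem:SI (reg:SI 103)) (reg:SI 101))      <- MEM lhs

   each fail one of the tests and are treated as non-separable.  */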

/* Initialize vinsn VI for INSN.  Only for use from vinsn_create ().  When
   FORCE_UNIQUE_P is true, the resulting vinsn will not be clonable.  This is
   used e.g. for insns from recovery blocks.  */
static void
vinsn_init (vinsn_t vi, insn_t insn, bool force_unique_p)
{
  hash_rtx_callback_function hrcf;
  int insn_class;

  VINSN_INSN_RTX (vi) = insn;
  VINSN_COUNT (vi) = 0;
  vi->cost = -1;

  if (INSN_NOP_P (insn))
    return;

  if (DF_INSN_UID_SAFE_GET (INSN_UID (insn)) != NULL)
    init_id_from_df (VINSN_ID (vi), insn, force_unique_p);
  else
    deps_init_id (VINSN_ID (vi), insn, force_unique_p);

  /* Hash vinsn depending on whether it is separable or not.  */
  hrcf = targetm.sched.skip_rtx_p ? hash_with_unspec_callback : NULL;
  if (VINSN_SEPARABLE_P (vi))
    {
      rtx rhs = VINSN_RHS (vi);

      VINSN_HASH (vi) = hash_rtx_cb (rhs, GET_MODE (rhs),
                                     NULL, NULL, false, hrcf);
      VINSN_HASH_RTX (vi) = hash_rtx_cb (VINSN_PATTERN (vi),
                                         VOIDmode, NULL, NULL,
                                         false, hrcf);
    }
  else
    {
      VINSN_HASH (vi) = hash_rtx_cb (VINSN_PATTERN (vi), VOIDmode,
                                     NULL, NULL, false, hrcf);
      VINSN_HASH_RTX (vi) = VINSN_HASH (vi);
    }

  insn_class = haifa_classify_insn (insn);
  if (insn_class >= 2
      && (!targetm.sched.get_insn_spec_ds
          || ((targetm.sched.get_insn_spec_ds (insn) & BEGIN_CONTROL)
              == 0)))
    VINSN_MAY_TRAP_P (vi) = true;
  else
    VINSN_MAY_TRAP_P (vi) = false;
}

/* Indicate that VI has become the part of an rtx object.  */
void
vinsn_attach (vinsn_t vi)
{
  /* Assert that VI is not pending for deletion.  */
  gcc_assert (VINSN_INSN_RTX (vi));

  VINSN_COUNT (vi)++;
}

/* Create and init VI from the INSN.  Use FORCE_UNIQUE_P for determining the
   correct VINSN_TYPE (VI).  */
static vinsn_t
vinsn_create (insn_t insn, bool force_unique_p)
{
  vinsn_t vi = XCNEW (struct vinsn_def);

  vinsn_init (vi, insn, force_unique_p);
  return vi;
}

/* Return a copy of VI.  When REATTACH_P is true, detach VI and attach
   the copy.  */
vinsn_t
vinsn_copy (vinsn_t vi, bool reattach_p)
{
  rtx_insn *copy;
  bool unique = VINSN_UNIQUE_P (vi);
  vinsn_t new_vi;

  copy = create_copy_of_insn_rtx (VINSN_INSN_RTX (vi));
  new_vi = create_vinsn_from_insn_rtx (copy, unique);
  if (reattach_p)
    {
      vinsn_detach (vi);
      vinsn_attach (new_vi);
    }

  return new_vi;
}

/* Delete the VI vinsn and free its data.  */
static void
vinsn_delete (vinsn_t vi)
{
  gcc_assert (VINSN_COUNT (vi) == 0);

  if (!INSN_NOP_P (VINSN_INSN_RTX (vi)))
    {
      return_regset_to_pool (VINSN_REG_SETS (vi));
      return_regset_to_pool (VINSN_REG_USES (vi));
      return_regset_to_pool (VINSN_REG_CLOBBERS (vi));
    }

  free (vi);
}

/* Indicate that VI is no longer a part of some rtx object.
   Remove VI if it is no longer needed.  */
void
vinsn_detach (vinsn_t vi)
{
  gcc_assert (VINSN_COUNT (vi) > 0);

  if (--VINSN_COUNT (vi) == 0)
    vinsn_delete (vi);
}
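
/* An illustrative sketch (hypothetical, kept out of the build with #if 0):
   the vinsn reference-counting discipline.  Every holder attaches the
   vinsn; the last detach triggers vinsn_delete, which returns the id's
   regsets to the regset pool.  */
#if 0
static void
vinsn_refcount_example (insn_t insn)
{
  vinsn_t vi = vinsn_create (insn, false);

  vinsn_attach (vi);  /* First holder: VINSN_COUNT (vi) == 1.  */
  vinsn_attach (vi);  /* Second holder: count == 2.  */

  vinsn_detach (vi);  /* Count drops to 1; VI stays alive.  */
  vinsn_detach (vi);  /* Count hits 0: vinsn_delete runs.  */
}
#endif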

/* Returns TRUE if VI is a branch.  */
bool
vinsn_cond_branch_p (vinsn_t vi)
{
  insn_t insn;

  if (!VINSN_UNIQUE_P (vi))
    return false;

  insn = VINSN_INSN_RTX (vi);
  if (BB_END (BLOCK_FOR_INSN (insn)) != insn)
    return false;

  return control_flow_insn_p (insn);
}
1297 | ||||
1298 | /* Return latency of INSN. */ | |||
1299 | static int | |||
1300 | sel_insn_rtx_cost (rtx_insn *insn) | |||
1301 | { | |||
1302 | int cost; | |||
1303 | ||||
1304 | /* A USE insn, or something else we don't need to | |||
1305 | understand. We can't pass these directly to | |||
1306 | result_ready_cost or insn_default_latency because it will | |||
1307 | trigger a fatal error for unrecognizable insns. */ | |||
1308 | if (recog_memoized (insn) < 0) | |||
1309 | cost = 0; | |||
1310 | else | |||
1311 | { | |||
1312 | cost = insn_default_latency (insn); | |||
1313 | ||||
1314 | if (cost < 0) | |||
1315 | cost = 0; | |||
1316 | } | |||
1317 | ||||
1318 | return cost; | |||
1319 | } | |||
1320 | ||||
/* Return the cost of the VI.
   !!! FIXME: Unify with haifa-sched.cc: insn_sched_cost ().  */
int
sel_vinsn_cost (vinsn_t vi)
{
  int cost = vi->cost;

  if (cost < 0)
    {
      cost = sel_insn_rtx_cost (VINSN_INSN_RTX (vi));
      vi->cost = cost;
    }

  return cost;
}
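
/* Note: VI->cost acts as a lazily filled cache -- a negative value means
   "not computed yet" and is replaced by the real latency on first query.  */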

/* Functions for insn emitting.  */

/* Emit new insn after AFTER based on PATTERN and initialize its data from
   EXPR and SEQNO.  */
insn_t
sel_gen_insn_from_rtx_after (rtx pattern, expr_t expr, int seqno, insn_t after)
{
  insn_t new_insn;

  gcc_assert (EXPR_TARGET_AVAILABLE (expr) == true);

  new_insn = emit_insn_after (pattern, after);
  set_insn_init (expr, NULL, seqno);
  sel_init_new_insn (new_insn, INSN_INIT_TODO_LUID | INSN_INIT_TODO_SSID);

  return new_insn;
}

/* Force newly generated vinsns to be unique.  */
static bool init_insn_force_unique_p = false;

/* Emit new speculation recovery insn after AFTER based on PATTERN and
   initialize its data from EXPR and SEQNO.  */
insn_t
sel_gen_recovery_insn_from_rtx_after (rtx pattern, expr_t expr, int seqno,
                                      insn_t after)
{
  insn_t insn;

  gcc_assert (!init_insn_force_unique_p);

  init_insn_force_unique_p = true;
  insn = sel_gen_insn_from_rtx_after (pattern, expr, seqno, after);
  CANT_MOVE (insn) = 1;
  init_insn_force_unique_p = false;

  return insn;
}

/* Emit new insn after AFTER based on EXPR and SEQNO.  If VINSN is not NULL,
   take it as a new vinsn instead of EXPR's vinsn.
   We simplify insns later, after scheduling region in
   simplify_changed_insns.  */
insn_t
sel_gen_insn_from_expr_after (expr_t expr, vinsn_t vinsn, int seqno,
                              insn_t after)
{
  expr_t emit_expr;
  insn_t insn;
  int flags;

  emit_expr = set_insn_init (expr, vinsn ? vinsn : EXPR_VINSN (expr),
                             seqno);
  insn = EXPR_INSN_RTX (emit_expr);

  /* The insn may come from the transformation cache, which may hold already
     deleted insns, so mark it as not deleted.  */
  insn->set_undeleted ();

  add_insn_after (insn, after, BLOCK_FOR_INSN (insn));

  flags = INSN_INIT_TODO_SSID;
  if (INSN_LUID (insn) == 0)
    flags |= INSN_INIT_TODO_LUID;
  sel_init_new_insn (insn, flags);

  return insn;
}

/* Move insn from EXPR after AFTER.  */
insn_t
sel_move_insn (expr_t expr, int seqno, insn_t after)
{
  insn_t insn = EXPR_INSN_RTX (expr);
  basic_block bb = BLOCK_FOR_INSN (after);
  insn_t next = NEXT_INSN (after);

  /* Assert that in move_op we disconnected this insn properly.  */
  gcc_assert (EXPR_VINSN (INSN_EXPR (insn)) != NULL);
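
  /* Note: the insn is spliced back into the chain by hand rather than
     re-emitted, so its existing luid and expr data stay valid.  */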
  SET_PREV_INSN (insn) = after;
  SET_NEXT_INSN (insn) = next;

  SET_NEXT_INSN (after) = insn;
  SET_PREV_INSN (next) = insn;

  /* Update links from insn to bb and vice versa.  */
  df_insn_change_bb (insn, bb);
  if (BB_END (bb) == after)
    BB_END (bb) = insn;

  prepare_insn_expr (insn, seqno);
  return insn;
}


/* Functions to work with right-hand sides.  */

/* Search for a hash value determined by UID/NEW_VINSN in a sorted vector
   VECT and return true when found.  Use NEW_VINSN for comparison only when
   COMPARE_VINSNS is true.  Write to INDP the index on which
   the search has stopped, such that inserting the new element at INDP will
   retain VECT's sort order.  */
static bool
find_in_history_vect_1 (vec<expr_history_def> vect,
                        unsigned uid, vinsn_t new_vinsn,
                        bool compare_vinsns, int *indp)
{
  expr_history_def *arr;
  int i, j, len = vect.length ();

  if (len == 0)
    {
      *indp = 0;
      return false;
    }

  arr = vect.address ();
  i = 0, j = len - 1;

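  /* The vector is sorted by UID, so scan linearly and stop as soon as a
     greater UID is seen -- on failure, *INDP is then the sort-preserving
     insertion point.  */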
  while (i <= j)
    {
      unsigned auid = arr[i].uid;
      vinsn_t avinsn = arr[i].new_expr_vinsn;

      if (auid == uid
          /* When undoing transformation on a bookkeeping copy, the new vinsn
             may not be exactly equal to the one that is saved in the vector.
             This is because the insn whose copy we're checking was possibly
             substituted itself.  */
          && (! compare_vinsns
              || vinsn_equal_p (avinsn, new_vinsn)))
        {
          *indp = i;
          return true;
        }
      else if (auid > uid)
        break;
      i++;
    }

  *indp = i;
  return false;
}

/* Search for a uid of INSN and NEW_VINSN in a sorted vector VECT.  Return
   the position found, or -1 if no such value is in the vector.
   Search also for UIDs of insn's originators, if ORIGINATORS_P is true.  */
int
find_in_history_vect (vec<expr_history_def> vect, rtx insn,
                      vinsn_t new_vinsn, bool originators_p)
{
  int ind;

  if (find_in_history_vect_1 (vect, INSN_UID (insn), new_vinsn,
                              false, &ind))
    return ind;

  if (INSN_ORIGINATORS (insn) && originators_p)
    {
      unsigned uid;
      bitmap_iterator bi;

      EXECUTE_IF_SET_IN_BITMAP (INSN_ORIGINATORS (insn), 0, uid, bi)
        if (find_in_history_vect_1 (vect, uid, new_vinsn, false, &ind))
          return ind;
    }

  return -1;
}

/* Insert new element in a sorted history vector pointed to by PVECT,
   if it is not there already.  The element is searched using
   UID/NEW_EXPR_VINSN pair.  TYPE, OLD_EXPR_VINSN and SPEC_DS save
   the history of a transformation.  */
void
insert_in_history_vect (vec<expr_history_def> *pvect,
                        unsigned uid, enum local_trans_type type,
                        vinsn_t old_expr_vinsn, vinsn_t new_expr_vinsn,
                        ds_t spec_ds)
{
  vec<expr_history_def> vect = *pvect;
  expr_history_def temp;
  bool res;
  int ind;

  res = find_in_history_vect_1 (vect, uid, new_expr_vinsn, true, &ind);

  if (res)
    {
      expr_history_def *phist = &vect[ind];

      /* It is possible that speculation types of expressions that were
         propagated through different paths will be different here.  In this
         case, merge the status to get the correct check later.  */
      if (phist->spec_ds != spec_ds)
        phist->spec_ds = ds_max_merge (phist->spec_ds, spec_ds);
      return;
    }

  temp.uid = uid;
  temp.old_expr_vinsn = old_expr_vinsn;
  temp.new_expr_vinsn = new_expr_vinsn;
  temp.spec_ds = spec_ds;
  temp.type = type;

  vinsn_attach (old_expr_vinsn);
  vinsn_attach (new_expr_vinsn);
  vect.safe_insert (ind, temp);
  *pvect = vect;
}
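
/* Note: both vinsns of a history entry hold a reference while the entry
   lives in the vector; free_history_vect releases them.  */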

/* Free history vector PVECT.  */
static void
free_history_vect (vec<expr_history_def> &pvect)
{
  unsigned i;
  expr_history_def *phist;

  if (! pvect.exists ())
    return;

  for (i = 0; pvect.iterate (i, &phist); i++)
    {
      vinsn_detach (phist->old_expr_vinsn);
      vinsn_detach (phist->new_expr_vinsn);
    }

  pvect.release ();
}

/* Merge vector FROM to PVECT.  */
static void
merge_history_vect (vec<expr_history_def> *pvect,
                    vec<expr_history_def> from)
{
  expr_history_def *phist;
  int i;

  /* We keep this vector sorted.  */
  for (i = 0; from.iterate (i, &phist); i++)
    insert_in_history_vect (pvect, phist->uid, phist->type,
                            phist->old_expr_vinsn, phist->new_expr_vinsn,
                            phist->spec_ds);
}

/* Compare two vinsns as rhses if possible and as vinsns otherwise.  */
bool
vinsn_equal_p (vinsn_t x, vinsn_t y)
{
  rtx_equal_p_callback_function repcf;

  if (x == y)
    return true;

  if (VINSN_TYPE (x) != VINSN_TYPE (y))
    return false;

  if (VINSN_HASH (x) != VINSN_HASH (y))
    return false;

  repcf = targetm.sched.skip_rtx_p ? skip_unspecs_callback : NULL;
  if (VINSN_SEPARABLE_P (x))
    {
      /* Compare RHSes of VINSNs.  */
      gcc_assert (VINSN_RHS (x));
      gcc_assert (VINSN_RHS (y));

      return rtx_equal_p_cb (VINSN_RHS (x), VINSN_RHS (y), repcf);
    }

  return rtx_equal_p_cb (VINSN_PATTERN (x), VINSN_PATTERN (y), repcf);
}
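
/* Note: for separable (SET-type) vinsns only the RHSes are compared above,
   so two sets with the same source but different target registers compare
   as equal here.  */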


/* Functions for working with expressions.  */

/* Initialize EXPR.  */
static void
init_expr (expr_t expr, vinsn_t vi, int spec, int use, int priority,
           int sched_times, int orig_bb_index, ds_t spec_done_ds,
           ds_t spec_to_check_ds, int orig_sched_cycle,
           vec<expr_history_def> history,
           signed char target_available,
           bool was_substituted, bool was_renamed, bool needs_spec_check_p,
           bool cant_move)
{
  vinsn_attach (vi);

  EXPR_VINSN (expr) = vi;
  EXPR_SPEC (expr) = spec;
  EXPR_USEFULNESS (expr) = use;
  EXPR_PRIORITY (expr) = priority;
  EXPR_PRIORITY_ADJ (expr) = 0;
  EXPR_SCHED_TIMES (expr) = sched_times;
  EXPR_ORIG_BB_INDEX (expr) = orig_bb_index;
  EXPR_ORIG_SCHED_CYCLE (expr) = orig_sched_cycle;
  EXPR_SPEC_DONE_DS (expr) = spec_done_ds;
  EXPR_SPEC_TO_CHECK_DS (expr) = spec_to_check_ds;

  if (history.exists ())
    EXPR_HISTORY_OF_CHANGES (expr) = history;
  else
    EXPR_HISTORY_OF_CHANGES (expr).create (0);

  EXPR_TARGET_AVAILABLE (expr) = target_available;
  EXPR_WAS_SUBSTITUTED (expr) = was_substituted;
  EXPR_WAS_RENAMED (expr) = was_renamed;
  EXPR_NEEDS_SPEC_CHECK_P (expr) = needs_spec_check_p;
  EXPR_CANT_MOVE (expr) = cant_move;
}

/* Make a copy of the expr FROM into the expr TO.  */
void
copy_expr (expr_t to, expr_t from)
{
  vec<expr_history_def> temp = vNULL;

  if (EXPR_HISTORY_OF_CHANGES (from).exists ())
    {
      unsigned i;
      expr_history_def *phist;

      temp = EXPR_HISTORY_OF_CHANGES (from).copy ();
      for (i = 0;
           temp.iterate (i, &phist);
           i++)
        {
          vinsn_attach (phist->old_expr_vinsn);
          vinsn_attach (phist->new_expr_vinsn);
        }
    }

  init_expr (to, EXPR_VINSN (from), EXPR_SPEC (from),
             EXPR_USEFULNESS (from), EXPR_PRIORITY (from),
             EXPR_SCHED_TIMES (from), EXPR_ORIG_BB_INDEX (from),
             EXPR_SPEC_DONE_DS (from), EXPR_SPEC_TO_CHECK_DS (from),
             EXPR_ORIG_SCHED_CYCLE (from), temp,
             EXPR_TARGET_AVAILABLE (from), EXPR_WAS_SUBSTITUTED (from),
             EXPR_WAS_RENAMED (from), EXPR_NEEDS_SPEC_CHECK_P (from),
             EXPR_CANT_MOVE (from));
}

/* Same, but the final expr will not ever be in av sets, so don't copy
   "uninteresting" data such as bitmap cache.  */
void
copy_expr_onside (expr_t to, expr_t from)
{
  init_expr (to, EXPR_VINSN (from), EXPR_SPEC (from), EXPR_USEFULNESS (from),
             EXPR_PRIORITY (from), EXPR_SCHED_TIMES (from), 0,
             EXPR_SPEC_DONE_DS (from), EXPR_SPEC_TO_CHECK_DS (from), 0,
             vNULL,
             EXPR_TARGET_AVAILABLE (from), EXPR_WAS_SUBSTITUTED (from),
             EXPR_WAS_RENAMED (from), EXPR_NEEDS_SPEC_CHECK_P (from),
             EXPR_CANT_MOVE (from));
}

/* Prepare the expr of INSN for scheduling.  Used when moving insn and when
   initializing new insns.  */
static void
prepare_insn_expr (insn_t insn, int seqno)
{
  expr_t expr = INSN_EXPR (insn);
  ds_t ds;

  INSN_SEQNO (insn) = seqno;
  EXPR_ORIG_BB_INDEX (expr) = BLOCK_NUM (insn);
  EXPR_SPEC (expr) = 0;
  EXPR_ORIG_SCHED_CYCLE (expr) = 0;
  EXPR_WAS_SUBSTITUTED (expr) = 0;
  EXPR_WAS_RENAMED (expr) = 0;
  EXPR_TARGET_AVAILABLE (expr) = 1;
  INSN_LIVE_VALID_P (insn) = false;

  /* ??? If this expression is speculative, make its dependence
     as weak as possible.  We can filter this expression later
     in process_spec_exprs, because we do not distinguish
     between the status we got during compute_av_set and the
     existing status.  To be fixed.  */
  ds = EXPR_SPEC_DONE_DS (expr);
  if (ds)
    EXPR_SPEC_DONE_DS (expr) = ds_get_max_dep_weak (ds);

  free_history_vect (EXPR_HISTORY_OF_CHANGES (expr));
}

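/* Note: EXPR_TARGET_AVAILABLE is a tri-state -- 1 when the target register
   is known to be available, 0 when it is known to be unavailable, and -1
   when its status is unknown.  The merge below is conservative.  */
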
/* Update target_available bits when merging exprs TO and FROM.  SPLIT_POINT
   is non-null when expressions are merged from different successors at
   a split point.  */
static void
update_target_availability (expr_t to, expr_t from, insn_t split_point)
{
  if (EXPR_TARGET_AVAILABLE (to) < 0
      || EXPR_TARGET_AVAILABLE (from) < 0)
    EXPR_TARGET_AVAILABLE (to) = -1;
  else
    {
      /* We try to detect the case when one of the expressions
         can only be reached through another one.  In this case,
         we can do better.  */
      if (split_point == NULL)
        {
          int toind, fromind;

          toind = EXPR_ORIG_BB_INDEX (to);
          fromind = EXPR_ORIG_BB_INDEX (from);

          if (toind && toind == fromind)
            /* Do nothing -- everything is done in
               merge_with_other_exprs.  */
            ;
          else
            EXPR_TARGET_AVAILABLE (to) = -1;
        }
      else if (EXPR_TARGET_AVAILABLE (from) == 0
               && EXPR_LHS (from)
               && REG_P (EXPR_LHS (from))
               && REGNO (EXPR_LHS (to)) != REGNO (EXPR_LHS (from)))
        EXPR_TARGET_AVAILABLE (to) = -1;
      else
        EXPR_TARGET_AVAILABLE (to) &= EXPR_TARGET_AVAILABLE (from);
    }
}

/* Update speculation bits when merging exprs TO and FROM.  SPLIT_POINT
   is non-null when expressions are merged from different successors at
   a split point.  */
static void
update_speculative_bits (expr_t to, expr_t from, insn_t split_point)
{
  ds_t old_to_ds, old_from_ds;

  old_to_ds = EXPR_SPEC_DONE_DS (to);
  old_from_ds = EXPR_SPEC_DONE_DS (from);

  EXPR_SPEC_DONE_DS (to) = ds_max_merge (old_to_ds, old_from_ds);
  EXPR_SPEC_TO_CHECK_DS (to) |= EXPR_SPEC_TO_CHECK_DS (from);
  EXPR_NEEDS_SPEC_CHECK_P (to) |= EXPR_NEEDS_SPEC_CHECK_P (from);

  /* When merging e.g. control & data speculative exprs, or a control
     speculative with a control&data speculative one, we really have
     to change vinsn too.  Also, when speculative status is changed,
     we also need to record this as a transformation in expr's history.  */
  if ((old_to_ds & SPECULATIVE) || (old_from_ds & SPECULATIVE))
    {
      old_to_ds = ds_get_speculation_types (old_to_ds);
      old_from_ds = ds_get_speculation_types (old_from_ds);

      if (old_to_ds != old_from_ds)
        {
          ds_t record_ds;

          /* When both expressions are speculative, we need to change
             the vinsn first.  */
          if ((old_to_ds & SPECULATIVE) && (old_from_ds & SPECULATIVE))
            {
              int res;

              res = speculate_expr (to, EXPR_SPEC_DONE_DS (to));
              gcc_assert (res >= 0);
            }

          if (split_point != NULL)
            {
              /* Record the change with proper status.  */
              record_ds = EXPR_SPEC_DONE_DS (to) & SPECULATIVE;
              record_ds &= ~(old_to_ds & SPECULATIVE);
              record_ds &= ~(old_from_ds & SPECULATIVE);

              insert_in_history_vect (&EXPR_HISTORY_OF_CHANGES (to),
                                      INSN_UID (split_point), TRANS_SPECULATION,
                                      EXPR_VINSN (from), EXPR_VINSN (to),
                                      record_ds);
            }
        }
    }
}


/* Merge bits of FROM expr to TO expr.  When SPLIT_POINT is not NULL,
   this is done along different paths.  */
void
merge_expr_data (expr_t to, expr_t from, insn_t split_point)
{
  /* Choose the maximum of the specs of merged exprs.  This is required
     for correctness of bookkeeping.  */
  if (EXPR_SPEC (to) < EXPR_SPEC (from))
    EXPR_SPEC (to) = EXPR_SPEC (from);

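  /* At a split point the usefulnesses of the merged paths add up, since
     they are path probabilities scaled by REG_BR_PROB_BASE; on a single
     path the maximum is taken instead.  */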
  if (split_point)
    EXPR_USEFULNESS (to) += EXPR_USEFULNESS (from);
  else
    EXPR_USEFULNESS (to) = MAX (EXPR_USEFULNESS (to),
                                EXPR_USEFULNESS (from));

  if (EXPR_PRIORITY (to) < EXPR_PRIORITY (from))
    EXPR_PRIORITY (to) = EXPR_PRIORITY (from);

  /* We merge sched-times half-way to the larger value to avoid the endless
     pipelining of unneeded insns.  The average seems to be a good compromise
     between pipelining opportunities and avoiding extra work.  */
  if (EXPR_SCHED_TIMES (to) != EXPR_SCHED_TIMES (from))
    EXPR_SCHED_TIMES (to) = ((EXPR_SCHED_TIMES (from) + EXPR_SCHED_TIMES (to)
                              + 1) / 2);

  if (EXPR_ORIG_BB_INDEX (to) != EXPR_ORIG_BB_INDEX (from))
    EXPR_ORIG_BB_INDEX (to) = 0;

  EXPR_ORIG_SCHED_CYCLE (to) = MIN (EXPR_ORIG_SCHED_CYCLE (to),
                                    EXPR_ORIG_SCHED_CYCLE (from));

  EXPR_WAS_SUBSTITUTED (to) |= EXPR_WAS_SUBSTITUTED (from);
  EXPR_WAS_RENAMED (to) |= EXPR_WAS_RENAMED (from);
  EXPR_CANT_MOVE (to) |= EXPR_CANT_MOVE (from);

  merge_history_vect (&EXPR_HISTORY_OF_CHANGES (to),
                      EXPR_HISTORY_OF_CHANGES (from));
  update_target_availability (to, from, split_point);
  update_speculative_bits (to, from, split_point);
}

/* Merge bits of FROM expr to TO expr.  Vinsns in the exprs should be equal
   in terms of vinsn_equal_p.  SPLIT_POINT is non-null when expressions
   are merged from different successors at a split point.  */
void
merge_expr (expr_t to, expr_t from, insn_t split_point)
{
  vinsn_t to_vi = EXPR_VINSN (to);
  vinsn_t from_vi = EXPR_VINSN (from);

  gcc_assert (vinsn_equal_p (to_vi, from_vi));

  /* Make sure that speculative pattern is propagated into exprs that
     have non-speculative one.  This will provide us with consistent
     speculative bits and speculative patterns inside expr.  */
  if (EXPR_SPEC_DONE_DS (to) == 0
      && (EXPR_SPEC_DONE_DS (from) != 0
          /* Do likewise for volatile insns, so that we always retain
             the may_trap_p bit on the resulting expression.  However,
             avoid propagating the trapping bit into the instructions
             already speculated.  This would result in replacing the
             speculative pattern with the non-speculative one and breaking
             the speculation support.  */
          || (!VINSN_MAY_TRAP_P (EXPR_VINSN (to))
              && VINSN_MAY_TRAP_P (EXPR_VINSN (from)))))
    change_vinsn_in_expr (to, EXPR_VINSN (from));

  merge_expr_data (to, from, split_point);
  gcc_assert (EXPR_USEFULNESS (to) <= REG_BR_PROB_BASE);
}

/* Clear the information of this EXPR.  */
void
clear_expr (expr_t expr)
{
  vinsn_detach (EXPR_VINSN (expr));
  EXPR_VINSN (expr) = NULL;

  free_history_vect (EXPR_HISTORY_OF_CHANGES (expr));
}

/* For a given LV_SET, mark EXPR as having an unavailable target register.  */
static void
set_unavailable_target_for_expr (expr_t expr, regset lv_set)
{
  if (EXPR_SEPARABLE_P (expr))
    {
      if (REG_P (EXPR_LHS (expr))
          && register_unavailable_p (lv_set, EXPR_LHS (expr)))
        {
          /* If it's an insn like r1 = use (r1, ...), and it exists in
             different forms in each of the av_sets being merged, we can't say
             whether the original destination register is available or not.
             However, this still works if the destination register is not used
             in the original expression: if the branch whose LV_SET we're
             looking at here is not actually the 'other branch' in the sense
             that the same expression is available through it (which can't be
             determined at computation stage because of transformations on one
             of the branches), it still won't affect the availability.
             Liveness of a register somewhere on a code motion path means
             it's either read somewhere on the code motion path, live on
             the 'other' branch, live at the point immediately following
             the original operation, or read by the original operation.
             The latter case is filtered out in the condition below.
             This still doesn't cover the case when the register is defined
             and used somewhere within the code motion path, where we could
             miss a unifying code motion along both branches using a renamed
             register, but it won't affect correctness, since upon an actual
             code motion the bookkeeping code would be generated.  */
          if (register_unavailable_p (VINSN_REG_USES (EXPR_VINSN (expr)),
                                      EXPR_LHS (expr)))
            EXPR_TARGET_AVAILABLE (expr) = -1;
          else
            EXPR_TARGET_AVAILABLE (expr) = false;
        }
    }
  else
    {
      unsigned regno;
      reg_set_iterator rsi;

      EXECUTE_IF_SET_IN_REG_SET (VINSN_REG_SETS (EXPR_VINSN (expr)),
                                 0, regno, rsi)
        if (bitmap_bit_p (lv_set, regno))
          {
            EXPR_TARGET_AVAILABLE (expr) = false;
            break;
          }

      EXECUTE_IF_SET_IN_REG_SET (VINSN_REG_CLOBBERS (EXPR_VINSN (expr)),
                                 0, regno, rsi)
        if (bitmap_bit_p (lv_set, regno))
          {
            EXPR_TARGET_AVAILABLE (expr) = false;
            break;
          }
    }
}

/* Try to make EXPR speculative.  Return 1 when EXPR's pattern
   or dependence status has changed, 2 when also the target register
   became unavailable, 0 if nothing had to be changed, and -1 when
   speculation is not possible.  */
int
speculate_expr (expr_t expr, ds_t ds)
{
  int res;
  rtx_insn *orig_insn_rtx;
  rtx spec_pat;
  ds_t target_ds, current_ds;

  /* Obtain the status we need to put on EXPR.  */
  target_ds = (ds & SPECULATIVE);
  current_ds = EXPR_SPEC_DONE_DS (expr);
  ds = ds_full_merge (current_ds, target_ds, NULL_RTX, NULL_RTX);

  orig_insn_rtx = EXPR_INSN_RTX (expr);

  res = sched_speculate_insn (orig_insn_rtx, ds, &spec_pat);

  switch (res)
    {
    case 0:
      EXPR_SPEC_DONE_DS (expr) = ds;
      return current_ds != ds ? 1 : 0;

    case 1:
      {
        rtx_insn *spec_insn_rtx =
          create_insn_rtx_from_pattern (spec_pat, NULL_RTX);
        vinsn_t spec_vinsn = create_vinsn_from_insn_rtx (spec_insn_rtx, false);

        change_vinsn_in_expr (expr, spec_vinsn);
        EXPR_SPEC_DONE_DS (expr) = ds;
        EXPR_NEEDS_SPEC_CHECK_P (expr) = true;

        /* Do not allow clobbering the address register of speculative
           insns.  */
        if (register_unavailable_p (VINSN_REG_USES (EXPR_VINSN (expr)),
                                    expr_dest_reg (expr)))
          {
            EXPR_TARGET_AVAILABLE (expr) = false;
            return 2;
          }

        return 1;
      }

    case -1:
      return -1;

    default:
      gcc_unreachable ();
      return -1;
    }
}

/* Return a destination register, if any, of EXPR.  */
rtx
expr_dest_reg (expr_t expr)
{
  rtx dest = VINSN_LHS (EXPR_VINSN (expr));

  if (dest != NULL_RTX && REG_P (dest))
    return dest;

  return NULL_RTX;
}

/* Return the REGNO of EXPR's destination.  */
unsigned
expr_dest_regno (expr_t expr)
{
  rtx dest = expr_dest_reg (expr);

  gcc_assert (dest != NULL_RTX);
  return REGNO (dest);
}

/* For a given LV_SET, mark all expressions in JOIN_SET that are not present
   in AV_SET as having an unavailable target register.  */
void
mark_unavailable_targets (av_set_t join_set, av_set_t av_set, regset lv_set)
{
  expr_t expr;
  av_set_iterator avi;

  FOR_EACH_EXPR (expr, avi, join_set)
    if (av_set_lookup (av_set, EXPR_VINSN (expr)) == NULL)
      set_unavailable_target_for_expr (expr, lv_set);
}


/* Returns true if REG (at least partially) is present in REGS.  */
bool
register_unavailable_p (regset regs, rtx reg)
{
  unsigned regno, end_regno;

  regno = REGNO (reg);
  if (bitmap_bit_p (regs, regno))
    return true;

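  /* REG may span several hard registers (e.g. a multi-word value);
     END_REGNO gives one past the last regno it occupies.  */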
  end_regno = END_REGNO (reg);

  while (++regno < end_regno)
    if (bitmap_bit_p (regs, regno))
      return true;

  return false;
}

/* Av set functions.  */

/* Add a new element to av set SETP.
   Return the element added.  */
static av_set_t
av_set_add_element (av_set_t *setp)
{
  /* Insert at the beginning of the list.  */
  _list_add (setp);
  return *setp;
}

/* Add EXPR to SETP.  */
void
av_set_add (av_set_t *setp, expr_t expr)
{
  av_set_t elem;

  gcc_assert (!INSN_NOP_P (EXPR_INSN_RTX (expr)));
  elem = av_set_add_element (setp);
  copy_expr (_AV_SET_EXPR (elem), expr);
}

/* Same, but do not copy EXPR.  */
static void
av_set_add_nocopy (av_set_t *setp, expr_t expr)
{
  av_set_t elem;

  elem = av_set_add_element (setp);
  *_AV_SET_EXPR (elem) = *expr;
}

/* Remove expr pointed to by IP from the av_set.  */
void
av_set_iter_remove (av_set_iterator *ip)
{
  clear_expr (_AV_SET_EXPR (*ip->lp));
  _list_iter_remove (ip);
}

/* Search for an expr in SET, such that it's equivalent to SOUGHT_VINSN in
   the sense of vinsn_equal_p.  Return NULL if no such expr is found
   in SET.  */
expr_t
av_set_lookup (av_set_t set, vinsn_t sought_vinsn)
{
  expr_t expr;
  av_set_iterator i;

  FOR_EACH_EXPR (expr, i, set)
    if (vinsn_equal_p (EXPR_VINSN (expr), sought_vinsn))
      return expr;
  return NULL;
}

/* Same, but also remove the EXPR found.  */
static expr_t
av_set_lookup_and_remove (av_set_t *setp, vinsn_t sought_vinsn)
{
  expr_t expr;
  av_set_iterator i;

  FOR_EACH_EXPR_1 (expr, i, setp)
    if (vinsn_equal_p (EXPR_VINSN (expr), sought_vinsn))
      {
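        /* Remove the list node without clearing the expr -- the caller
           takes ownership of the returned EXPR (see av_set_union_and_live).  */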
        _list_iter_remove_nofree (&i);
        return expr;
      }
  return NULL;
}

/* Search for an expr in SET, such that it's equivalent to EXPR in the
   sense of vinsn_equal_p applied to their vinsns, but is not EXPR itself.
   Return NULL if no such expr is found in SET.  */
static expr_t
av_set_lookup_other_equiv_expr (av_set_t set, expr_t expr)
{
  expr_t cur_expr;
  av_set_iterator i;

  FOR_EACH_EXPR (cur_expr, i, set)
    {
      if (cur_expr == expr)
        continue;
      if (vinsn_equal_p (EXPR_VINSN (cur_expr), EXPR_VINSN (expr)))
        return cur_expr;
    }

  return NULL;
}

/* If an equivalent expression is already in AVP, merge EXPR into it and
   remove one of them.  */
expr_t
merge_with_other_exprs (av_set_t *avp, av_set_iterator *ip, expr_t expr)
{
  expr_t expr2;

  expr2 = av_set_lookup_other_equiv_expr (*avp, expr);
  if (expr2 != NULL)
    {
      /* Reset target availability on merge, since taking it only from one
         of the exprs would be controversial for different code.  */
      EXPR_TARGET_AVAILABLE (expr2) = -1;
      EXPR_USEFULNESS (expr2) = 0;

      merge_expr (expr2, expr, NULL);

      /* Fix usefulness, as it should now be REG_BR_PROB_BASE.  */
      EXPR_USEFULNESS (expr2) = REG_BR_PROB_BASE;

      av_set_iter_remove (ip);
      return expr2;
    }

  return expr;
}

/* Return true if there is an expr that correlates to VI in SET.  */
bool
av_set_is_in_p (av_set_t set, vinsn_t vi)
{
  return av_set_lookup (set, vi) != NULL;
}

/* Return a copy of SET.  */
av_set_t
av_set_copy (av_set_t set)
{
  expr_t expr;
  av_set_iterator i;
  av_set_t res = NULL;

  FOR_EACH_EXPR (expr, i, set)
    av_set_add (&res, expr);

  return res;
}

/* Join two av sets that do not have common elements by attaching second set
   (pointed to by FROMP) to the end of first set (TO_TAILP must point to
   _AV_SET_NEXT of first set's last element).  */
static void
join_distinct_sets (av_set_t *to_tailp, av_set_t *fromp)
{
  gcc_assert (*to_tailp == NULL);
  *to_tailp = *fromp;
  *fromp = NULL;
}

/* Make the set pointed to by TOP the union of TOP and FROMP.  Clear the
   av_set pointed to by FROMP afterwards.  */
void
av_set_union_and_clear (av_set_t *top, av_set_t *fromp, insn_t insn)
{
  expr_t expr1;
  av_set_iterator i;

  /* Delete from TOP all exprs that are present in FROMP.  */
  FOR_EACH_EXPR_1 (expr1, i, top)
    {
      expr_t expr2 = av_set_lookup (*fromp, EXPR_VINSN (expr1));

      if (expr2)
        {
          merge_expr (expr2, expr1, insn);
          av_set_iter_remove (&i);
        }
    }

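  /* After the loop, I.LP points at the tail _AV_SET_NEXT pointer of TOP,
     so the remaining FROMP elements can be spliced on directly.  */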
  join_distinct_sets (i.lp, fromp);
}

/* Same as above, but also update availability of target register in
   TOP judging by TO_LV_SET and FROM_LV_SET.  */
void
av_set_union_and_live (av_set_t *top, av_set_t *fromp, regset to_lv_set,
                       regset from_lv_set, insn_t insn)
{
  expr_t expr1;
  av_set_iterator i;
  av_set_t *to_tailp, in_both_set = NULL;

  /* Delete from TOP all exprs that are present in FROMP.  */
  FOR_EACH_EXPR_1 (expr1, i, top)
    {
      expr_t expr2 = av_set_lookup_and_remove (fromp, EXPR_VINSN (expr1));

      if (expr2)
        {
          /* It may be that the expressions have different destination
             registers, in which case we need to check liveness here.  */
          if (EXPR_SEPARABLE_P (expr1))
            {
              int regno1 = (REG_P (EXPR_LHS (expr1))
                            ? (int) expr_dest_regno (expr1) : -1);
              int regno2 = (REG_P (EXPR_LHS (expr2))
                            ? (int) expr_dest_regno (expr2) : -1);

              /* ??? We don't have a way to check restrictions for
                 *other* register on the current path, we did it only
                 for the current target register.  Give up.  */
              if (regno1 != regno2)
                EXPR_TARGET_AVAILABLE (expr2) = -1;
            }
          else if (EXPR_INSN_RTX (expr1) != EXPR_INSN_RTX (expr2))
            EXPR_TARGET_AVAILABLE (expr2) = -1;

          merge_expr (expr2, expr1, insn);
          av_set_add_nocopy (&in_both_set, expr2);
          av_set_iter_remove (&i);
        }
      else
        /* EXPR1 is present in TOP, but not in FROMP.  Check it on
           FROM_LV_SET.  */
        set_unavailable_target_for_expr (expr1, from_lv_set);
    }
  to_tailp = i.lp;

  /* These expressions are not present in TOP.  Check liveness
     restrictions on TO_LV_SET.  */
  FOR_EACH_EXPR (expr1, i, *fromp)
    set_unavailable_target_for_expr (expr1, to_lv_set);

  join_distinct_sets (i.lp, &in_both_set);
  join_distinct_sets (to_tailp, fromp);
}

/* Clear av_set pointed to by SETP.  */
void
av_set_clear (av_set_t *setp)
{
  expr_t expr;
  av_set_iterator i;

  FOR_EACH_EXPR_1 (expr, i, setp)
    av_set_iter_remove (&i);

  gcc_assert (*setp == NULL);
}

/* Leave only one non-speculative element in the SETP.  */
void
av_set_leave_one_nonspec (av_set_t *setp)
{
  expr_t expr;
  av_set_iterator i;
  bool has_one_nonspec = false;

  /* Keep all speculative exprs, and leave one non-speculative
     (the first one).  */
  FOR_EACH_EXPR_1 (expr, i, setp)
    {
      if (!EXPR_SPEC_DONE_DS (expr))
        {
          if (has_one_nonspec)
            av_set_iter_remove (&i);
          else
            has_one_nonspec = true;
        }
    }
}

/* Return the N'th element of the SET.  */
expr_t
av_set_element (av_set_t set, int n)
{
  expr_t expr;
  av_set_iterator i;

  FOR_EACH_EXPR (expr, i, set)
    if (n-- == 0)
      return expr;

  gcc_unreachable ();
  return NULL;
}

/* Deletes all expressions from AVP that are conditional branches (IFs).  */
void
av_set_substract_cond_branches (av_set_t *avp)
{
  av_set_iterator i;
  expr_t expr;

  FOR_EACH_EXPR_1 (expr, i, avp)
    if (vinsn_cond_branch_p (EXPR_VINSN (expr)))
      av_set_iter_remove (&i);
}

/* Multiply the usefulness attribute of each member of av-set AV by
   value PROB / ALL_PROB.  */
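/* For example, assuming PROB/ALL_PROB stands for the probability of one of
   two equally likely successors, an expr that was fully useful
   (REG_BR_PROB_BASE) is scaled down to REG_BR_PROB_BASE / 2.  */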
void
av_set_split_usefulness (av_set_t av, int prob, int all_prob)
{
  av_set_iterator i;
  expr_t expr;

  FOR_EACH_EXPR (expr, i, av)
    EXPR_USEFULNESS (expr) = (all_prob
                              ? (EXPR_USEFULNESS (expr) * prob) / all_prob
                              : 0);
}

/* Leave in AVP only those expressions which are present in AV,
   merging the history of changes.  */
void
av_set_code_motion_filter (av_set_t *avp, av_set_t av)
{
  av_set_iterator i;
  expr_t expr, expr2;

  FOR_EACH_EXPR_1 (expr, i, avp)
    if ((expr2 = av_set_lookup (av, EXPR_VINSN (expr))) == NULL)
      av_set_iter_remove (&i);
    else
      /* When updating av sets in bookkeeping blocks, we can add more insns
         there which will be transformed but the upper av sets will not
         reflect those transformations.  We then fail to undo those
         when searching for such insns.  So merge the history saved
         in the av set of the block we are processing.  */
      merge_history_vect (&EXPR_HISTORY_OF_CHANGES (expr),
                          EXPR_HISTORY_OF_CHANGES (expr2));
}


/* Dependence hooks to initialize insn data.  */

/* This is used in hooks callable from dependence analysis when initializing
   instruction's data.  */
static struct
{
  /* Where the dependence was found (lhs/rhs).  */
  deps_where_t where;

  /* The actual data object to initialize.  */
  idata_t id;

  /* True when the insn should not be made clonable.  */
  bool force_unique_p;

  /* True when insn should be treated as of type USE, i.e. never renamed.  */
  bool force_use_p;
} deps_init_id_data;
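
/* Note: this is global state shared by the deps_init_id_* hooks below and
   is only meaningful while a single insn is being analyzed.  */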

/* Setup ID for INSN.  FORCE_UNIQUE_P is true when INSN should not be
   clonable.  */
static void
setup_id_for_insn (idata_t id, insn_t insn, bool force_unique_p)
{
  int type;

  /* Determine whether INSN could be cloned and return the appropriate vinsn
     type.  Clonable insns which can be separated into lhs and rhs have type
     SET; other clonable insns have type USE.  */
  type = GET_CODE (insn);

  /* Only regular insns could be cloned.  */
  if (type == INSN && !force_unique_p)
    type = SET;
  else if (type == JUMP_INSN && simplejump_p (insn))
    type = PC;
  else if (type == DEBUG_INSN)
    type = !force_unique_p ? USE : INSN;

  IDATA_TYPE (id) = type;
  IDATA_REG_SETS (id) = get_clear_regset_from_pool ();
  IDATA_REG_USES (id) = get_clear_regset_from_pool ();
  IDATA_REG_CLOBBERS (id) = get_clear_regset_from_pool ();
}

/* Start initializing insn data.  */
static void
deps_init_id_start_insn (insn_t insn)
{
  gcc_assert (deps_init_id_data.where == DEPS_IN_NOWHERE);

  setup_id_for_insn (deps_init_id_data.id, insn,
                     deps_init_id_data.force_unique_p);
  deps_init_id_data.where = DEPS_IN_INSN;
}

/* Start initializing lhs data.  */
static void
deps_init_id_start_lhs (rtx lhs)
{
  gcc_assert (deps_init_id_data.where == DEPS_IN_INSN);
  gcc_assert (IDATA_LHS (deps_init_id_data.id) == NULL);

  if (IDATA_TYPE (deps_init_id_data.id) == SET)
    {
      IDATA_LHS (deps_init_id_data.id) = lhs;
      deps_init_id_data.where = DEPS_IN_LHS;
    }
}

/* Finish initializing lhs data.  */
static void
deps_init_id_finish_lhs (void)
{
  deps_init_id_data.where = DEPS_IN_INSN;
}

/* Note a set of REGNO.  */
static void
deps_init_id_note_reg_set (int regno)
{
  haifa_note_reg_set (regno);

  if (deps_init_id_data.where == DEPS_IN_RHS)
    deps_init_id_data.force_use_p = true;

  if (IDATA_TYPE (deps_init_id_data.id) != PC)
    SET_REGNO_REG_SET (IDATA_REG_SETS (deps_init_id_data.id), regno);

#ifdef STACK_REGS
  /* Make instructions that set stack registers to be ineligible for
     renaming to avoid issues with find_used_regs.  */
  if (IN_RANGE (regno, FIRST_STACK_REG, LAST_STACK_REG))
    deps_init_id_data.force_use_p = true;
#endif
}
2493 | ||||
2494 | /* Note a clobber of REGNO. */ | |||
2495 | static void | |||
2496 | deps_init_id_note_reg_clobber (int regno) | |||
2497 | { | |||
2498 | haifa_note_reg_clobber (regno); | |||
2499 | ||||
2500 | if (deps_init_id_data.where == DEPS_IN_RHS) | |||
2501 | deps_init_id_data.force_use_p = true; | |||
2502 | ||||
2503 | if (IDATA_TYPE (deps_init_id_data.id)((deps_init_id_data.id)->type) != PC) | |||
2504 | SET_REGNO_REG_SET (IDATA_REG_CLOBBERS (deps_init_id_data.id), regno)bitmap_set_bit (((deps_init_id_data.id)->reg_clobbers), regno ); | |||
2505 | } | |||
2506 | ||||
2507 | /* Note a use of REGNO. */ | |||
2508 | static void | |||
2509 | deps_init_id_note_reg_use (int regno) | |||
2510 | { | |||
2511 | haifa_note_reg_use (regno); | |||
2512 | ||||
2513 | if (IDATA_TYPE (deps_init_id_data.id)((deps_init_id_data.id)->type) != PC) | |||
2514 | SET_REGNO_REG_SET (IDATA_REG_USES (deps_init_id_data.id), regno)bitmap_set_bit (((deps_init_id_data.id)->reg_uses), regno); | |||
2515 | } | |||
2516 | ||||
2517 | /* Start initializing rhs data. */ | |||
2518 | static void | |||
2519 | deps_init_id_start_rhs (rtx rhs) | |||
2520 | { | |||
2521 | gcc_assert (deps_init_id_data.where == DEPS_IN_INSN)((void)(!(deps_init_id_data.where == DEPS_IN_INSN) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/sel-sched-ir.cc" , 2521, __FUNCTION__), 0 : 0)); | |||
2522 | ||||
2523 | /* And there was no sel_deps_reset_to_insn (). */ | |||
2524 | if (IDATA_LHS (deps_init_id_data.id)((deps_init_id_data.id)->lhs) != NULLnullptr) | |||
2525 | { | |||
2526 | IDATA_RHS (deps_init_id_data.id)((deps_init_id_data.id)->rhs) = rhs; | |||
2527 | deps_init_id_data.where = DEPS_IN_RHS; | |||
2528 | } | |||
2529 | } | |||
2530 | ||||
2531 | /* Finish initializing rhs data. */ | |||
2532 | static void | |||
2533 | deps_init_id_finish_rhs (void) | |||
2534 | { | |||
2535 | gcc_assert (deps_init_id_data.where == DEPS_IN_RHS((void)(!(deps_init_id_data.where == DEPS_IN_RHS || deps_init_id_data .where == DEPS_IN_INSN) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/sel-sched-ir.cc" , 2536, __FUNCTION__), 0 : 0)) | |||
2536 | || deps_init_id_data.where == DEPS_IN_INSN)((void)(!(deps_init_id_data.where == DEPS_IN_RHS || deps_init_id_data .where == DEPS_IN_INSN) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/sel-sched-ir.cc" , 2536, __FUNCTION__), 0 : 0)); | |||
2537 | deps_init_id_data.where = DEPS_IN_INSN; | |||
2538 | } | |||
2539 | ||||
2540 | /* Finish initializing insn data. */ | |||
2541 | static void | |||
2542 | deps_init_id_finish_insn (void) | |||
2543 | { | |||
2544 | gcc_assert (deps_init_id_data.where == DEPS_IN_INSN)((void)(!(deps_init_id_data.where == DEPS_IN_INSN) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/sel-sched-ir.cc" , 2544, __FUNCTION__), 0 : 0)); | |||
2545 | ||||
2546 | if (IDATA_TYPE (deps_init_id_data.id)((deps_init_id_data.id)->type) == SET) | |||
2547 | { | |||
2548 | rtx lhs = IDATA_LHS (deps_init_id_data.id)((deps_init_id_data.id)->lhs); | |||
2549 | rtx rhs = IDATA_RHS (deps_init_id_data.id)((deps_init_id_data.id)->rhs); | |||
2550 | ||||
2551 | if (lhs == NULLnullptr || rhs == NULLnullptr || !lhs_and_rhs_separable_p (lhs, rhs) | |||
2552 | || deps_init_id_data.force_use_p) | |||
2553 | { | |||
2554 | /* This should be a USE, as we don't want to schedule its RHS | |||
2555 | separately. However, we still want to have them recorded | |||
2556 | for the purposes of substitution. That's why we don't | |||
2557 | simply call downgrade_to_use () here. */ | |||
2558 | gcc_assert (IDATA_TYPE (deps_init_id_data.id) == SET)((void)(!(((deps_init_id_data.id)->type) == SET) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/sel-sched-ir.cc" , 2558, __FUNCTION__), 0 : 0)); | |||
2559 | gcc_assert (!lhs == !rhs)((void)(!(!lhs == !rhs) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/sel-sched-ir.cc" , 2559, __FUNCTION__), 0 : 0)); | |||
2560 | ||||
2561 | IDATA_TYPE (deps_init_id_data.id)((deps_init_id_data.id)->type) = USE; | |||
2562 | } | |||
2563 | } | |||
2564 | ||||
2565 | deps_init_id_data.where = DEPS_IN_NOWHERE; | |||
2566 | } | |||

/* This is dependence info used for initializing insn's data.  */
static struct sched_deps_info_def deps_init_id_sched_deps_info;

/* This initializes most of the static part of the above structure.  */
static const struct sched_deps_info_def const_deps_init_id_sched_deps_info =
  {
    NULL,

    deps_init_id_start_insn,
    deps_init_id_finish_insn,
    deps_init_id_start_lhs,
    deps_init_id_finish_lhs,
    deps_init_id_start_rhs,
    deps_init_id_finish_rhs,
    deps_init_id_note_reg_set,
    deps_init_id_note_reg_clobber,
    deps_init_id_note_reg_use,
    NULL, /* note_mem_dep */
    NULL, /* note_dep */

    0, /* use_cselib */
    0, /* use_deps_list */
    0 /* generate_spec_deps */
  };
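
/* Note that the table above relies on positional aggregate initialization:
   the entries must stay in the same order as the fields of
   sched_deps_info_def (declared in sched-int.h).  The commented NULL/0
   placeholders mark the hooks and flags this client leaves unset.  */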

/* Initialize INSN's lhs and rhs in ID.  When FORCE_UNIQUE_P is true,
   we don't actually need information about lhs and rhs.  */
static void
setup_id_lhs_rhs (idata_t id, insn_t insn, bool force_unique_p)
{
  rtx pat = PATTERN (insn);

  if (NONJUMP_INSN_P (insn)
      && GET_CODE (pat) == SET
      && !force_unique_p)
    {
      IDATA_RHS (id) = SET_SRC (pat);
      IDATA_LHS (id) = SET_DEST (pat);
    }
  else
    IDATA_LHS (id) = IDATA_RHS (id) = NULL;
}

/* Possibly downgrade INSN to USE.  */
static void
maybe_downgrade_id_to_use (idata_t id, insn_t insn)
{
  bool must_be_use = false;
  df_ref def;
  rtx lhs = IDATA_LHS (id);
  rtx rhs = IDATA_RHS (id);

  /* We downgrade only SETs.  */
  if (IDATA_TYPE (id) != SET)
    return;

  if (!lhs || !lhs_and_rhs_separable_p (lhs, rhs))
    {
      IDATA_TYPE (id) = USE;
      return;
    }

  FOR_EACH_INSN_DEF (def, insn)
    {
      if (DF_REF_INSN (def)
          && DF_REF_FLAGS_IS_SET (def, DF_REF_PRE_POST_MODIFY)
          && loc_mentioned_in_p (DF_REF_LOC (def), IDATA_RHS (id)))
        {
          must_be_use = true;
          break;
        }

#ifdef STACK_REGS
      /* Make instructions that set stack registers to be ineligible for
         renaming to avoid issues with find_used_regs.  */
      if (IN_RANGE (DF_REF_REGNO (def), FIRST_STACK_REG, LAST_STACK_REG))
        {
          must_be_use = true;
          break;
        }
#endif
    }

  if (must_be_use)
    IDATA_TYPE (id) = USE;
}

/* Setup implicit register clobbers calculated by sched-deps for INSN
   before reload and save them in ID.  */
static void
setup_id_implicit_regs (idata_t id, insn_t insn)
{
  if (reload_completed)
    return;

  HARD_REG_SET temp;

  get_implicit_reg_pending_clobbers (&temp, insn);
  IOR_REG_SET_HRS (IDATA_REG_SETS (id), temp);
}

/* Setup register sets describing INSN in ID.  */
static void
setup_id_reg_sets (idata_t id, insn_t insn)
{
  struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
  df_ref def, use;
  regset tmp = get_clear_regset_from_pool ();

  FOR_EACH_INSN_INFO_DEF (def, insn_info)
    {
      unsigned int regno = DF_REF_REGNO (def);

      /* Post modifies are treated like clobbers by sched-deps.cc.  */
      if (DF_REF_FLAGS_IS_SET (def, (DF_REF_MUST_CLOBBER
                                     | DF_REF_PRE_POST_MODIFY)))
        SET_REGNO_REG_SET (IDATA_REG_CLOBBERS (id), regno);
      else if (! DF_REF_FLAGS_IS_SET (def, DF_REF_MAY_CLOBBER))
        {
          SET_REGNO_REG_SET (IDATA_REG_SETS (id), regno);

#ifdef STACK_REGS
          /* For stack registers, treat writes to them as writes
             to the first one to be consistent with sched-deps.cc.  */
          if (IN_RANGE (regno, FIRST_STACK_REG, LAST_STACK_REG))
            SET_REGNO_REG_SET (IDATA_REG_SETS (id), FIRST_STACK_REG);
#endif
        }
      /* Mark special refs that generate read/write def pair.  */
      if (DF_REF_FLAGS_IS_SET (def, DF_REF_CONDITIONAL)
          || regno == STACK_POINTER_REGNUM)
        bitmap_set_bit (tmp, regno);
    }

  FOR_EACH_INSN_INFO_USE (use, insn_info)
    {
      unsigned int regno = DF_REF_REGNO (use);

      /* When these refs are met for the first time, skip them, as
         these uses are just counterparts of some defs.  */
      if (bitmap_bit_p (tmp, regno))
        bitmap_clear_bit (tmp, regno);
      else if (! DF_REF_FLAGS_IS_SET (use, DF_REF_CALL_STACK_USAGE))
        {
          SET_REGNO_REG_SET (IDATA_REG_USES (id), regno);

#ifdef STACK_REGS
          /* For stack registers, treat reads from them as reads from
             the first one to be consistent with sched-deps.cc.  */
          if (IN_RANGE (regno, FIRST_STACK_REG, LAST_STACK_REG))
            SET_REGNO_REG_SET (IDATA_REG_USES (id), FIRST_STACK_REG);
#endif
        }
    }

  /* Also get implicit reg clobbers from sched-deps.  */
  setup_id_implicit_regs (id, insn);

  return_regset_to_pool (tmp);
}

/* Initialize instruction data for INSN in ID using DF's data.  */
static void
init_id_from_df (idata_t id, insn_t insn, bool force_unique_p)
{
  gcc_assert (DF_INSN_UID_SAFE_GET (INSN_UID (insn)) != NULL);

  setup_id_for_insn (id, insn, force_unique_p);
  setup_id_lhs_rhs (id, insn, force_unique_p);

  if (INSN_NOP_P (insn))
    return;

  maybe_downgrade_id_to_use (id, insn);
  setup_id_reg_sets (id, insn);
}

/* Initialize instruction data for INSN in ID.  */
static void
deps_init_id (idata_t id, insn_t insn, bool force_unique_p)
{
  class deps_desc _dc, *dc = &_dc;

  deps_init_id_data.where = DEPS_IN_NOWHERE;
  deps_init_id_data.id = id;
  deps_init_id_data.force_unique_p = force_unique_p;
  deps_init_id_data.force_use_p = false;

  init_deps (dc, false);
  memcpy (&deps_init_id_sched_deps_info,
          &const_deps_init_id_sched_deps_info,
          sizeof (deps_init_id_sched_deps_info));
  if (spec_info != NULL)
    deps_init_id_sched_deps_info.generate_spec_deps = 1;
  sched_deps_info = &deps_init_id_sched_deps_info;

  deps_analyze_insn (dc, insn);
  /* Implicit reg clobbers received from sched-deps separately.  */
  setup_id_implicit_regs (id, insn);

  free_deps (dc);
  deps_init_id_data.id = NULL;
}
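
/* The two entry points above build the same idata_t in different ways:
   init_id_from_df reads the DF def/use records of INSN directly, while
   deps_init_id derives the equivalent information by running the
   sched-deps analyzer over INSN with the deps_init_id_* hooks
   installed.  */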

struct sched_scan_info_def
{
  /* This hook notifies the scheduler frontend to extend its internal
     per-basic-block data structures.  It should be called once before
     a series of calls to bb_init ().  */
  void (*extend_bb) (void);

  /* This hook makes the scheduler frontend initialize its internal data
     structures for the passed basic block.  */
  void (*init_bb) (basic_block);

  /* This hook notifies the scheduler frontend to extend its internal
     per-insn data structures.  It should be called once before a series
     of calls to insn_init ().  */
  void (*extend_insn) (void);

  /* This hook makes the scheduler frontend initialize its internal data
     structures for the passed insn.  */
  void (*init_insn) (insn_t);
};

/* A driver function to add a set of basic blocks (BBS) to the
   scheduling region.  */
static void
sched_scan (const struct sched_scan_info_def *ssi, bb_vec_t bbs)
{
  unsigned i;
  basic_block bb;

  if (ssi->extend_bb)
    ssi->extend_bb ();

  if (ssi->init_bb)
    FOR_EACH_VEC_ELT (bbs, i, bb)
      ssi->init_bb (bb);

  if (ssi->extend_insn)
    ssi->extend_insn ();

  if (ssi->init_insn)
    FOR_EACH_VEC_ELT (bbs, i, bb)
      {
        rtx_insn *insn;

        FOR_BB_INSNS (bb, insn)
          ssi->init_insn (insn);
      }
}
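
/* See sel_init_global_and_expr and sel_finish_global_and_expr below for
   the two instantiations of this driver used in this file.  */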

/* Implement hooks for collecting fundamental insn properties, such as
   whether the insn is an ASM or is within a SCHED_GROUP.  */

/* True when the "one-time init" data for INSN has already been
   initialized.  */
static bool
first_time_insn_init (insn_t insn)
{
  return INSN_LIVE (insn) == NULL;
}

/* Hash an entry in a transformed_insns hashtable.  */
static hashval_t
hash_transformed_insns (const void *p)
{
  return VINSN_HASH_RTX (((const struct transformed_insns *) p)->vinsn_old);
}

/* Compare the entries in a transformed_insns hashtable.  */
static int
eq_transformed_insns (const void *p, const void *q)
{
  rtx_insn *i1 =
    VINSN_INSN_RTX (((const struct transformed_insns *) p)->vinsn_old);
  rtx_insn *i2 =
    VINSN_INSN_RTX (((const struct transformed_insns *) q)->vinsn_old);

  if (INSN_UID (i1) == INSN_UID (i2))
    return 1;
  return rtx_equal_p (PATTERN (i1), PATTERN (i2));
}

/* Free an entry in a transformed_insns hashtable.  */
static void
free_transformed_insns (void *p)
{
  struct transformed_insns *pti = (struct transformed_insns *) p;

  vinsn_detach (pti->vinsn_old);
  vinsn_detach (pti->vinsn_new);
  free (pti);
}
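
/* Each entry of a transformed_insns hashtable caches the result of
   transforming one vinsn into another (e.g. by substitution), keyed by
   the hash of the original vinsn.  Entries hold references to both
   vinsns, which is why free_transformed_insns detaches them before
   freeing the entry.  */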

/* Init the s_i_d data for INSN which should be inited just once, when
   we first see the insn.  */
static void
init_first_time_insn_data (insn_t insn)
{
  /* This should not be set if this is the first time we init data for
     insn.  */
  gcc_assert (first_time_insn_init (insn));

  /* These are needed for nops too.  */
  INSN_LIVE (insn) = get_regset_from_pool ();
  INSN_LIVE_VALID_P (insn) = false;

  if (!INSN_NOP_P (insn))
    {
      INSN_ANALYZED_DEPS (insn) = BITMAP_ALLOC (NULL);
      INSN_FOUND_DEPS (insn) = BITMAP_ALLOC (NULL);
      INSN_TRANSFORMED_INSNS (insn)
        = htab_create (16, hash_transformed_insns,
                       eq_transformed_insns, free_transformed_insns);
      init_deps (&INSN_DEPS_CONTEXT (insn), true);
    }
}

/* Free almost all above data for INSN that is scheduled already.
   Used for extra-large basic blocks.  */
void
free_data_for_scheduled_insn (insn_t insn)
{
  gcc_assert (! first_time_insn_init (insn));

  if (! INSN_ANALYZED_DEPS (insn))
    return;

  BITMAP_FREE (INSN_ANALYZED_DEPS (insn));
  BITMAP_FREE (INSN_FOUND_DEPS (insn));
  htab_delete (INSN_TRANSFORMED_INSNS (insn));

  /* This is allocated only for bookkeeping insns.  */
  if (INSN_ORIGINATORS (insn))
    BITMAP_FREE (INSN_ORIGINATORS (insn));
  free_deps (&INSN_DEPS_CONTEXT (insn));

  INSN_ANALYZED_DEPS (insn) = NULL;

  /* Clear the readonly flag so we would ICE when trying to recalculate
     the deps context (as we believe that it should not happen).  */
  (&INSN_DEPS_CONTEXT (insn))->readonly = 0;
}

/* Free the same data as above for INSN.  */
static void
free_first_time_insn_data (insn_t insn)
{
  gcc_assert (! first_time_insn_init (insn));

  free_data_for_scheduled_insn (insn);
  return_regset_to_pool (INSN_LIVE (insn));
  INSN_LIVE (insn) = NULL;
  INSN_LIVE_VALID_P (insn) = false;
}

/* Initialize region-scope data structures for basic blocks.  */
static void
init_global_and_expr_for_bb (basic_block bb)
{
  if (sel_bb_empty_p (bb))
    return;

  invalidate_av_set (bb);
}

/* Data for global dependency analysis (to initialize CANT_MOVE and
   SCHED_GROUP_P).  */
static struct
{
  /* Previous insn.  */
  insn_t prev_insn;
} init_global_data;
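
/* init_global_data.prev_insn threads consecutive SCHED_GROUP_P insns
   together through their INSN_SCHED_NEXT fields (see the sched_group
   handling in init_global_and_expr_for_insn below); it is reset at every
   basic block note and at every insn that is not part of a group.  */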

/* Determine if INSN is in the sched_group, is an asm or should not be
   cloned.  After that initialize its expr.  */
static void
init_global_and_expr_for_insn (insn_t insn)
{
  if (LABEL_P (insn))
    return;

  if (NOTE_INSN_BASIC_BLOCK_P (insn))
    {
      init_global_data.prev_insn = NULL;
      return;
    }

  gcc_assert (INSN_P (insn));

  if (SCHED_GROUP_P (insn))
    /* Setup a sched_group.  */
    {
      insn_t prev_insn = init_global_data.prev_insn;

      if (prev_insn)
        INSN_SCHED_NEXT (prev_insn) = insn;

      init_global_data.prev_insn = insn;
    }
  else
    init_global_data.prev_insn = NULL;

  if (GET_CODE (PATTERN (insn)) == ASM_INPUT
      || asm_noperands (PATTERN (insn)) >= 0)
    /* Mark INSN as an asm.  */
    INSN_ASM_P (insn) = true;

  {
    bool force_unique_p;
    ds_t spec_done_ds;

    /* Certain instructions cannot be cloned, and frame related insns and
       the insn adjacent to NOTE_INSN_EPILOGUE_BEG cannot be moved out of
       their block.  */
    if (prologue_epilogue_contains (insn))
      {
        if (RTX_FRAME_RELATED_P (insn))
          CANT_MOVE (insn) = 1;
        else
          {
            rtx note;
            for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
              if (REG_NOTE_KIND (note) == REG_SAVE_NOTE
                  && ((enum insn_note) INTVAL (XEXP (note, 0))
                      == NOTE_INSN_EPILOGUE_BEG))
                {
                  CANT_MOVE (insn) = 1;
                  break;
                }
          }
        force_unique_p = true;
      }
    else
      if (CANT_MOVE (insn)
          || INSN_ASM_P (insn)
          || SCHED_GROUP_P (insn)
          || CALL_P (insn)
          /* Exception handling insns are always unique.  */
          || (cfun->can_throw_non_call_exceptions
              && can_throw_internal (insn))
          /* TRAP_IF, though it has an INSN code, is control_flow_insn_p ().  */
          || control_flow_insn_p (insn)
          || volatile_insn_p (PATTERN (insn))
          || (targetm.cannot_copy_insn_p
              && targetm.cannot_copy_insn_p (insn)))
        force_unique_p = true;
      else
        force_unique_p = false;

    if (targetm.sched.get_insn_spec_ds)
      {
        spec_done_ds = targetm.sched.get_insn_spec_ds (insn);
        spec_done_ds = ds_get_max_dep_weak (spec_done_ds);
      }
    else
      spec_done_ds = 0;

    /* Initialize INSN's expr.  */
    init_expr (INSN_EXPR (insn), vinsn_create (insn, force_unique_p), 0,
               REG_BR_PROB_BASE, INSN_PRIORITY (insn), 0, BLOCK_NUM (insn),
               spec_done_ds, 0, 0, vNULL, true,
               false, false, false, CANT_MOVE (insn));
  }

  init_first_time_insn_data (insn);
}

/* Scan the region and initialize instruction data for basic blocks BBS.  */
void
sel_init_global_and_expr (bb_vec_t bbs)
{
  /* ??? It would be nice to implement push / pop scheme for sched_infos.  */
  const struct sched_scan_info_def ssi =
    {
      NULL, /* extend_bb */
      init_global_and_expr_for_bb, /* init_bb */
      extend_insn_data, /* extend_insn */
      init_global_and_expr_for_insn /* init_insn */
    };

  sched_scan (&ssi, bbs);
}

/* Finalize region-scope data structures for basic blocks.  */
static void
finish_global_and_expr_for_bb (basic_block bb)
{
  av_set_clear (&BB_AV_SET (bb));
  BB_AV_LEVEL (bb) = 0;
}

/* Finalize INSN's data.  */
static void
finish_global_and_expr_insn (insn_t insn)
{
  if (LABEL_P (insn) || NOTE_INSN_BASIC_BLOCK_P (insn))
    return;

  gcc_assert (INSN_P (insn));

  if (INSN_LUID (insn) > 0)
    {
      free_first_time_insn_data (insn);
      INSN_WS_LEVEL (insn) = 0;
      CANT_MOVE (insn) = 0;

      /* We can no longer assert this, as vinsns of this insn could be
         easily live in other insn's caches.  This should be changed to
         a counter-like approach among all vinsns.  */
      gcc_assert (true || VINSN_COUNT (INSN_VINSN (insn)) == 1);
      clear_expr (INSN_EXPR (insn));
    }
}

/* Finalize per instruction data for the whole region.  */
void
sel_finish_global_and_expr (void)
{
  {
    bb_vec_t bbs;
    int i;

    bbs.create (current_nr_blocks);

    for (i = 0; i < current_nr_blocks; i++)
      bbs.quick_push (BASIC_BLOCK_FOR_FN (cfun, BB_TO_BLOCK (i)));

    /* Clear AV_SETs and INSN_EXPRs.  */
    {
      const struct sched_scan_info_def ssi =
        {
          NULL, /* extend_bb */
          finish_global_and_expr_for_bb, /* init_bb */
          NULL, /* extend_insn */
          finish_global_and_expr_insn /* init_insn */
        };

      sched_scan (&ssi, bbs);
    }

    bbs.release ();
  }

  finish_insns ();
}

/* In the below hooks, we merely calculate whether or not a dependence
   exists, and in which part of the insn.  However, we will need more
   data when we start caching dependence requests.  */

/* Container to hold information for dependency analysis.  */
static struct
{
  deps_t dc;

  /* A variable to track which part of rtx we are scanning in
     sched-deps.cc: sched_analyze_insn ().  */
  deps_where_t where;

  /* Current producer.  */
  insn_t pro;

  /* Current consumer.  */
  vinsn_t con;

  /* If SEL_DEPS_HAS_DEP_P[DEPS_IN_X] is true, then X has a dependence.
     X is from { INSN, LHS, RHS }.  */
  ds_t has_dep_p[DEPS_IN_NOWHERE];
} has_dependence_data;
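
/* has_dependence_data is filled in by has_dependence_p below: PRO/CON name
   the candidate producer and consumer, and after analysis has_dep_p holds
   one ds_t per part of the consumer (whole insn, lhs, rhs), each encoding
   the dependence statuses found against that part.  */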

/* Start analyzing dependencies of INSN.  */
static void
has_dependence_start_insn (insn_t insn ATTRIBUTE_UNUSED)
{
  gcc_assert (has_dependence_data.where == DEPS_IN_NOWHERE);

  has_dependence_data.where = DEPS_IN_INSN;
}

/* Finish analyzing dependencies of an insn.  */
static void
has_dependence_finish_insn (void)
{
  gcc_assert (has_dependence_data.where == DEPS_IN_INSN);

  has_dependence_data.where = DEPS_IN_NOWHERE;
}

/* Start analyzing dependencies of LHS.  */
static void
has_dependence_start_lhs (rtx lhs ATTRIBUTE_UNUSED)
{
  gcc_assert (has_dependence_data.where == DEPS_IN_INSN);

  if (VINSN_LHS (has_dependence_data.con) != NULL)
    has_dependence_data.where = DEPS_IN_LHS;
}

/* Finish analyzing dependencies of an lhs.  */
static void
has_dependence_finish_lhs (void)
{
  has_dependence_data.where = DEPS_IN_INSN;
}

/* Start analyzing dependencies of RHS.  */
static void
has_dependence_start_rhs (rtx rhs ATTRIBUTE_UNUSED)
{
  gcc_assert (has_dependence_data.where == DEPS_IN_INSN);

  if (VINSN_RHS (has_dependence_data.con) != NULL)
    has_dependence_data.where = DEPS_IN_RHS;
}

/* Finish analyzing dependencies of an rhs.  */
static void
has_dependence_finish_rhs (void)
{
  gcc_assert (has_dependence_data.where == DEPS_IN_RHS
              || has_dependence_data.where == DEPS_IN_INSN);

  has_dependence_data.where = DEPS_IN_INSN;
}

/* Note a set of REGNO.  */
static void
has_dependence_note_reg_set (int regno)
{
  struct deps_reg *reg_last = &has_dependence_data.dc->reg_last[regno];

  if (!sched_insns_conditions_mutex_p (has_dependence_data.pro,
                                       VINSN_INSN_RTX
                                       (has_dependence_data.con)))
    {
      ds_t *dsp = &has_dependence_data.has_dep_p[has_dependence_data.where];

      if (reg_last->sets != NULL
          || reg_last->clobbers != NULL)
        *dsp = (*dsp & ~SPECULATIVE) | DEP_OUTPUT;

      if (reg_last->uses || reg_last->implicit_sets)
        *dsp = (*dsp & ~SPECULATIVE) | DEP_ANTI;
    }
}

/* Note a clobber of REGNO.  */
static void
has_dependence_note_reg_clobber (int regno)
{
  struct deps_reg *reg_last = &has_dependence_data.dc->reg_last[regno];

  if (!sched_insns_conditions_mutex_p (has_dependence_data.pro,
                                       VINSN_INSN_RTX
                                       (has_dependence_data.con)))
    {
      ds_t *dsp = &has_dependence_data.has_dep_p[has_dependence_data.where];

      if (reg_last->sets)
        *dsp = (*dsp & ~SPECULATIVE) | DEP_OUTPUT;

      if (reg_last->uses || reg_last->implicit_sets)
        *dsp = (*dsp & ~SPECULATIVE) | DEP_ANTI;
    }
}

/* Note a use of REGNO.  */
static void
has_dependence_note_reg_use (int regno)
{
  struct deps_reg *reg_last = &has_dependence_data.dc->reg_last[regno];

  if (!sched_insns_conditions_mutex_p (has_dependence_data.pro,
                                       VINSN_INSN_RTX
                                       (has_dependence_data.con)))
    {
      ds_t *dsp = &has_dependence_data.has_dep_p[has_dependence_data.where];

      if (reg_last->sets)
        *dsp = (*dsp & ~SPECULATIVE) | DEP_TRUE;

      if (reg_last->clobbers || reg_last->implicit_sets)
        *dsp = (*dsp & ~SPECULATIVE) | DEP_ANTI;

      /* Merge BE_IN_SPEC bits into *DSP when the dependency producer
         is actually a check insn.  We need to do this for any register
         read-read dependency with the check unless we track properly
         all registers written by BE_IN_SPEC-speculated insns, as
         we don't have explicit dependence lists.  See PR 53975.  */
      if (reg_last->uses)
        {
          ds_t pro_spec_checked_ds;

          pro_spec_checked_ds = INSN_SPEC_CHECKED_DS (has_dependence_data.pro);
          pro_spec_checked_ds = ds_get_max_dep_weak (pro_spec_checked_ds);

          if (pro_spec_checked_ds != 0)
            *dsp = ds_full_merge (*dsp, pro_spec_checked_ds,
                                  NULL_RTX, NULL_RTX);
        }
    }
}

/* Note a memory dependence.  */
static void
has_dependence_note_mem_dep (rtx mem ATTRIBUTE_UNUSED,
                             rtx pending_mem ATTRIBUTE_UNUSED,
                             insn_t pending_insn ATTRIBUTE_UNUSED,
                             ds_t ds ATTRIBUTE_UNUSED)
{
  if (!sched_insns_conditions_mutex_p (has_dependence_data.pro,
                                       VINSN_INSN_RTX (has_dependence_data.con)))
    {
      ds_t *dsp = &has_dependence_data.has_dep_p[has_dependence_data.where];

      *dsp = ds_full_merge (ds, *dsp, pending_mem, mem);
    }
}

/* Note a dependence.  */
static void
has_dependence_note_dep (insn_t pro, ds_t ds ATTRIBUTE_UNUSED)
{
  insn_t real_pro = has_dependence_data.pro;
  insn_t real_con = VINSN_INSN_RTX (has_dependence_data.con);

  /* We do not allow for debug insns to move through others unless they
     are at the start of bb.  This movement may create bookkeeping copies
     that later would not be able to move up, violating the invariant
     that a bookkeeping copy should be movable as the original insn.
     Detect that here and allow that movement if we allowed it before
     in the first place.  */
  if (DEBUG_INSN_P (real_con) && !DEBUG_INSN_P (real_pro)
      && INSN_UID (NEXT_INSN (pro)) == INSN_UID (real_con))
    return;

  if (!sched_insns_conditions_mutex_p (real_pro, real_con))
    {
      ds_t *dsp = &has_dependence_data.has_dep_p[has_dependence_data.where];

      *dsp = ds_full_merge (ds, *dsp, NULL_RTX, NULL_RTX);
    }
}

/* Mark the insn as having a hard dependence that prevents speculation.  */
void
sel_mark_hard_insn (rtx insn)
{
  int i;

  /* Only work when we're in has_dependence_p mode.
     ??? This is a hack, this should actually be a hook.  */
  if (!has_dependence_data.dc || !has_dependence_data.pro)
    return;

  gcc_assert (insn == VINSN_INSN_RTX (has_dependence_data.con));
  gcc_assert (has_dependence_data.where == DEPS_IN_INSN);

  for (i = 0; i < DEPS_IN_NOWHERE; i++)
    has_dependence_data.has_dep_p[i] &= ~SPECULATIVE;
}

/* This structure holds the hooks for the dependency analysis used when
   actually processing dependencies in the scheduler.  */
static struct sched_deps_info_def has_dependence_sched_deps_info;

/* This initializes most of the fields of the above structure.  */
static const struct sched_deps_info_def const_has_dependence_sched_deps_info =
  {
    NULL,

    has_dependence_start_insn,
    has_dependence_finish_insn,
    has_dependence_start_lhs,
    has_dependence_finish_lhs,
    has_dependence_start_rhs,
    has_dependence_finish_rhs,
    has_dependence_note_reg_set,
    has_dependence_note_reg_clobber,
    has_dependence_note_reg_use,
    has_dependence_note_mem_dep,
    has_dependence_note_dep,

    0, /* use_cselib */
    0, /* use_deps_list */
    0 /* generate_spec_deps */
  };

/* Initialize has_dependence_sched_deps_info with extra spec field.  */
static void
setup_has_dependence_sched_deps_info (void)
{
  memcpy (&has_dependence_sched_deps_info,
          &const_has_dependence_sched_deps_info,
          sizeof (has_dependence_sched_deps_info));

  if (spec_info != NULL)
    has_dependence_sched_deps_info.generate_spec_deps = 1;

  sched_deps_info = &has_dependence_sched_deps_info;
}

/* Remove all dependences found and recorded in the has_dependence_data
   array.  */
void
sel_clear_has_dependence (void)
{
  int i;

  for (i = 0; i < DEPS_IN_NOWHERE; i++)
    has_dependence_data.has_dep_p[i] = 0;
}

/* Return nonzero if EXPR is dependent upon PRED.  Return the pointer
   to the dependence information array in HAS_DEP_PP.  */
ds_t
has_dependence_p (expr_t expr, insn_t pred, ds_t **has_dep_pp)
{
  int i;
  ds_t ds;
  class deps_desc *dc;

  if (INSN_SIMPLEJUMP_P (pred))
    /* Unconditional jump is just a transfer of control flow.
       Ignore it.  */
    return false;

  dc = &INSN_DEPS_CONTEXT (pred);

  /* We init this field lazily.  */
  if (dc->reg_last == NULL)
    init_deps_reg_last (dc);

  if (!dc->readonly)
    {
      has_dependence_data.pro = NULL;
      /* Initialize empty dep context with information about PRED.  */
      advance_deps_context (dc, pred);
      dc->readonly = 1;
    }

  has_dependence_data.where = DEPS_IN_NOWHERE;
  has_dependence_data.pro = pred;
  has_dependence_data.con = EXPR_VINSN (expr);
  has_dependence_data.dc = dc;

  sel_clear_has_dependence ();

  /* Now catch all dependencies that would be generated between PRED and
     INSN.  */
  setup_has_dependence_sched_deps_info ();
  deps_analyze_insn (dc, EXPR_INSN_RTX (expr));
  has_dependence_data.dc = NULL;

  /* When a barrier was found, set DEPS_IN_INSN bits.  */
  if (dc->last_reg_pending_barrier == TRUE_BARRIER)
    has_dependence_data.has_dep_p[DEPS_IN_INSN] = DEP_TRUE;
  else if (dc->last_reg_pending_barrier == MOVE_BARRIER)
    has_dependence_data.has_dep_p[DEPS_IN_INSN] = DEP_ANTI;

  /* Do not allow stores to memory to move through checks.  Currently
     we don't move this to sched-deps.cc as the check doesn't have
     obvious places to which this dependence can be attached.
     FIXME: this should go to a hook.  */
  if (EXPR_LHS (expr)
      && MEM_P (EXPR_LHS (expr))
      && sel_insn_is_speculation_check (pred))
    has_dependence_data.has_dep_p[DEPS_IN_INSN] = DEP_ANTI;

  *has_dep_pp = has_dependence_data.has_dep_p;
  ds = 0;
  for (i = 0; i < DEPS_IN_NOWHERE; i++)
    ds = ds_full_merge (ds, has_dependence_data.has_dep_p[i],
                        NULL_RTX, NULL_RTX);

  return ds;
}
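
/* A sketch of a hypothetical caller (the real ones live in sel-sched.cc):

     ds_t *has_dep_p;
     ds_t ds = has_dependence_p (expr, pred, &has_dep_p);
     if (ds == 0)
       ... EXPR can be moved up through PRED ...
     else if (!has_dep_p[DEPS_IN_INSN] && !has_dep_p[DEPS_IN_LHS])
       ... only the rhs conflicts, so a transformation may still help ...

   The merged return value answers whether any dependence exists at all,
   while the per-part array lets the caller see where it was found.  */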

/* Dependence hooks implementation that checks dependence latency constraints
   on the insns being scheduled.  The entry point for these routines is the
   tick_check_p predicate.  */

static struct
{
  /* An expr we are currently checking.  */
  expr_t expr;

  /* The minimal cycle for its scheduling.  */
  int cycle;

  /* Whether we have seen a true dependence while checking.  */
  bool seen_true_dep_p;
} tick_check_data;

/* Update minimal scheduling cycle for tick_check_insn given that it depends
   on PRO with status DS and weight DW.  */
static void
tick_check_dep_with_dw (insn_t pro_insn, ds_t ds, dw_t dw)
{
  expr_t con_expr = tick_check_data.expr;
  insn_t con_insn = EXPR_INSN_RTX (con_expr);

  if (con_insn != pro_insn)
    {
      enum reg_note dt;
      int tick;

      if (/* PROducer was removed from above due to pipelining.  */
          !INSN_IN_STREAM_P (pro_insn)
          /* Or PROducer was originally on the next iteration regarding the
             CONsumer.  */
          || (INSN_SCHED_TIMES (pro_insn)
              - EXPR_SCHED_TIMES (con_expr)) > 1)
        /* Don't count this dependence.  */
        return;

      dt = ds_to_dt (ds);
      if (dt == REG_DEP_TRUE)
        tick_check_data.seen_true_dep_p = true;

      gcc_assert (INSN_SCHED_CYCLE (pro_insn) > 0);

      {
        dep_def _dep, *dep = &_dep;

        init_dep (dep, pro_insn, con_insn, dt);

        tick = INSN_SCHED_CYCLE (pro_insn) + dep_cost_1 (dep, dw);
      }

      /* When there are several kinds of dependencies between pro and con,
         only REG_DEP_TRUE should be taken into account.  */
      if (tick > tick_check_data.cycle
          && (dt == REG_DEP_TRUE || !tick_check_data.seen_true_dep_p))
        tick_check_data.cycle = tick;
    }
}

/* An implementation of note_dep hook.  */
static void
tick_check_note_dep (insn_t pro, ds_t ds)
{
  tick_check_dep_with_dw (pro, ds, 0);
}

/* An implementation of note_mem_dep hook.  */
static void
tick_check_note_mem_dep (rtx mem1, rtx mem2, insn_t pro, ds_t ds)
{
  dw_t dw;

  dw = (ds_to_dt (ds) == REG_DEP_TRUE
        ? estimate_dep_weak (mem1, mem2)
        : 0);

  tick_check_dep_with_dw (pro, ds, dw);
}

/* This structure contains hooks for dependence analysis used when determining
   whether an insn is ready for scheduling.  */
static struct sched_deps_info_def tick_check_sched_deps_info =
  {
    NULL,

    NULL,
    NULL,
    NULL,
    NULL,
    NULL,
    NULL,
    haifa_note_reg_set,
    haifa_note_reg_clobber,
    haifa_note_reg_use,
    tick_check_note_mem_dep,
    tick_check_note_dep,

    0, 0, 0
  };
3549 | ||||
/* Estimate number of cycles from the current cycle of FENCE until EXPR can be
   scheduled.  Return 0 if all data from producers in DC is ready.  */
int
tick_check_p (expr_t expr, deps_t dc, fence_t fence)
{
  int cycles_left;
  /* Initialize variables.  */
  tick_check_data.expr = expr;
  tick_check_data.cycle = 0;
  tick_check_data.seen_true_dep_p = false;
  sched_deps_info = &tick_check_sched_deps_info;

  gcc_assert (!dc->readonly);
  dc->readonly = 1;
  deps_analyze_insn (dc, EXPR_INSN_RTX (expr));
  dc->readonly = 0;

  cycles_left = tick_check_data.cycle - FENCE_CYCLE (fence);

  return cycles_left >= 0 ? cycles_left : 0;
}
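
/* A minimal usage sketch (illustrative only; EXPR, DC and FENCE would come
   from the caller's ready-list processing, outside this function):

     int stall = tick_check_p (expr, dc, fence);

   A zero result means all producers in DC are ready and EXPR can issue at
   the current cycle of FENCE; a positive result is the number of cycles
   EXPR still has to wait.  */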


/* Functions to work with insns.  */

/* Returns true if LHS of INSN is the same as DEST of an insn
   being moved.  */
bool
lhs_of_insn_equals_to_dest_p (insn_t insn, rtx dest)
{
  rtx lhs = INSN_LHS (insn);

  if (lhs == NULL || dest == NULL)
    return false;

  return rtx_equal_p (lhs, dest);
}

/* Return s_i_d entry of INSN.  Callable from debugger.  */
sel_insn_data_def
insn_sid (insn_t insn)
{
  return *SID (insn);
}

/* True when INSN is a speculative check.  We can tell this by looking
   at the data structures of the selective scheduler, not by examining
   the pattern.  */
bool
sel_insn_is_speculation_check (rtx insn)
{
  return s_i_d.exists () && !! INSN_SPEC_CHECKED_DS (insn);
}

/* Extracts machine mode MODE and destination location DST_LOC
   for given INSN.  */
void
get_dest_and_mode (rtx insn, rtx *dst_loc, machine_mode *mode)
{
  rtx pat = PATTERN (insn);

  gcc_assert (dst_loc);
  gcc_assert (GET_CODE (pat) == SET);

  *dst_loc = SET_DEST (pat);

  gcc_assert (*dst_loc);
  gcc_assert (MEM_P (*dst_loc) || REG_P (*dst_loc));

  if (mode)
    *mode = GET_MODE (*dst_loc);
}

/* Returns true when moving through JUMP will result in bookkeeping
   creation.  */
bool
bookkeeping_can_be_created_if_moved_through_p (insn_t jump)
{
  insn_t succ;
  succ_iterator si;

  FOR_EACH_SUCC (succ, si, jump)
    if (sel_num_cfg_preds_gt_1 (succ))
      return true;

  return false;
}

/* Return 'true' if INSN is the only one in its basic block.  */
static bool
insn_is_the_only_one_in_bb_p (insn_t insn)
{
  return sel_bb_head_p (insn) && sel_bb_end_p (insn);
}

/* Check that the region we're scheduling still has at most one
   backedge.  */
static void
verify_backedges (void)
{
  if (pipelining_p)
    {
      int i, n = 0;
      edge e;
      edge_iterator ei;

      for (i = 0; i < current_nr_blocks; i++)
	FOR_EACH_EDGE (e, ei, BASIC_BLOCK_FOR_FN (cfun, BB_TO_BLOCK (i))->succs)
	  if (in_current_region_p (e->dest)
	      && BLOCK_TO_BB (e->dest->index) < i)
	    n++;

      gcc_assert (n <= 1);
    }
}


/* Functions to work with control flow.  */

/* Recompute BLOCK_TO_BB and BB_FOR_BLOCK for current region so that blocks
   are sorted in topological order (it might have been invalidated by
   redirecting an edge).  */
static void
sel_recompute_toporder (void)
{
  int i, n, rgn;
  int *postorder, n_blocks;

  postorder = XALLOCAVEC (int, n_basic_blocks_for_fn (cfun));
  n_blocks = post_order_compute (postorder, false, false);

  rgn = CONTAINING_RGN (BB_TO_BLOCK (0));
  for (n = 0, i = n_blocks - 1; i >= 0; i--)
    if (CONTAINING_RGN (postorder[i]) == rgn)
      {
	BLOCK_TO_BB (postorder[i]) = n;
	BB_TO_BLOCK (n) = postorder[i];
	n++;
      }

  /* Assert that we updated info for all blocks.  We may miss some blocks if
     this function is called when redirecting an edge has made a block
     unreachable, but that block has not been deleted yet.  */
  gcc_assert (n == RGN_NR_BLOCKS (rgn));
}
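
/* A worked example of the renumbering above (illustrative only): for a
   three-block region A -> B -> C, post_order_compute yields {C, B, A};
   walking that array backwards assigns BLOCK_TO_BB numbers 0, 1, 2 to
   A, B, C respectively, restoring a topological numbering after an edge
   redirection.  */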

/* Tidy the possibly empty block BB.  */
static bool
maybe_tidy_empty_bb (basic_block bb)
{
  basic_block succ_bb, pred_bb, note_bb;
  vec<basic_block> dom_bbs;
  edge e;
  edge_iterator ei;
  bool rescan_p;

  /* Keep empty bb only if this block immediately precedes EXIT and
     has a non-fallthru incoming edge, or it has no predecessors or
     successors.  Otherwise remove it.  */
  if (!sel_bb_empty_p (bb)
      || (single_succ_p (bb)
	  && single_succ (bb) == EXIT_BLOCK_PTR_FOR_FN (cfun)
	  && (!single_pred_p (bb)
	      || !(single_pred_edge (bb)->flags & EDGE_FALLTHRU)))
      || EDGE_COUNT (bb->preds) == 0
      || EDGE_COUNT (bb->succs) == 0)
    return false;

  /* Do not attempt to redirect complex edges.  */
  FOR_EACH_EDGE (e, ei, bb->preds)
    if (e->flags & EDGE_COMPLEX)
      return false;
    else if (e->flags & EDGE_FALLTHRU)
      {
	rtx note;
	/* If prev bb ends with asm goto, see if any of the
	   ASM_OPERANDS_LABELs don't point to the fallthru
	   label.  Do not attempt to redirect it in that case.  */
	if (JUMP_P (BB_END (e->src))
	    && (note = extract_asm_operands (PATTERN (BB_END (e->src)))))
	  {
	    int i, n = ASM_OPERANDS_LABEL_LENGTH (note);

	    for (i = 0; i < n; ++i)
	      if (XEXP (ASM_OPERANDS_LABEL (note, i), 0) == BB_HEAD (bb))
		return false;
	  }
      }

  free_data_sets (bb);

  /* Do not delete BB if it has more than one successor.
     That can occur when we are moving a jump.  */
  if (!single_succ_p (bb))
    {
      gcc_assert (can_merge_blocks_p (bb->prev_bb, bb));
      sel_merge_blocks (bb->prev_bb, bb);
      return true;
    }

  succ_bb = single_succ (bb);
  rescan_p = true;
  pred_bb = NULL;
  dom_bbs.create (0);

  /* Save a pred/succ from the current region to attach the notes to.  */
  note_bb = NULL;
  FOR_EACH_EDGE (e, ei, bb->preds)
    if (in_current_region_p (e->src))
      {
	note_bb = e->src;
	break;
      }
  if (note_bb == NULL)
    note_bb = succ_bb;

  /* Redirect all non-fallthru edges to the next bb.  */
  while (rescan_p)
    {
      rescan_p = false;

      FOR_EACH_EDGE (e, ei, bb->preds)
	{
	  pred_bb = e->src;

	  if (!(e->flags & EDGE_FALLTHRU))
	    {
	      /* We cannot invalidate the computed topological order by moving
		 the edge destination block (E->SUCC) along a fallthru edge.

		 We will update dominators here only when redirecting leaves
		 a block unreachable; otherwise sel_redirect_edge_and_branch
		 will take care of it.  */
	      if (e->dest != bb
		  && single_pred_p (e->dest))
		dom_bbs.safe_push (e->dest);
	      sel_redirect_edge_and_branch (e, succ_bb);
	      rescan_p = true;
	      break;
	    }
	  /* If the edge is fallthru, but PRED_BB ends in a conditional jump
	     to BB (so there is no non-fallthru edge from PRED_BB to BB), we
	     still have to adjust it.  */
	  else if (single_succ_p (pred_bb) && any_condjump_p (BB_END (pred_bb)))
	    {
	      /* If possible, try to remove the unneeded conditional jump.  */
	      if (onlyjump_p (BB_END (pred_bb))
		  && INSN_SCHED_TIMES (BB_END (pred_bb)) == 0
		  && !IN_CURRENT_FENCE_P (BB_END (pred_bb)))
		{
		  if (!sel_remove_insn (BB_END (pred_bb), false, false))
		    tidy_fallthru_edge (e);
		}
	      else
		sel_redirect_edge_and_branch (e, succ_bb);
	      rescan_p = true;
	      break;
	    }
	}
    }

  if (can_merge_blocks_p (bb->prev_bb, bb))
    sel_merge_blocks (bb->prev_bb, bb);
  else
    {
      /* This is a block without fallthru predecessor.  Just delete it.  */
      gcc_assert (note_bb);
      move_bb_info (note_bb, bb);
      remove_empty_bb (bb, true);
    }

  if (!dom_bbs.is_empty ())
    {
      dom_bbs.safe_push (succ_bb);
      iterate_fix_dominators (CDI_DOMINATORS, dom_bbs, false);
      dom_bbs.release ();
    }

  return true;
}

/* Tidy the control flow after we have removed the original insn from
   XBB.  Return true if we have removed some blocks.  When FULL_TIDYING
   is true, also try to optimize control flow on non-empty blocks.  */
bool
tidy_control_flow (basic_block xbb, bool full_tidying)
{
  bool changed = true;
  insn_t first, last;

  /* First check whether XBB is empty.  */
  changed = maybe_tidy_empty_bb (xbb);
  if (changed || !full_tidying)
    return changed;

  /* Check if there is an unnecessary jump left after the insn's removal.  */
  if (bb_has_removable_jump_to_p (xbb, xbb->next_bb)
      && INSN_SCHED_TIMES (BB_END (xbb)) == 0
      && !IN_CURRENT_FENCE_P (BB_END (xbb)))
    {
      /* We used to call sel_remove_insn here, which can trigger
	 tidy_control_flow before we fix up the fallthru edge.  Correct that
	 ordering by explicitly doing the latter before the former.  */
      clear_expr (INSN_EXPR (BB_END (xbb)));
      tidy_fallthru_edge (EDGE_SUCC (xbb, 0));
      if (tidy_control_flow (xbb, false))
	return true;
    }

  first = sel_bb_head (xbb);
  last = sel_bb_end (xbb);
  if (MAY_HAVE_DEBUG_INSNS)
    {
      if (first != last && DEBUG_INSN_P (first))
	do
	  first = NEXT_INSN (first);
	while (first != last && (DEBUG_INSN_P (first) || NOTE_P (first)));

      if (first != last && DEBUG_INSN_P (last))
	do
	  last = PREV_INSN (last);
	while (first != last && (DEBUG_INSN_P (last) || NOTE_P (last)));
    }
  /* Check if there is an unnecessary jump in the previous basic block
     leading to the next basic block, left after removing INSN from the
     stream.  If so, remove that jump and redirect the edge to the current
     basic block (where INSN was before deletion).  This way, when the NOP
     is deleted several instructions later together with its basic block,
     we will not get a jump to the next instruction, which can be
     harmful.  */
  if (first == last
      && !sel_bb_empty_p (xbb)
      && INSN_NOP_P (last)
      /* Flow goes fallthru from current block to the next.  */
      && EDGE_COUNT (xbb->succs) == 1
      && (EDGE_SUCC (xbb, 0)->flags & EDGE_FALLTHRU)
      /* When successor is an EXIT block, it may not be the next block.  */
      && single_succ (xbb) != EXIT_BLOCK_PTR_FOR_FN (cfun)
      /* And unconditional jump in previous basic block leads to
	 next basic block of XBB and this jump can be safely removed.  */
      && in_current_region_p (xbb->prev_bb)
      && bb_has_removable_jump_to_p (xbb->prev_bb, xbb->next_bb)
      && INSN_SCHED_TIMES (BB_END (xbb->prev_bb)) == 0
      /* Also this jump is not at the scheduling boundary.  */
      && !IN_CURRENT_FENCE_P (BB_END (xbb->prev_bb)))
    {
      bool recompute_toporder_p;
      /* Clear data structures of jump - jump itself will be removed
	 by sel_redirect_edge_and_branch.  */
      clear_expr (INSN_EXPR (BB_END (xbb->prev_bb)));
      recompute_toporder_p
	= sel_redirect_edge_and_branch (EDGE_SUCC (xbb->prev_bb, 0), xbb);

      gcc_assert (EDGE_SUCC (xbb->prev_bb, 0)->flags & EDGE_FALLTHRU);

      /* We could have skipped some debug insns which did not get removed
	 with the block, and the seqnos could become incorrect.  Fix them up
	 here.  */
      if (MAY_HAVE_DEBUG_INSNS
	  && (sel_bb_head (xbb) != first || sel_bb_end (xbb) != last))
	{
	  if (!sel_bb_empty_p (xbb->prev_bb))
	    {
	      int prev_seqno = INSN_SEQNO (sel_bb_end (xbb->prev_bb));
	      if (prev_seqno > INSN_SEQNO (sel_bb_head (xbb)))
		for (insn_t insn = sel_bb_head (xbb); insn != first;
		     insn = NEXT_INSN (insn))
		  INSN_SEQNO (insn) = prev_seqno + 1;
	    }
	}

      /* It can turn out that after removing the unused jump, the basic block
	 that contained that jump becomes empty too.  In such a case remove
	 it too.  */
      if (sel_bb_empty_p (xbb->prev_bb))
	changed = maybe_tidy_empty_bb (xbb->prev_bb);
      if (recompute_toporder_p)
	sel_recompute_toporder ();
    }

  /* TODO: use separate flag for CFG checking.  */
  if (flag_checking)
    {
      verify_backedges ();
      verify_dominators (CDI_DOMINATORS);
    }

  return changed;
}

/* Purge meaningless empty blocks in the middle of a region.  */
void
purge_empty_blocks (void)
{
  int i;

  /* Do not attempt to delete the first basic block in the region.  */
  for (i = 1; i < current_nr_blocks; )
    {
      basic_block b = BASIC_BLOCK_FOR_FN (cfun, BB_TO_BLOCK (i));

      if (maybe_tidy_empty_bb (b))
	continue;

      i++;
    }
}

/* Rip INSN off the insn stream.  When ONLY_DISCONNECT is true,
   do not delete insn's data, because it will be later re-emitted.
   Return true if we have removed some blocks afterwards.  */
bool
sel_remove_insn (insn_t insn, bool only_disconnect, bool full_tidying)
{
  basic_block bb = BLOCK_FOR_INSN (insn);

  gcc_assert (INSN_IN_STREAM_P (insn));

  if (DEBUG_INSN_P (insn) && BB_AV_SET_VALID_P (bb))
    {
      expr_t expr;
      av_set_iterator i;

      /* When we remove a debug insn that is head of a BB, it remains
	 in the AV_SET of the block, but it shouldn't.  */
      FOR_EACH_EXPR_1 (expr, i, &BB_AV_SET (bb))
	if (EXPR_INSN_RTX (expr) == insn)
	  {
	    av_set_iter_remove (&i);
	    break;
	  }
    }

  if (only_disconnect)
    remove_insn (insn);
  else
    {
      delete_insn (insn);
      clear_expr (INSN_EXPR (insn));
    }

  /* It is necessary to NULL these fields in case we are going to re-insert
     INSN into the insn stream, as will usually happen in the ONLY_DISCONNECT
     case, but also for NOPs that we will return to the nop pool.  */
  SET_PREV_INSN (insn) = NULL_RTX;
  SET_NEXT_INSN (insn) = NULL_RTX;
  set_block_for_insn (insn, NULL);

  return tidy_control_flow (bb, full_tidying);
}

/* Estimate the number of insns in BB.  */
static int
sel_estimate_number_of_insns (basic_block bb)
{
  int res = 0;
  insn_t insn = NEXT_INSN (BB_HEAD (bb)), next_tail = NEXT_INSN (BB_END (bb));

  for (; insn != next_tail; insn = NEXT_INSN (insn))
    if (NONDEBUG_INSN_P (insn))
      res++;

  return res;
}

/* We don't need separate luids for notes or labels.  */
static int
sel_luid_for_non_insn (rtx x)
{
  gcc_assert (NOTE_P (x) || LABEL_P (x));

  return -1;
}

/* Find the proper seqno for inserting at INSN by successors.
   Return -1 if no successors with positive seqno exist.  */
static int
get_seqno_by_succs (rtx_insn *insn)
{
  basic_block bb = BLOCK_FOR_INSN (insn);
  rtx_insn *tmp = insn, *end = BB_END (bb);
  int seqno;
  insn_t succ = NULL;
  succ_iterator si;

  while (tmp != end)
    {
      tmp = NEXT_INSN (tmp);
      if (INSN_P (tmp))
	return INSN_SEQNO (tmp);
    }

  seqno = INT_MAX;

  FOR_EACH_SUCC_1 (succ, si, end, SUCCS_NORMAL)
    if (INSN_SEQNO (succ) > 0)
      seqno = MIN (seqno, INSN_SEQNO (succ));

  if (seqno == INT_MAX)
    return -1;

  return seqno;
}

/* Compute seqno for INSN by its preds or succs.  Use OLD_SEQNO to compute
   seqno in corner cases.  */
static int
get_seqno_for_a_jump (insn_t insn, int old_seqno)
{
  int seqno;

  gcc_assert (INSN_SIMPLEJUMP_P (insn));

  if (!sel_bb_head_p (insn))
    seqno = INSN_SEQNO (PREV_INSN (insn));
  else
    {
      basic_block bb = BLOCK_FOR_INSN (insn);

      if (single_pred_p (bb)
	  && !in_current_region_p (single_pred (bb)))
	{
	  /* We can have preds outside a region when splitting edges
	     for pipelining of an outer loop.  Use succ instead.
	     There should be only one of them.  */
	  insn_t succ = NULL;
	  succ_iterator si;
	  bool first = true;

	  gcc_assert (flag_sel_sched_pipelining_outer_loops
		      && current_loop_nest);
	  FOR_EACH_SUCC_1 (succ, si, insn,
			   SUCCS_NORMAL | SUCCS_SKIP_TO_LOOP_EXITS)
	    {
	      gcc_assert (first);
	      first = false;
	    }

	  gcc_assert (succ != NULL);
	  seqno = INSN_SEQNO (succ);
	}
      else
	{
	  insn_t *preds;
	  int n;

	  cfg_preds (BLOCK_FOR_INSN (insn), &preds, &n);

	  gcc_assert (n > 0);
	  /* For one predecessor, use simple method.  */
	  if (n == 1)
	    seqno = INSN_SEQNO (preds[0]);
	  else
	    seqno = get_seqno_by_preds (insn);

	  free (preds);
	}
    }

  /* We were unable to find a good seqno among preds.  */
  if (seqno < 0)
    seqno = get_seqno_by_succs (insn);

  if (seqno < 0)
    {
      /* The only case where this could be here legally is that the only
	 unscheduled insn was a conditional jump that got removed and turned
	 into this unconditional one.  Initialize from the old seqno
	 of that jump passed down to here.  */
      seqno = old_seqno;
    }

  gcc_assert (seqno >= 0);
  return seqno;
}

/* Find the proper seqno for inserting at INSN.  Returns -1 if no predecessors
   with positive seqno exist.  */
int
get_seqno_by_preds (rtx_insn *insn)
{
  basic_block bb = BLOCK_FOR_INSN (insn);
  rtx_insn *tmp = insn, *head = BB_HEAD (bb);
  insn_t *preds;
  int n, i, seqno;

  /* Loop backwards from INSN to HEAD including both.  */
  while (1)
    {
      if (INSN_P (tmp))
	return INSN_SEQNO (tmp);
      if (tmp == head)
	break;
      tmp = PREV_INSN (tmp);
    }

  cfg_preds (bb, &preds, &n);
  for (i = 0, seqno = -1; i < n; i++)
    seqno = MAX (seqno, INSN_SEQNO (preds[i]));

  return seqno;
}
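
/* Illustrative example (not part of the algorithm): if BB has two
   predecessor insns with seqnos 3 and 7, get_seqno_by_preds returns 7;
   get_seqno_by_succs above instead takes the minimum positive seqno
   among successors, so for successors with seqnos 3 and 7 it would
   return 3.  */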


/* Extend pass-scope data structures for basic blocks.  */
void
sel_extend_global_bb_info (void)
{
  sel_global_bb_info.safe_grow_cleared (last_basic_block_for_fn (cfun), true);
}

/* Extend region-scope data structures for basic blocks.  */
static void
extend_region_bb_info (void)
{
  sel_region_bb_info.safe_grow_cleared (last_basic_block_for_fn (cfun), true);
}

/* Extend all data structures to fit all basic blocks.  */
static void
extend_bb_info (void)
{
  sel_extend_global_bb_info ();
  extend_region_bb_info ();
}

/* Finalize pass-scope data structures for basic blocks.  */
void
sel_finish_global_bb_info (void)
{
  sel_global_bb_info.release ();
}

/* Finalize region-scope data structures for basic blocks.  */
static void
finish_region_bb_info (void)
{
  sel_region_bb_info.release ();
}


/* Data for each insn in current region.  */
vec<sel_insn_data_def> s_i_d;

/* Extend data structures for insns from current region.  */
static void
extend_insn_data (void)
{
  int reserve;

  sched_extend_target ();
  sched_deps_init (false);

  /* Extend data structures for insns from current region.  */
  reserve = (sched_max_luid + 1 - s_i_d.length ());
  if (reserve > 0 && ! s_i_d.space (reserve))
    {
      int size;

      if (sched_max_luid / 2 > 1024)
	size = sched_max_luid + 1024;
      else
	size = 3 * sched_max_luid / 2;

      s_i_d.safe_grow_cleared (size, true);
    }
}
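
/* A worked example of the growth policy above (illustrative only): with
   sched_max_luid == 1000, 1000 / 2 <= 1024, so the vector grows to
   3 * 1000 / 2 == 1500 entries; with sched_max_luid == 3000,
   3000 / 2 > 1024, so it grows to 3000 + 1024 == 4024 entries, i.e.
   growth becomes additive once the region is large.  */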

/* Finalize data structures for insns from current region.  */
static void
finish_insns (void)
{
  unsigned i;

  /* Clear here all dependence contexts that may have been left over from
     insns that were removed during the scheduling.  */
  for (i = 0; i < s_i_d.length (); i++)
    {
      sel_insn_data_def *sid_entry = &s_i_d[i];

      if (sid_entry->live)
	return_regset_to_pool (sid_entry->live);
      if (sid_entry->analyzed_deps)
	{
	  BITMAP_FREE (sid_entry->analyzed_deps);
	  BITMAP_FREE (sid_entry->found_deps);
	  htab_delete (sid_entry->transformed_insns);
	  free_deps (&sid_entry->deps_context);
	}
      if (EXPR_VINSN (&sid_entry->expr))
	{
	  clear_expr (&sid_entry->expr);

	  /* Also, clear CANT_MOVE bit here, because we really don't want it
	     to be passed to the next region.  */
	  CANT_MOVE_BY_LUID (i) = 0;
	}
    }

  s_i_d.release ();
}

/* A proxy to pass initialization data to init_insn ().  */
static sel_insn_data_def _insn_init_ssid;
static sel_insn_data_t insn_init_ssid = &_insn_init_ssid;

/* If true create a new vinsn.  Otherwise use the one from EXPR.  */
static bool insn_init_create_new_vinsn_p;

/* Set all necessary data for initialization of the new insn[s].  */
static expr_t
set_insn_init (expr_t expr, vinsn_t vi, int seqno)
{
  expr_t x = &insn_init_ssid->expr;

  copy_expr_onside (x, expr);
  if (vi != NULL)
    {
      insn_init_create_new_vinsn_p = false;
      change_vinsn_in_expr (x, vi);
    }
  else
    insn_init_create_new_vinsn_p = true;

  insn_init_ssid->seqno = seqno;
  return x;
}

/* Init data for INSN.  */
static void
init_insn_data (insn_t insn)
{
  expr_t expr;
  sel_insn_data_t ssid = insn_init_ssid;

  /* The fields mentioned below are special and hence are not being
     propagated to the new insns.  */
  gcc_assert (!ssid->asm_p && ssid->sched_next == NULL
	      && !ssid->after_stall_p && ssid->sched_cycle == 0);
  gcc_assert (INSN_P (insn) && INSN_LUID (insn) > 0);

  expr = INSN_EXPR (insn);
  copy_expr (expr, &ssid->expr);
  prepare_insn_expr (insn, ssid->seqno);

  if (insn_init_create_new_vinsn_p)
    change_vinsn_in_expr (expr, vinsn_create (insn, init_insn_force_unique_p));

  if (first_time_insn_init (insn))
    init_first_time_insn_data (insn);
}

/* This is used to initialize spurious jumps generated by
   sel_redirect_edge ().  OLD_SEQNO is used for initializing seqnos
   in corner cases within get_seqno_for_a_jump.  */
static void
init_simplejump_data (insn_t insn, int old_seqno)
{
  init_expr (INSN_EXPR (insn), vinsn_create (insn, false), 0,
	     REG_BR_PROB_BASE, 0, 0, 0, 0, 0, 0,
	     vNULL, true, false, false,
	     false, true);
  INSN_SEQNO (insn) = get_seqno_for_a_jump (insn, old_seqno);
  init_first_time_insn_data (insn);
}

/* Perform deferred initialization of insns.  This is used to process
   a new jump that may be created by redirect_edge.  OLD_SEQNO is used
   for initializing simplejumps in init_simplejump_data.  */
static void
sel_init_new_insn (insn_t insn, int flags, int old_seqno)
{
  /* We create data structures for bb when the first insn is emitted in it.  */
  if (INSN_P (insn)
      && INSN_IN_STREAM_P (insn)
      && insn_is_the_only_one_in_bb_p (insn))
    {
      extend_bb_info ();
      create_initial_data_sets (BLOCK_FOR_INSN (insn));
    }

  if (flags & INSN_INIT_TODO_LUID)
    {
      sched_extend_luids ();
      sched_init_insn_luid (insn);
    }

  if (flags & INSN_INIT_TODO_SSID)
    {
      extend_insn_data ();
      init_insn_data (insn);
      clear_expr (&insn_init_ssid->expr);
    }

  if (flags & INSN_INIT_TODO_SIMPLEJUMP)
    {
      extend_insn_data ();
      init_simplejump_data (insn, old_seqno);
    }

  gcc_assert (CONTAINING_RGN (BLOCK_NUM (insn))
	      == CONTAINING_RGN (BB_TO_BLOCK (0)));
}
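
/* A hypothetical call sketch (for illustration; the actual callers live
   elsewhere): a jump emitted while redirecting an edge would typically be
   initialized with

     sel_init_new_insn (jump, INSN_INIT_TODO_LUID | INSN_INIT_TODO_SIMPLEJUMP,
			old_seqno);

   i.e. it gets a fresh luid plus the simplejump initialization, while
   ordinary new insns would use INSN_INIT_TODO_LUID | INSN_INIT_TODO_SSID
   after set_insn_init has filled in the proxy data.  */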


/* Functions to init/finish work with lv sets.  */

/* Init BB_LV_SET of BB from DF_LR_IN set of BB.  */
static void
init_lv_set (basic_block bb)
{
  gcc_assert (!BB_LV_SET_VALID_P (bb));

  BB_LV_SET (bb) = get_regset_from_pool ();
  COPY_REG_SET (BB_LV_SET (bb), DF_LR_IN (bb));
  BB_LV_SET_VALID_P (bb) = true;
}

/* Copy liveness information to BB from FROM_BB.  */
static void
copy_lv_set_from (basic_block bb, basic_block from_bb)
{
  gcc_assert (!BB_LV_SET_VALID_P (bb));

  COPY_REG_SET (BB_LV_SET (bb), BB_LV_SET (from_bb));
  BB_LV_SET_VALID_P (bb) = true;
}

/* Initialize lv set of all bb headers.  */
void
init_lv_sets (void)
{
  basic_block bb;

  /* Initialize LV sets.  */
  FOR_EACH_BB_FN (bb, cfun)
    init_lv_set (bb);

  /* Don't forget EXIT_BLOCK.  */
  init_lv_set (EXIT_BLOCK_PTR_FOR_FN (cfun));
}

/* Release lv set of BB.  */
static void
free_lv_set (basic_block bb)
{
  gcc_assert (BB_LV_SET (bb) != NULL);

  return_regset_to_pool (BB_LV_SET (bb));
  BB_LV_SET (bb) = NULL;
  BB_LV_SET_VALID_P (bb) = false;
}

/* Finalize lv sets of all bb headers.  */
void
free_lv_sets (void)
{
  basic_block bb;

  /* Don't forget EXIT_BLOCK.  */
  free_lv_set (EXIT_BLOCK_PTR_FOR_FN (cfun));

  /* Free LV sets.  */
  FOR_EACH_BB_FN (bb, cfun)
    if (BB_LV_SET (bb))
      free_lv_set (bb);
}

/* Mark AV_SET for BB as invalid, so this set will be updated the next time
   compute_av () processes BB.  This function is called when creating new basic
   blocks, as well as for blocks (either new or existing) where new jumps are
   created when the control flow is being updated.  */
static void
invalidate_av_set (basic_block bb)
{
  BB_AV_LEVEL (bb) = -1;
}

/* Create initial data sets for BB (they will be invalid).  */
static void
create_initial_data_sets (basic_block bb)
{
  if (BB_LV_SET (bb))
    BB_LV_SET_VALID_P (bb) = false;
  else
    BB_LV_SET (bb) = get_regset_from_pool ();
  invalidate_av_set (bb);
}

/* Free av set of BB.  */
static void
free_av_set (basic_block bb)
{
  av_set_clear (&BB_AV_SET (bb));
  BB_AV_LEVEL (bb) = 0;
}

/* Free data sets of BB.  */
void
free_data_sets (basic_block bb)
{
  free_lv_set (bb);
  free_av_set (bb);
}

/* Exchange data sets of TO and FROM.  */
void
exchange_data_sets (basic_block to, basic_block from)
{
  /* Exchange lv sets of TO and FROM.  */
  std::swap (BB_LV_SET (from), BB_LV_SET (to));
  std::swap (BB_LV_SET_VALID_P (from), BB_LV_SET_VALID_P (to));

  /* Exchange av sets of TO and FROM.  */
  std::swap (BB_AV_SET (from), BB_AV_SET (to));
  std::swap (BB_AV_LEVEL (from), BB_AV_LEVEL (to));
}

/* Copy data sets of FROM to TO.  */
void
copy_data_sets (basic_block to, basic_block from)
{
  gcc_assert (!BB_LV_SET_VALID_P (to) && !BB_AV_SET_VALID_P (to));
  gcc_assert (BB_AV_SET (to) == NULL);

  BB_AV_LEVEL (to) = BB_AV_LEVEL (from);
  BB_LV_SET_VALID_P (to) = BB_LV_SET_VALID_P (from);

  if (BB_AV_SET_VALID_P (from))
    {
      BB_AV_SET (to) = av_set_copy (BB_AV_SET (from));
    }
  if (BB_LV_SET_VALID_P (from))
    {
      gcc_assert (BB_LV_SET (to) != NULL);
      COPY_REG_SET (BB_LV_SET (to), BB_LV_SET (from));
    }
}

/* Return an av set for INSN, if any.  */
av_set_t
get_av_set (insn_t insn)
{
  av_set_t av_set;

  gcc_assert (AV_SET_VALID_P (insn));

  if (sel_bb_head_p (insn))
    av_set = BB_AV_SET (BLOCK_FOR_INSN (insn));
  else
    av_set = NULL;

  return av_set;
}

/* Implementation of AV_LEVEL () macro.  Return AV_LEVEL () of INSN.  */
int
get_av_level (insn_t insn)
{
  int av_level;

  gcc_assert (INSN_P (insn));

  if (sel_bb_head_p (insn))
    av_level = BB_AV_LEVEL (BLOCK_FOR_INSN (insn));
  else
    av_level = INSN_WS_LEVEL (insn);

  return av_level;
}


/* Variables to work with control-flow graph.  */

/* The basic blocks that have already been processed by sched_data_update (),
   but haven't been passed to sel_add_bb () yet.  */
static vec<basic_block> last_added_blocks;

/* A pool for allocating successor infos.  */
static struct
{
  /* A stack for saving succs_info structures.  */
  struct succs_info *stack;

  /* Its size.  */
  int size;

  /* Top of the stack.  */
  int top;

  /* Maximal value of the top.  */
  int max_top;
}  succs_info_pool;

/* Functions to work with control-flow graph.  */

/* Return the head insn of BB that the scheduler works with: the first
   insn after the basic block note that really belongs to BB, or NULL if
   the block is empty.  For the EXIT block, return the special exit_insn.  */
rtx_insn *
sel_bb_head (basic_block bb)
{
  rtx_insn *head;

  if (bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
    {
      gcc_assert (exit_insn != NULL_RTX);
      head = exit_insn;
    }
  else
    {
      rtx_note *note = bb_note (bb);
      head = next_nonnote_insn (note);

      if (head && (BARRIER_P (head) || BLOCK_FOR_INSN (head) != bb))
	head = NULL;
    }

  return head;
}

/* Return true if INSN is a basic block header.  */
bool
sel_bb_head_p (insn_t insn)
{
  return sel_bb_head (BLOCK_FOR_INSN (insn)) == insn;
}

/* Return last insn of BB.  */
rtx_insn *
sel_bb_end (basic_block bb)
{
  if (sel_bb_empty_p (bb))
    return NULL;

  gcc_assert (bb != EXIT_BLOCK_PTR_FOR_FN (cfun));

  return BB_END (bb);
}

/* Return true if INSN is the last insn in its basic block.  */
bool
sel_bb_end_p (insn_t insn)
{
  return insn == sel_bb_end (BLOCK_FOR_INSN (insn));
}

/* Return true if BB consists of a single NOTE_INSN_BASIC_BLOCK.  */
bool
sel_bb_empty_p (basic_block bb)
{
  return sel_bb_head (bb) == NULL;
}
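
/* A minimal sketch of using the accessors above (hypothetical helper, not
   called by the scheduler): walk every scheduler-visible insn of BB.  */
static void ATTRIBUTE_UNUSED
sel_example_walk_bb (basic_block bb)
{
  if (sel_bb_empty_p (bb))
    return;

  /* sel_bb_head and sel_bb_end are non-NULL for a non-empty block.  */
  for (insn_t insn = sel_bb_head (bb); ; insn = NEXT_INSN (insn))
    {
      /* Process INSN here.  */
      if (insn == sel_bb_end (bb))
	break;
    }
}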

/* True when BB belongs to the current scheduling region.  */
bool
in_current_region_p (basic_block bb)
{
  if (bb->index < NUM_FIXED_BLOCKS)
    return false;

  return CONTAINING_RGN (bb->index) == CONTAINING_RGN (BB_TO_BLOCK (0));
}

/* Return the block which is a fallthru bb of a conditional jump JUMP.  */
basic_block
fallthru_bb_of_jump (const rtx_insn *jump)
{
  if (!JUMP_P (jump))
    return NULL;

  if (!any_condjump_p (jump))
    return NULL;

  /* A basic block that ends with a conditional jump may still have one
     successor (and be followed by a barrier); we are not interested in
     that case.  */
  if (single_succ_p (BLOCK_FOR_INSN (jump)))
    return NULL;

  return FALLTHRU_EDGE (BLOCK_FOR_INSN (jump))->dest;
}
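
/* For illustration: for a block ending in "if (cc) jump L" with a fallthru
   successor, the function above returns that fallthru block; for an
   unconditional jump, or for a conditional jump whose block has a single
   successor, it returns NULL.  */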

/* Remove all notes from BB.  */
static void
init_bb (basic_block bb)
{
  remove_notes (bb_note (bb), BB_END (bb));
  BB_NOTE_LIST (bb) = note_list;
}

/* Initialize per-bb data structures for all blocks in BBS.  */
void
sel_init_bbs (bb_vec_t bbs)
{
  const struct sched_scan_info_def ssi =
    {
      extend_bb_info, /* extend_bb */
      init_bb, /* init_bb */
      NULL, /* extend_insn */
      NULL /* init_insn */
    };

  sched_scan (&ssi, bbs);
}

/* Restore notes for the whole region.  */
static void
sel_restore_notes (void)
{
  int bb;
  insn_t insn;

  for (bb = 0; bb < current_nr_blocks; bb++)
    {
      basic_block first, last;

      first = EBB_FIRST_BB (bb);
      last = EBB_LAST_BB (bb)->next_bb;

      do
	{
	  note_list = BB_NOTE_LIST (first);
	  restore_other_notes (NULL, first);
	  BB_NOTE_LIST (first) = NULL;

	  FOR_BB_INSNS (first, insn)
	    if (NONDEBUG_INSN_P (insn))
	      reemit_notes (insn);

	  first = first->next_bb;
	}
      while (first != last);
    }
}

/* Free per-bb data structures.  */
void
sel_finish_bbs (void)
{
  sel_restore_notes ();

  /* Remove current loop preheader from this loop.  */
  if (current_loop_nest)
    sel_remove_loop_preheader ();

  finish_region_bb_info ();
}
4694 | ||||
4695 | /* Return true if INSN has a single successor of type FLAGS. */ | |||
4696 | bool | |||
4697 | sel_insn_has_single_succ_p (insn_t insn, int flags) | |||
4698 | { | |||
4699 | insn_t succ; | |||
4700 | succ_iterator si; | |||
4701 | bool first_p = true; | |||
4702 | ||||
4703 | FOR_EACH_SUCC_1 (succ, si, insn, flags)for ((si) = _succ_iter_start (&(succ), (insn), (flags)); _succ_iter_cond (&(si), &(succ), (insn), _eligible_successor_edge_p) ; _succ_iter_next (&(si))) | |||
4704 | { | |||
4705 | if (first_p) | |||
4706 | first_p = false; | |||
4707 | else | |||
4708 | return false; | |||
4709 | } | |||
4710 | ||||
4711 | return true; | |||
4712 | } | |||
4713 | ||||
4714 | /* Allocate successor's info. */ | |||
4715 | static struct succs_info * | |||
4716 | alloc_succs_info (void) | |||
4717 | { | |||
  if (succs_info_pool.top == succs_info_pool.max_top)
    {
      int i;

      if (++succs_info_pool.max_top >= succs_info_pool.size)
        gcc_unreachable ();

      i = ++succs_info_pool.top;
      succs_info_pool.stack[i].succs_ok.create (10);
      succs_info_pool.stack[i].succs_other.create (10);
      succs_info_pool.stack[i].probs_ok.create (10);
    }
  else
    succs_info_pool.top++;

  return &succs_info_pool.stack[succs_info_pool.top];
}

/* Free successor's info. */
void
free_succs_info (struct succs_info * sinfo)
{
  gcc_assert (succs_info_pool.top >= 0
              && &succs_info_pool.stack[succs_info_pool.top] == sinfo);
  succs_info_pool.top--;

  /* Clear stale info. */
  sinfo->succs_ok.block_remove (0, sinfo->succs_ok.length ());
  sinfo->succs_other.block_remove (0, sinfo->succs_other.length ());
  sinfo->probs_ok.block_remove (0, sinfo->probs_ok.length ());
  sinfo->all_prob = 0;
  sinfo->succs_ok_n = 0;
  sinfo->all_succs_n = 0;
}

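/* A typical caller pairs compute_succs_info below with free_succs_info
   above, along the lines of this sketch:

     struct succs_info *sinfo = compute_succs_info (insn, flags);
     ... inspect sinfo->succs_ok / sinfo->probs_ok / sinfo->all_prob ...
     free_succs_info (sinfo);

   Frees must come in LIFO order -- see the assert in free_succs_info. */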
/* Compute successor info for INSN. FLAGS are the flags passed
   to the FOR_EACH_SUCC_1 iterator. */
struct succs_info *
compute_succs_info (insn_t insn, short flags)
{
  succ_iterator si;
  insn_t succ;
  struct succs_info *sinfo = alloc_succs_info ();

  /* Traverse *all* successors and decide what to do with each. */
  FOR_EACH_SUCC_1 (succ, si, insn, SUCCS_ALL)
    {
      /* FIXME: this doesn't work for skipping to loop exits, as we don't
         perform code motion through inner loops. */
      short current_flags = si.current_flags & ~SUCCS_SKIP_TO_LOOP_EXITS;

      if (current_flags & flags)
        {
          sinfo->succs_ok.safe_push (succ);
          sinfo->probs_ok.safe_push (
                    /* FIXME: Improve calculation when skipping
                       inner loop to exits. */
                    si.bb_end
                    ? (si.e1->probability.initialized_p ()
                       ? si.e1->probability.to_reg_br_prob_base ()
                       : 0)
                    : REG_BR_PROB_BASE);
          sinfo->succs_ok_n++;
        }
      else
        sinfo->succs_other.safe_push (succ);

      /* Compute all_prob. */
      if (!si.bb_end)
        sinfo->all_prob = REG_BR_PROB_BASE;
      else if (si.e1->probability.initialized_p ())
        sinfo->all_prob += si.e1->probability.to_reg_br_prob_base ();

      sinfo->all_succs_n++;
    }

  return sinfo;
}

/* Return the predecessors of BB in PREDS and their number in N.
   Empty blocks are skipped. SIZE is used to allocate PREDS. */
static void
cfg_preds_1 (basic_block bb, insn_t **preds, int *n, int *size)
{
  edge e;
  edge_iterator ei;

  gcc_assert (BLOCK_TO_BB (bb->index) != 0);

  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      basic_block pred_bb = e->src;
      insn_t bb_end = BB_END (pred_bb);

      if (!in_current_region_p (pred_bb))
        {
          gcc_assert (flag_sel_sched_pipelining_outer_loops
                      && current_loop_nest);
          continue;
        }

      if (sel_bb_empty_p (pred_bb))
        cfg_preds_1 (pred_bb, preds, n, size);
      else
        {
          if (*n == *size)
            *preds = XRESIZEVEC (insn_t, *preds,
                                 (*size = 2 * *size + 1));
          (*preds)[(*n)++] = bb_end;
        }
    }

  gcc_assert (*n != 0
              || (flag_sel_sched_pipelining_outer_loops
                  && current_loop_nest));
}

/* Find all predecessors of BB and record them in PREDS and their number
   in N. Empty blocks are skipped, and only normal (forward in-region)
   edges are processed. */
static void
cfg_preds (basic_block bb, insn_t **preds, int *n)
{
  int size = 0;

  *preds = NULL;
  *n = 0;
  cfg_preds_1 (bb, preds, n, &size);
}

/* Returns true if we are moving INSN through a join point. */
bool
sel_num_cfg_preds_gt_1 (insn_t insn)
{
  basic_block bb;

  if (!sel_bb_head_p (insn) || INSN_BB (insn) == 0)
    return false;

  bb = BLOCK_FOR_INSN (insn);

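  /* Walk back over empty blocks: a join point hidden behind a chain of
     empty blocks still counts as a join for INSN.  */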
  while (1)
    {
      if (EDGE_COUNT (bb->preds) > 1)
        return true;

      gcc_assert (EDGE_PRED (bb, 0)->dest == bb);
      bb = EDGE_PRED (bb, 0)->src;

      if (!sel_bb_empty_p (bb))
        break;
    }

  return false;
}

/* Returns true when BB should be the end of an ebb. Adapted from the
   code in sched-ebb.cc. */
bool
bb_ends_ebb_p (basic_block bb)
{
  basic_block next_bb = bb_next_bb (bb);
  edge e;

  if (next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
      || bitmap_bit_p (forced_ebb_heads, next_bb->index)
      || (LABEL_P (BB_HEAD (next_bb))
          /* NB: LABEL_NUSES () is not maintained outside of jump.cc.
             Work around that. */
          && !single_pred_p (next_bb)))
    return true;

  if (!in_current_region_p (next_bb))
    return true;

  e = find_fallthru_edge (bb->succs);
  if (e)
    {
      gcc_assert (e->dest == next_bb);

      return false;
    }

  return true;
}

/* Returns true when INSN and SUCC are in the same EBB, given that SUCC is a
   successor of INSN. */
bool
in_same_ebb_p (insn_t insn, insn_t succ)
{
  basic_block ptr = BLOCK_FOR_INSN (insn);

  for (;;)
    {
      if (ptr == BLOCK_FOR_INSN (succ))
        return true;

      if (bb_ends_ebb_p (ptr))
        return false;

      ptr = bb_next_bb (ptr);
    }
}

/* Recomputes the reverse topological order for the function and
   saves it in REV_TOP_ORDER_INDEX. REV_TOP_ORDER_INDEX_LEN is also
   modified appropriately. */
static void
recompute_rev_top_order (void)
{
  int *postorder;
  int n_blocks, i;

  if (!rev_top_order_index
      || rev_top_order_index_len < last_basic_block_for_fn (cfun))
    {
      rev_top_order_index_len = last_basic_block_for_fn (cfun);
      rev_top_order_index = XRESIZEVEC (int, rev_top_order_index,
                                        rev_top_order_index_len);
    }

  postorder = XNEWVEC (int, n_basic_blocks_for_fn (cfun));

  n_blocks = post_order_compute (postorder, true, false);
  gcc_assert (n_basic_blocks_for_fn (cfun) == n_blocks);

  /* Build the reverse function: for each basic block with BB->INDEX == K,
     rev_top_order_index[K] is its reverse topological sort number. */
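  /* Blocks nearer the function exit finish earlier in the DFS, so a
     smaller index here means a block that comes later in topological
     order; find_place_to_insert_bb below compares these indices.  */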
  for (i = 0; i < n_blocks; i++)
    {
      gcc_assert (postorder[i] < rev_top_order_index_len);
      rev_top_order_index[postorder[i]] = i;
    }

  free (postorder);
}

/* Clear all flags from insns in BB that could spoil its rescheduling. */
void
clear_outdated_rtx_info (basic_block bb)
{
  rtx_insn *insn;

  FOR_BB_INSNS (bb, insn)
    if (INSN_P (insn))
      {
        SCHED_GROUP_P (insn) = 0;
        INSN_AFTER_STALL_P (insn) = 0;
        INSN_SCHED_TIMES (insn) = 0;
        EXPR_PRIORITY_ADJ (INSN_EXPR (insn)) = 0;

        /* We cannot use the changed caches, as previously we could ignore
           the LHS dependence due to enabled renaming and transform
           the expression, and currently we'll be unable to do this. */
        htab_empty (INSN_TRANSFORMED_INSNS (insn));
      }
}

/* Add BB_NOTE to the pool of available basic block notes. */
static void
return_bb_to_pool (basic_block bb)
{
  rtx_note *note = bb_note (bb);

  gcc_assert (NOTE_BASIC_BLOCK (note) == bb
              && bb->aux == NULL);

  /* It turns out that current cfg infrastructure does not support
     reuse of basic blocks. Don't bother for now. */
  /*bb_note_pool.safe_push (note);*/
}

/* Get a bb_note from pool or return NULL_RTX if pool is empty. */
static rtx_note *
get_bb_note_from_pool (void)
{
  if (bb_note_pool.is_empty ())
    return NULL;
  else
    {
      rtx_note *note = bb_note_pool.pop ();

      SET_PREV_INSN (note) = NULL_RTX;
      SET_NEXT_INSN (note) = NULL_RTX;

      return note;
    }
}

/* Free bb_note_pool. */
void
free_bb_note_pool (void)
{
  bb_note_pool.release ();
}

/* Setup scheduler pool and successor structure. */
void
alloc_sched_pools (void)
{
  int succs_size;

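  /* One pool slot per lookahead level: presumably the nested uses of
     compute_succs_info never go deeper than the maximal window size.  */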
  succs_size = MAX_WS + 1;
  succs_info_pool.stack = XCNEWVEC (struct succs_info, succs_size);
  succs_info_pool.size = succs_size;
  succs_info_pool.top = -1;
  succs_info_pool.max_top = -1;
}

/* Free the pools. */
void
free_sched_pools (void)
{
  int i;

  sched_lists_pool.release ();
  gcc_assert (succs_info_pool.top == -1);
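  /* Only entries up to the high-water mark ever had their vectors
     created (see alloc_succs_info), so those are all we must release.  */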
  for (i = 0; i <= succs_info_pool.max_top; i++)
    {
      succs_info_pool.stack[i].succs_ok.release ();
      succs_info_pool.stack[i].succs_other.release ();
      succs_info_pool.stack[i].probs_ok.release ();
    }
  free (succs_info_pool.stack);
}


/* Returns a position in RGN where BB can be inserted retaining
   topological order. */
static int
find_place_to_insert_bb (basic_block bb, int rgn)
{
  bool has_preds_outside_rgn = false;
  edge e;
  edge_iterator ei;

  /* Find whether we have preds outside the region. */
  FOR_EACH_EDGE (e, ei, bb->preds)
    if (!in_current_region_p (e->src))
      {
        has_preds_outside_rgn = true;
        break;
      }

  /* Recompute the top order -- needed when we have > 1 pred
     and in case we don't have preds outside. */
  if (flag_sel_sched_pipelining_outer_loops
      && (has_preds_outside_rgn || EDGE_COUNT (bb->preds) > 1))
    {
      int i, bbi = bb->index, cur_bbi;

      recompute_rev_top_order ();
      for (i = RGN_NR_BLOCKS (rgn) - 1; i >= 0; i--)
        {
          cur_bbi = BB_TO_BLOCK (i);
          if (rev_top_order_index[bbi]
              < rev_top_order_index[cur_bbi])
            break;
        }

      /* The loop stopped just past the right block, so increase I by one;
         the caller will also add one (see add_block_to_current_region),
         so decrease it back. */
      return (i + 1) - 1;
    }
  else if (has_preds_outside_rgn)
    {
      /* This is the case when we generate an extra empty block
         to serve as region head during pipelining. */
      e = EDGE_SUCC (bb, 0);
      gcc_assert (EDGE_COUNT (bb->succs) == 1
                  && in_current_region_p (EDGE_SUCC (bb, 0)->dest)
                  && (BLOCK_TO_BB (e->dest->index) == 0));
      return -1;
    }

  /* We don't have preds outside the region. We should have
     a single pred, because the multiple preds case comes from
     the pipelining of outer loops, and that is handled above.
     Just take the bbi of this single pred. */
  if (EDGE_COUNT (bb->succs) > 0)
    {
      int pred_bbi;

      gcc_assert (EDGE_COUNT (bb->preds) == 1);

      pred_bbi = EDGE_PRED (bb, 0)->src->index;
      return BLOCK_TO_BB (pred_bbi);
    }
  else
    /* BB has no successors. It is safe to put it at the end. */
    return current_nr_blocks - 1;
}

/* Deletes an empty basic block freeing its data. */
static void
delete_and_free_basic_block (basic_block bb)
{
  gcc_assert (sel_bb_empty_p (bb));

  if (BB_LV_SET (bb))
    free_lv_set (bb);

  bitmap_clear_bit (blocks_to_reschedule, bb->index);

  /* Can't assert av_set properties because we use sel_aremove_bb
     when removing loop preheader from the region. At the point of
     removing the preheader we already have deallocated sel_region_bb_info. */
  gcc_assert (BB_LV_SET (bb) == NULL
              && !BB_LV_SET_VALID_P (bb)
              && BB_AV_LEVEL (bb) == 0
              && BB_AV_SET (bb) == NULL);

  delete_basic_block (bb);
}

/* Add BB to the current region and update the region data. */
static void
add_block_to_current_region (basic_block bb)
{
  int i, pos, bbi = -2, rgn;

  rgn = CONTAINING_RGN (BB_TO_BLOCK (0));
  bbi = find_place_to_insert_bb (bb, rgn);
  bbi += 1;
  pos = RGN_BLOCKS (rgn) + bbi;

  gcc_assert (RGN_HAS_REAL_EBB (rgn) == 0
              && ebb_head[bbi] == pos);

  /* Make a place for the new block. */
  extend_regions ();

  for (i = RGN_BLOCKS (rgn + 1) - 1; i >= pos; i--)
    BLOCK_TO_BB (rgn_bb_table[i])++;

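  /* Shift the tail of rgn_bb_table one slot to the right to free
     position POS for BB.  */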
  memmove (rgn_bb_table + pos + 1,
           rgn_bb_table + pos,
           (RGN_BLOCKS (nr_regions) - pos) * sizeof (*rgn_bb_table));

  /* Initialize data for BB. */
  rgn_bb_table[pos] = bb->index;
  BLOCK_TO_BB (bb->index) = bbi;
  CONTAINING_RGN (bb->index) = rgn;

  RGN_NR_BLOCKS (rgn)++;

  for (i = rgn + 1; i <= nr_regions; i++)
    RGN_BLOCKS (i)++;
}

/* Remove BB from the current region and update the region data. */
static void
remove_bb_from_region (basic_block bb)
{
  int i, pos, bbi = -2, rgn;

  rgn = CONTAINING_RGN (BB_TO_BLOCK (0));
  bbi = BLOCK_TO_BB (bb->index);
  pos = RGN_BLOCKS (rgn) + bbi;

  gcc_assert (RGN_HAS_REAL_EBB (rgn) == 0
              && ebb_head[bbi] == pos);

  for (i = RGN_BLOCKS (rgn + 1) - 1; i >= pos; i--)
    BLOCK_TO_BB (rgn_bb_table[i])--;

  memmove (rgn_bb_table + pos,
           rgn_bb_table + pos + 1,
           (RGN_BLOCKS (nr_regions) - pos) * sizeof (*rgn_bb_table));

  RGN_NR_BLOCKS (rgn)--;
  for (i = rgn + 1; i <= nr_regions; i++)
    RGN_BLOCKS (i)--;
}

/* Add BB to the current region and update all data. If BB is NULL, add all
   blocks from the last_added_blocks vector. */
static void
sel_add_bb (basic_block bb)
{
  /* Extend luids so that new notes will receive zero luids. */
  sched_extend_luids ();
  sched_init_bbs ();
  sel_init_bbs (last_added_blocks);

  /* When BB is passed explicitly, the vector should contain
     the single element equal to BB; in any case, the vector
     must exist. */
  gcc_assert (last_added_blocks.exists ());

  if (bb != NULL)
    {
      gcc_assert (last_added_blocks.length () == 1
                  && last_added_blocks[0] == bb);
      add_block_to_current_region (bb);

      /* We associate creating/deleting data sets with the first insn
         appearing / disappearing in the bb. */
      if (!sel_bb_empty_p (bb) && BB_LV_SET (bb) == NULL)
        create_initial_data_sets (bb);

      last_added_blocks.release ();
    }
  else
    /* BB is NULL - process LAST_ADDED_BLOCKS instead. */
    {
      int i;
      basic_block temp_bb = NULL;

      for (i = 0;
           last_added_blocks.iterate (i, &bb); i++)
        {
          add_block_to_current_region (bb);
          temp_bb = bb;
        }

      /* We need to fetch at least one bb so we know the region
         to update. */
      gcc_assert (temp_bb != NULL);
      bb = temp_bb;

      last_added_blocks.release ();
    }

  rgn_setup_region (CONTAINING_RGN (bb->index));
}

/* Remove BB from the current region and update all data.
   If REMOVE_FROM_CFG_P is true, also remove the block from the CFG. */
static void
sel_remove_bb (basic_block bb, bool remove_from_cfg_p)
{
  unsigned idx = bb->index;

  gcc_assert (bb != NULL && BB_NOTE_LIST (bb) == NULL_RTX);

  remove_bb_from_region (bb);
  return_bb_to_pool (bb);
  bitmap_clear_bit (blocks_to_reschedule, idx);

  if (remove_from_cfg_p)
    {
      basic_block succ = single_succ (bb);
      delete_and_free_basic_block (bb);
      set_immediate_dominator (CDI_DOMINATORS, succ,
                               recompute_dominator (CDI_DOMINATORS, succ));
    }

  rgn_setup_region (CONTAINING_RGN (idx));
}

/* Concatenate info of EMPTY_BB to info of MERGE_BB. */
static void
move_bb_info (basic_block merge_bb, basic_block empty_bb)
{
  if (in_current_region_p (merge_bb))
    concat_note_lists (BB_NOTE_LIST (empty_bb),
                       &BB_NOTE_LIST (merge_bb));
  BB_NOTE_LIST (empty_bb) = NULL;
}

/* Remove EMPTY_BB. If REMOVE_FROM_CFG_P is false, remove EMPTY_BB from
   the region, but keep it in the CFG. */
static void
remove_empty_bb (basic_block empty_bb, bool remove_from_cfg_p)
{
  /* The block should contain just a note or a label.
     We try to check whether it is unused below. */
  gcc_assert (BB_HEAD (empty_bb) == BB_END (empty_bb)
              || LABEL_P (BB_HEAD (empty_bb)));

  /* If the basic block has predecessors or successors, redirect them. */
  if (remove_from_cfg_p
      && (EDGE_COUNT (empty_bb->preds) > 0
          || EDGE_COUNT (empty_bb->succs) > 0))
    {
      basic_block pred;
      basic_block succ;

      /* We need to init PRED and SUCC before redirecting edges. */
      if (EDGE_COUNT (empty_bb->preds) > 0)
        {
          edge e;

          gcc_assert (EDGE_COUNT (empty_bb->preds) == 1);

          e = EDGE_PRED (empty_bb, 0);
          gcc_assert (e->src == empty_bb->prev_bb
                      && (e->flags & EDGE_FALLTHRU));

          pred = empty_bb->prev_bb;
        }
      else
        pred = NULL;

      if (EDGE_COUNT (empty_bb->succs) > 0)
        {
          /* We do not check fallthruness here as above, because
             after removing a jump the edge may actually not be fallthru. */
          gcc_assert (EDGE_COUNT (empty_bb->succs) == 1);
          succ = EDGE_SUCC (empty_bb, 0)->dest;
        }
      else
        succ = NULL;

      if (EDGE_COUNT (empty_bb->preds) > 0 && succ != NULL)
        {
          edge e = EDGE_PRED (empty_bb, 0);

          if (e->flags & EDGE_FALLTHRU)
            redirect_edge_succ_nodup (e, succ);
          else
            sel_redirect_edge_and_branch (EDGE_PRED (empty_bb, 0), succ);
        }

      if (EDGE_COUNT (empty_bb->succs) > 0 && pred != NULL)
        {
          edge e = EDGE_SUCC (empty_bb, 0);

          if (find_edge (pred, e->dest) == NULL)
            redirect_edge_pred (e, pred);
        }
    }

  /* Finish removing. */
  sel_remove_bb (empty_bb, remove_from_cfg_p);
}

/* An implementation of the create_basic_block hook, which additionally
   updates per-bb data structures. */
static basic_block
sel_create_basic_block (void *headp, void *endp, basic_block after)
{
  basic_block new_bb;
  rtx_note *new_bb_note;

  gcc_assert (flag_sel_sched_pipelining_outer_loops
              || !last_added_blocks.exists ());

  new_bb_note = get_bb_note_from_pool ();

  if (new_bb_note == NULL_RTX)
    new_bb = orig_cfg_hooks.create_basic_block (headp, endp, after);
  else
    {
      new_bb = create_basic_block_structure ((rtx_insn *) headp,
                                             (rtx_insn *) endp,
                                             new_bb_note, after);
      new_bb->aux = NULL;
    }

  last_added_blocks.safe_push (new_bb);

  return new_bb;
}

/* Implement sched_init_only_bb (). */
static void
sel_init_only_bb (basic_block bb, basic_block after)
{
  gcc_assert (after == NULL);

  extend_regions ();
  rgn_make_new_region_out_of_new_block (bb);
}

/* Update the latch when we've split or merged it from FROM block to TO.
   This should be checked for all outer loops, too. */
static void
change_loops_latches (basic_block from, basic_block to)
{
  gcc_assert (from != to);

  if (current_loop_nest)
    {
      class loop *loop;

      for (loop = current_loop_nest; loop; loop = loop_outer (loop))
        if (considered_for_pipelining_p (loop) && loop->latch == from)
          {
            gcc_assert (loop == current_loop_nest);
            loop->latch = to;
            gcc_assert (loop_latch_edge (loop));
          }
    }
}

/* Splits BB into two basic blocks, adding it to the region and extending
   per-bb data structures. Returns the newly created bb. */
static basic_block
sel_split_block (basic_block bb, rtx after)
{
  basic_block new_bb;
  insn_t insn;

  new_bb = sched_split_block_1 (bb, after);
  sel_add_bb (new_bb);

  /* This should be called after sel_add_bb, because this uses
     CONTAINING_RGN for the new block, which is not yet initialized.
     FIXME: this function may be a no-op now. */
  change_loops_latches (bb, new_bb);

  /* Update ORIG_BB_INDEX for insns moved into the new block. */
  FOR_BB_INSNS (new_bb, insn)
    if (INSN_P (insn))
      EXPR_ORIG_BB_INDEX (INSN_EXPR (insn)) = new_bb->index;

  if (sel_bb_empty_p (bb))
    {
      gcc_assert (!sel_bb_empty_p (new_bb));

      /* NEW_BB has data sets that need to be updated and BB holds
         data sets that should be removed. Exchange these data sets
         so that we won't lose BB's valid data sets. */
      exchange_data_sets (new_bb, bb);
      free_data_sets (bb);
    }

  if (!sel_bb_empty_p (new_bb)
      && bitmap_bit_p (blocks_to_reschedule, bb->index))
    bitmap_set_bit (blocks_to_reschedule, new_bb->index);

  return new_bb;
}

/* If BB ends with a jump insn whose UID is bigger than PREV_MAX_UID,
   return it. Otherwise return NULL. */
static rtx_insn *
check_for_new_jump (basic_block bb, int prev_max_uid)
{
  rtx_insn *end;

  end = sel_bb_end (bb);
  if (end && INSN_UID (end) >= prev_max_uid)
    return end;
  return NULL;
}

/* Look for a new jump either in FROM_BB block or in the newly created
   JUMP_BB block. New means having UID at least equal to PREV_MAX_UID. */
static rtx_insn *
find_new_jump (basic_block from, basic_block jump_bb, int prev_max_uid)
{
  rtx_insn *jump;

  /* Return immediately if no new insns were emitted. */
  if (get_max_uid () == prev_max_uid)
    return NULL;

  /* Now check both blocks for new jumps. There can only ever be one. */
  if ((jump = check_for_new_jump (from, prev_max_uid)))
    return jump;

  if (jump_bb != NULL
      && (jump = check_for_new_jump (jump_bb, prev_max_uid)))
    return jump;
  return NULL;
}

/* Splits E and adds the newly created basic block to the current region.
   Returns this basic block. */
basic_block
sel_split_edge (edge e)
{
  basic_block new_bb, src, other_bb = NULL;
  int prev_max_uid;
  rtx_insn *jump;

  src = e->src;
  prev_max_uid = get_max_uid ();
  new_bb = split_edge (e);

  if (flag_sel_sched_pipelining_outer_loops
      && current_loop_nest)
    {
      int i;
      basic_block bb;

      /* Some of the basic blocks might not have been added to the loop.
         Add them here, until this is fixed in force_fallthru. */
      for (i = 0;
           last_added_blocks.iterate (i, &bb); i++)
        if (!bb->loop_father)
          {
            add_bb_to_loop (bb, e->dest->loop_father);

            gcc_assert (!other_bb && (new_bb->index != bb->index));
            other_bb = bb;
          }
    }

  /* Add all last_added_blocks to the region. */
  sel_add_bb (NULL);

  jump = find_new_jump (src, new_bb, prev_max_uid);
  if (jump)
    sel_init_new_insn (jump, INSN_INIT_TODO_LUID | INSN_INIT_TODO_SIMPLEJUMP);

  /* Put the correct lv set on this block. */
  if (other_bb && !sel_bb_empty_p (other_bb))
    compute_live (sel_bb_head (other_bb));

  return new_bb;
}

/* Implement sched_create_empty_bb (). */
static basic_block
sel_create_empty_bb (basic_block after)
{
  basic_block new_bb;

  new_bb = sched_create_empty_bb_1 (after);

  /* We'll explicitly initialize NEW_BB via sel_init_only_bb () a bit
     later. */
  gcc_assert (last_added_blocks.length () == 1
              && last_added_blocks[0] == new_bb);

  last_added_blocks.release ();
  return new_bb;
}

/* Implement sched_create_recovery_block. ORIG_INSN is where the block
   will be split to insert a check. */
basic_block
sel_create_recovery_block (insn_t orig_insn)
{
  basic_block first_bb, second_bb, recovery_block;
  basic_block before_recovery = NULL;
  rtx_insn *jump;

  first_bb = BLOCK_FOR_INSN (orig_insn);
  if (sel_bb_end_p (orig_insn))
    {
      /* Avoid introducing an empty block while splitting. */
      gcc_assert (single_succ_p (first_bb));
      second_bb = single_succ (first_bb);
    }
  else
    second_bb = sched_split_block (first_bb, orig_insn);

  recovery_block = sched_create_recovery_block (&before_recovery);
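  /* BEFORE_RECOVERY, when freshly created, is placed next to the function
     exit, which is presumably why it inherits the exit block's live set. */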
  if (before_recovery)
    copy_lv_set_from (before_recovery, EXIT_BLOCK_PTR_FOR_FN (cfun));

  gcc_assert (sel_bb_empty_p (recovery_block));
  sched_create_recovery_edges (first_bb, recovery_block, second_bb);
  if (current_loops != NULL)
    add_bb_to_loop (recovery_block, first_bb->loop_father);

  sel_add_bb (recovery_block);

  jump = BB_END (recovery_block);
  gcc_assert (sel_bb_head (recovery_block) == jump);
  sel_init_new_insn (jump, INSN_INIT_TODO_LUID | INSN_INIT_TODO_SIMPLEJUMP);

  return recovery_block;
}

/* Merge basic block B into basic block A. */
static void
sel_merge_blocks (basic_block a, basic_block b)
{
  gcc_assert (sel_bb_empty_p (b)
              && EDGE_COUNT (b->preds) == 1
              && EDGE_PRED (b, 0)->src == b->prev_bb);

  move_bb_info (b->prev_bb, b);
  remove_empty_bb (b, false);
  merge_blocks (a, b);
  change_loops_latches (b, a);
}

/* A wrapper for redirect_edge_and_branch_force, which also initializes
   data structures for the possibly created bb and insns. */
void
sel_redirect_edge_and_branch_force (edge e, basic_block to)
{
  basic_block jump_bb, src, orig_dest = e->dest;
  int prev_max_uid;
  rtx_insn *jump;
  int old_seqno = -1;

  /* This function is now used only for bookkeeping code creation, where
     we'll never get the single pred of orig_dest block and thus will not
     hit unreachable blocks when updating dominator info. */
  gcc_assert (!sel_bb_empty_p (e->src)
              && !single_pred_p (orig_dest));
  src = e->src;
  prev_max_uid = get_max_uid ();
  /* Compute and pass old_seqno down to sel_init_new_insn only for the case
     when the conditional jump being redirected may become unconditional. */
  if (any_condjump_p (BB_END (src))
      && INSN_SEQNO (BB_END (src)) >= 0)
    old_seqno = INSN_SEQNO (BB_END (src));

  jump_bb = redirect_edge_and_branch_force (e, to);
  if (jump_bb != NULL)
    sel_add_bb (jump_bb);

  /* This function could not be used to spoil the loop structure by now,
     thus we don't care to update anything. But check it to be sure. */
  if (current_loop_nest
      && pipelining_p)
    gcc_assert (loop_latch_edge (current_loop_nest));

  jump = find_new_jump (src, jump_bb, prev_max_uid);
  if (jump)
    sel_init_new_insn (jump, INSN_INIT_TODO_LUID | INSN_INIT_TODO_SIMPLEJUMP,
                       old_seqno);
  set_immediate_dominator (CDI_DOMINATORS, to,
                           recompute_dominator (CDI_DOMINATORS, to));
  set_immediate_dominator (CDI_DOMINATORS, orig_dest,
                           recompute_dominator (CDI_DOMINATORS, orig_dest));
  if (jump && sel_bb_head_p (jump))
    compute_live (jump);
}

/* A wrapper for redirect_edge_and_branch. Return TRUE if the blocks
   connected by the redirected edge are in reverse topological order. */
bool
sel_redirect_edge_and_branch (edge e, basic_block to)
{
  bool latch_edge_p;
  basic_block src, orig_dest = e->dest;
  int prev_max_uid;
  rtx_insn *jump;
  edge redirected;
  bool recompute_toporder_p = false;
  bool maybe_unreachable = single_pred_p (orig_dest);
  int old_seqno = -1;

  latch_edge_p = (pipelining_p
                  && current_loop_nest
                  && e == loop_latch_edge (current_loop_nest));

  src = e->src;
  prev_max_uid = get_max_uid ();

  /* Compute and pass old_seqno down to sel_init_new_insn only for the case
     when the conditional jump being redirected may become unconditional. */
  if (any_condjump_p (BB_END (src))
      && INSN_SEQNO (BB_END (src)) >= 0)
    old_seqno = INSN_SEQNO (BB_END (src));

  redirected = redirect_edge_and_branch (e, to);

  gcc_assert (redirected && !last_added_blocks.exists ());

  /* When we've redirected a latch edge, update the header. */
  if (latch_edge_p)
    {
      current_loop_nest->header = to;
      gcc_assert (loop_latch_edge (current_loop_nest));
    }

  /* In rare situations, the topological relation between the blocks connected
     by the redirected edge can change (see PR42245 for an example). Update
     block_to_bb/bb_to_block. */
  if (CONTAINING_RGN (e->src->index) == CONTAINING_RGN (to->index)
      && BLOCK_TO_BB (e->src->index) > BLOCK_TO_BB (to->index))
    recompute_toporder_p = true;

  jump = find_new_jump (src, NULL, prev_max_uid);
  if (jump)
    sel_init_new_insn (jump, INSN_INIT_TODO_LUID | INSN_INIT_TODO_SIMPLEJUMP,
                       old_seqno);

  /* Only update dominator info when we don't have unreachable blocks.
     Otherwise we'll update in maybe_tidy_empty_bb. */
  if (!maybe_unreachable)
    {
      set_immediate_dominator (CDI_DOMINATORS, to,
                               recompute_dominator (CDI_DOMINATORS, to));
      set_immediate_dominator (CDI_DOMINATORS, orig_dest,
                               recompute_dominator (CDI_DOMINATORS, orig_dest));
    }
  if (jump && sel_bb_head_p (jump))
    compute_live (jump);
  return recompute_toporder_p;
}

/* This variable holds the cfg hooks used by the selective scheduler. */
static struct cfg_hooks sel_cfg_hooks;

/* Register sel-sched cfg hooks. */
void
sel_register_cfg_hooks (void)
{
  sched_split_block = sel_split_block;

  orig_cfg_hooks = get_cfg_hooks ();
  sel_cfg_hooks = orig_cfg_hooks;

  sel_cfg_hooks.create_basic_block = sel_create_basic_block;

  set_cfg_hooks (sel_cfg_hooks);

  sched_init_only_bb = sel_init_only_bb;
  sched_split_block = sel_split_block;
  sched_create_empty_bb = sel_create_empty_bb;
}

/* Unregister sel-sched cfg hooks. */
void
sel_unregister_cfg_hooks (void)
{
  sched_create_empty_bb = NULL;
  sched_split_block = NULL;
  sched_init_only_bb = NULL;

  set_cfg_hooks (orig_cfg_hooks);
}


/* Emit an insn rtx based on PATTERN. If a jump insn is wanted,
   LABEL is where this jump should be directed. */
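/* (LABEL may also be a debug insn, in which case a debug insn is emitted
   from PATTERN -- see the DEBUG_INSN_P check below and the caller
   create_copy_of_insn_rtx.) */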
rtx_insn *
create_insn_rtx_from_pattern (rtx pattern, rtx label)
{
  rtx_insn *insn_rtx;

  gcc_assert (!INSN_P (pattern));

  start_sequence ();

  if (label == NULL_RTX)
    insn_rtx = emit_insn (pattern);
  else if (DEBUG_INSN_P (label))
    insn_rtx = emit_debug_insn (pattern);
  else
    {
      insn_rtx = emit_jump_insn (pattern);
      JUMP_LABEL (insn_rtx) = label;
      ++LABEL_NUSES (label);
    }

  end_sequence ();

  sched_extend_luids ();
  sched_extend_target ();
  sched_deps_init (false);

  /* Initialize INSN_CODE now. */
  recog_memoized (insn_rtx);
  return insn_rtx;
}

/* Create a new vinsn for INSN_RTX. FORCE_UNIQUE_P is true when the vinsn
   must not be clonable. */
vinsn_t
create_vinsn_from_insn_rtx (rtx_insn *insn_rtx, bool force_unique_p)
{
  gcc_assert (INSN_P (insn_rtx) && !INSN_IN_STREAM_P (insn_rtx));

  /* If VINSN_TYPE is not USE, retain its uniqueness. */
  return vinsn_create (insn_rtx, force_unique_p);
}

/* Create a copy of INSN_RTX. */
rtx_insn *
create_copy_of_insn_rtx (rtx insn_rtx)
{
  rtx_insn *res;
  rtx link;

  if (DEBUG_INSN_P (insn_rtx))
    return create_insn_rtx_from_pattern (copy_rtx (PATTERN (insn_rtx)),
                                         insn_rtx);

  gcc_assert (NONJUMP_INSN_P (insn_rtx));

  res = create_insn_rtx_from_pattern (copy_rtx (PATTERN (insn_rtx)),
                                      NULL_RTX);

  /* Locate the end of existing REG_NOTES in NEW_RTX. */
  rtx *ptail = &REG_NOTES (res);
  while (*ptail != NULL_RTX)
    ptail = &XEXP (*ptail, 1);

  /* Copy all REG_NOTES except REG_EQUAL/REG_EQUIV and REG_LABEL_OPERAND,
     since mark_jump_label will make them. REG_LABEL_TARGETs are created
     there too, but are supposed to be sticky, so we copy them. */
  for (link = REG_NOTES (insn_rtx); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) != REG_LABEL_OPERAND
        && REG_NOTE_KIND (link) != REG_EQUAL
        && REG_NOTE_KIND (link) != REG_EQUIV)
      {
        *ptail = duplicate_reg_note (link);
        ptail = &XEXP (*ptail, 1);
      }

  return res;
}

/* Change vinsn field of EXPR to hold NEW_VINSN. */
void
change_vinsn_in_expr (expr_t expr, vinsn_t new_vinsn)
{
  vinsn_detach (EXPR_VINSN (expr));

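  /* If NEW_VINSN aliases the vinsn EXPR held so far and the detach above
     dropped its last reference, the object is already freed by the time
     it is attached below; presumably callers guarantee that NEW_VINSN is
     still referenced at this point.  */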
  EXPR_VINSN (expr) = new_vinsn;
  vinsn_attach (new_vinsn);
}

/* Helpers for global init. */
/* This structure is used to call the existing bundling mechanism
   and calculate insn priorities. */
static struct haifa_sched_info sched_sel_haifa_sched_info =
{
  NULL, /* init_ready_list */
  NULL, /* can_schedule_ready_p */
  NULL, /* schedule_more_p */
  NULL, /* new_ready */
  NULL, /* rgn_rank */
  sel_print_insn, /* rgn_print_insn */
  contributes_to_priority,
  NULL, /* insn_finishes_block_p */

  NULL, NULL,
  NULL, NULL,