File: build/gcc/recog.cc
Warning: line 2270, column 4: Value stored to 'incdec_ok' is never read
/* Subroutines used by or related to instruction recognition.
   Copyright (C) 1987-2023 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "cfghooks.h"
#include "df.h"
#include "memmodel.h"
#include "tm_p.h"
#include "insn-config.h"
#include "regs.h"
#include "emit-rtl.h"
#include "recog.h"
#include "insn-attr.h"
#include "addresses.h"
#include "cfgrtl.h"
#include "cfgbuild.h"
#include "cfgcleanup.h"
#include "reload.h"
#include "tree-pass.h"
#include "function-abi.h"

#ifndef STACK_POP_CODE
#if STACK_GROWS_DOWNWARD
#define STACK_POP_CODE POST_INC
#else
#define STACK_POP_CODE POST_DEC
#endif
#endif

static void validate_replace_rtx_1 (rtx *, rtx, rtx, rtx_insn *, bool);
static void validate_replace_src_1 (rtx *, void *);
static rtx_insn *split_insn (rtx_insn *);

struct target_recog default_target_recog;
#if SWITCHABLE_TARGET
struct target_recog *this_target_recog = &default_target_recog;
#endif

/* Nonzero means allow operands to be volatile.
   This should be 0 if you are generating rtl, such as if you are calling
   the functions in optabs.cc and expmed.cc (most of the time).
   This should be 1 if all valid insns need to be recognized,
   such as in reginfo.cc and final.cc and reload.cc.

   init_recog and init_recog_no_volatile are responsible for setting this.  */

int volatile_ok;

struct recog_data_d recog_data;

/* Contains a vector of operand_alternative structures, such that
   operand OP of alternative A is at index A * n_operands + OP.
   Set up by preprocess_constraints.  */
const operand_alternative *recog_op_alt;

/* Used to provide recog_op_alt for asms.  */
static operand_alternative asm_op_alt[MAX_RECOG_OPERANDS
				      * MAX_RECOG_ALTERNATIVES];

/* On return from `constrain_operands', indicate which alternative
   was satisfied.  */

int which_alternative;

/* Nonzero after end of reload pass.
   Set to 1 or 0 by toplev.cc.
   Controls the significance of (SUBREG (MEM)).  */

int reload_completed;

/* Nonzero after thread_prologue_and_epilogue_insns has run.  */
int epilogue_completed;

/* Initialize data used by the function `recog'.
   This must be called once in the compilation of a function
   before any insn recognition may be done in the function.  */

void
init_recog_no_volatile (void)
{
  volatile_ok = 0;
}

void
init_recog (void)
{
  volatile_ok = 1;
}
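
/* Illustrative sketch (an addition to this listing, not original GCC
   code): a pass that generates new rtl normally runs with volatile
   operands disallowed, while a pass that must re-recognize every
   existing insn allows them first:

     init_recog_no_volatile ();   while generating rtl
     ...
     init_recog ();               before re-recognizing all insns

   Both calls simply set volatile_ok as documented above.  */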

/* Return true if labels in asm operands BODY are LABEL_REFs.  */

static bool
asm_labels_ok (rtx body)
{
  rtx asmop;
  int i;

  asmop = extract_asm_operands (body);
  if (asmop == NULL_RTX)
    return true;

  for (i = 0; i < ASM_OPERANDS_LABEL_LENGTH (asmop); i++)
    if (GET_CODE (ASM_OPERANDS_LABEL (asmop, i)) != LABEL_REF)
      return false;

  return true;
}

/* Check that X is an insn-body for an `asm' with operands
   and that the operands mentioned in it are legitimate.  */

int
check_asm_operands (rtx x)
{
  int noperands;
  rtx *operands;
  const char **constraints;
  int i;

  if (!asm_labels_ok (x))
    return 0;

  /* Post-reload, be more strict with things.  */
  if (reload_completed)
    {
      /* ??? Doh!  We've not got the wrapping insn.  Cook one up.  */
      rtx_insn *insn = make_insn_raw (x);
      extract_insn (insn);
      constrain_operands (1, get_enabled_alternatives (insn));
      return which_alternative >= 0;
    }

  noperands = asm_noperands (x);
  if (noperands < 0)
    return 0;
  if (noperands == 0)
    return 1;

  operands = XALLOCAVEC (rtx, noperands);
  constraints = XALLOCAVEC (const char *, noperands);

  decode_asm_operands (x, operands, NULL, constraints, NULL, NULL);

  for (i = 0; i < noperands; i++)
    {
      const char *c = constraints[i];
      if (c[0] == '%')
	c++;
      if (! asm_operand_ok (operands[i], c, constraints))
	return 0;
    }

  return 1;
}

/* Static data for the next two routines.  */

struct change_t
{
  rtx object;
  int old_code;
  int old_len;
  bool unshare;
  rtx *loc;
  rtx old;
};

static change_t *changes;
static int changes_allocated;

static int num_changes = 0;
static int temporarily_undone_changes = 0;

/* Validate a proposed change to OBJECT.  LOC is the location in the rtl
   at which NEW_RTX will be placed.  If NEW_LEN is >= 0, XVECLEN (NEW_RTX, 0)
   will also be changed to NEW_LEN, which is no greater than the current
   XVECLEN.  If OBJECT is zero, no validation is done, the change is
   simply made.

   Two types of objects are supported:  If OBJECT is a MEM, memory_address_p
   will be called with the address and mode as parameters.  If OBJECT is
   an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
   the change in place.

   IN_GROUP is nonzero if this is part of a group of changes that must be
   performed as a group.  In that case, the changes will be stored.  The
   function `apply_change_group' will validate and apply the changes.

   If IN_GROUP is zero, this is a single change.  Try to recognize the insn
   or validate the memory reference with the change applied.  If the result
   is not valid for the machine, suppress the change and return zero.
   Otherwise, perform the change and return 1.  */

static bool
validate_change_1 (rtx object, rtx *loc, rtx new_rtx, bool in_group,
		   bool unshare, int new_len = -1)
{
  gcc_assert (temporarily_undone_changes == 0);
  rtx old = *loc;

  /* Single-element parallels aren't valid and won't match anything.
     Replace them with the single element.  */
  if (new_len == 1 && GET_CODE (new_rtx) == PARALLEL)
    {
      new_rtx = XVECEXP (new_rtx, 0, 0);
      new_len = -1;
    }

  if ((old == new_rtx || rtx_equal_p (old, new_rtx))
      && (new_len < 0 || XVECLEN (new_rtx, 0) == new_len))
    return 1;

  gcc_assert ((in_group != 0 || num_changes == 0)
	      && (new_len < 0 || new_rtx == *loc));

  *loc = new_rtx;

  /* Save the information describing this change.  */
  if (num_changes >= changes_allocated)
    {
      if (changes_allocated == 0)
	/* This value allows for repeated substitutions inside complex
	   indexed addresses, or changes in up to 5 insns.  */
	changes_allocated = MAX_RECOG_OPERANDS * 5;
      else
	changes_allocated *= 2;

      changes = XRESIZEVEC (change_t, changes, changes_allocated);
    }

  changes[num_changes].object = object;
  changes[num_changes].loc = loc;
  changes[num_changes].old = old;
  changes[num_changes].old_len = (new_len >= 0 ? XVECLEN (new_rtx, 0) : -1);
  changes[num_changes].unshare = unshare;

  if (new_len >= 0)
    XVECLEN (new_rtx, 0) = new_len;

  if (object && !MEM_P (object))
    {
      /* Set INSN_CODE to force rerecognition of insn.  Save old code in
	 case invalid.  */
      changes[num_changes].old_code = INSN_CODE (object);
      INSN_CODE (object) = -1;
    }

  num_changes++;

  /* If we are making a group of changes, return 1.  Otherwise, validate the
     change group we made.  */

  if (in_group)
    return 1;
  else
    return apply_change_group ();
}

/* Wrapper for validate_change_1 that omits the UNSHARE argument,
   defaulting UNSHARE to false.  */

bool
validate_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
{
  return validate_change_1 (object, loc, new_rtx, in_group, false);
}

/* Wrapper for validate_change_1 that omits the UNSHARE argument,
   defaulting UNSHARE to true.  */

bool
validate_unshare_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
{
  return validate_change_1 (object, loc, new_rtx, in_group, true);
}
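
/* Usage sketch (illustrative addition, not original GCC code): queue
   two dependent replacements and validate them as one group.  NEW_SRC
   and NEW_DEST are hypothetical replacement expressions:

     rtx set = PATTERN (insn);
     validate_change (insn, &SET_SRC (set), new_src, 1);
     validate_change (insn, &SET_DEST (set), new_dest, 1);
     if (!apply_change_group ())
       ... both changes have been rolled back ...

   Passing IN_GROUP == 0 instead validates each change immediately.  */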

/* Change XVECLEN (*LOC, 0) to NEW_LEN.  OBJECT, IN_GROUP and the return
   value are as for validate_change_1.  */

bool
validate_change_xveclen (rtx object, rtx *loc, int new_len, bool in_group)
{
  return validate_change_1 (object, loc, *loc, in_group, false, new_len);
}

/* Keep X canonicalized if some changes have made it non-canonical; only
   modifies the operands of X, not (for example) its code.  Simplifications
   are not the job of this routine.

   Return true if anything was changed.  */
bool
canonicalize_change_group (rtx_insn *insn, rtx x)
{
  if (COMMUTATIVE_P (x)
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      /* Oops, the caller has made X no longer canonical.
	 Let's redo the changes in the correct order.  */
      rtx tem = XEXP (x, 0);
      validate_unshare_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
      validate_unshare_change (insn, &XEXP (x, 1), tem, 1);
      return true;
    }
  else
    return false;
}
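
/* Worked example (illustrative addition, not original GCC code): if an
   earlier substitution turned (plus (reg R) (reg S)) into
   (plus (const_int 4) (reg S)), the constant now belongs second.
   Calling canonicalize_change_group (insn, x) queues the operand swap
   that restores the canonical (plus (reg S) (const_int 4)).  */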

/* Check if REG_INC argument in *data overlaps a stored REG.  */

static void
check_invalid_inc_dec (rtx reg, const_rtx, void *data)
{
  rtx *pinc = (rtx *) data;
  if (*pinc == NULL_RTX || MEM_P (reg))
    return;
  if (reg_overlap_mentioned_p (reg, *pinc))
    *pinc = NULL_RTX;
}

/* This subroutine of apply_change_group verifies whether the changes to INSN
   were valid; i.e. whether INSN can still be recognized.

   If IN_GROUP is true clobbers which have to be added in order to
   match the instructions will be added to the current change group.
   Otherwise the changes will take effect immediately.  */

int
insn_invalid_p (rtx_insn *insn, bool in_group)
{
  rtx pat = PATTERN (insn);
  int num_clobbers = 0;
  /* If we are before reload and the pattern is a SET, see if we can add
     clobbers.  */
  int icode = recog (pat, insn,
		     (GET_CODE (pat) == SET
		      && ! reload_completed
		      && ! reload_in_progress)
		     ? &num_clobbers : 0);
  int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;


  /* If this is an asm and the operands aren't legal, then fail.  Likewise if
     this is not an asm and the insn wasn't recognized.  */
  if ((is_asm && ! check_asm_operands (PATTERN (insn)))
      || (!is_asm && icode < 0))
    return 1;

  /* If we have to add CLOBBERs, fail if we have to add ones that reference
     hard registers since our callers can't know if they are live or not.
     Otherwise, add them.  */
  if (num_clobbers > 0)
    {
      rtx newpat;

      if (added_clobbers_hard_reg_p (icode))
	return 1;

      newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_clobbers + 1));
      XVECEXP (newpat, 0, 0) = pat;
      add_clobbers (newpat, icode);
      if (in_group)
	validate_change (insn, &PATTERN (insn), newpat, 1);
      else
	PATTERN (insn) = pat = newpat;
    }

  /* After reload, verify that all constraints are satisfied.  */
  if (reload_completed)
    {
      extract_insn (insn);

      if (! constrain_operands (1, get_preferred_alternatives (insn)))
	return 1;
    }

  /* Punt if REG_INC argument overlaps some stored REG.  */
  for (rtx link = FIND_REG_INC_NOTE (insn, NULL_RTX);
       link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == REG_INC)
      {
	rtx reg = XEXP (link, 0);
	note_stores (insn, check_invalid_inc_dec, &reg);
	if (reg == NULL_RTX)
	  return 1;
      }

  INSN_CODE (insn) = icode;
  return 0;
}
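
/* Illustrative note (an addition, not original GCC code): on a target
   whose add pattern clobbers a condition-code register, a bare
   (set (reg A) (plus ...)) may only be recognizable as

     (parallel [(set (reg A) (plus ...))
		(clobber (reg CC))])

   which is the shape insn_invalid_p builds above when recog reports
   NUM_CLOBBERS missing clobbers.  The CC register is hypothetical and
   target-specific.  */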

/* Return number of changes made and not validated yet.  */
int
num_changes_pending (void)
{
  return num_changes;
}

/* Tentatively apply the changes numbered NUM and up.
   Return 1 if all changes are valid, zero otherwise.  */

int
verify_changes (int num)
{
  int i;
  rtx last_validated = NULL_RTX;

  /* The changes have been applied and all INSN_CODEs have been reset to force
     rerecognition.

     The changes are valid if we aren't given an object, or if we are
     given a MEM and it still is a valid address, or if this is an insn
     and it is recognized.  In the latter case, if reload has completed,
     we also require that the operands meet the constraints for
     the insn.  */

  for (i = num; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      /* If there is no object to test or if it is the same as the one we
	 already tested, ignore it.  */
      if (object == 0 || object == last_validated)
	continue;

      if (MEM_P (object))
	{
	  if (! memory_address_addr_space_p (GET_MODE (object),
					     XEXP (object, 0),
					     MEM_ADDR_SPACE (object)))
	    break;
	}
      else if (/* changes[i].old might be zero, e.g. when putting a
		  REG_FRAME_RELATED_EXPR into a previously empty list.  */
	       changes[i].old
	       && REG_P (changes[i].old)
	       && asm_noperands (PATTERN (object)) > 0
	       && register_asm_p (changes[i].old))
	{
	  /* Don't allow changes of hard register operands to inline
	     assemblies if they have been defined as register asm ("x").  */
	  break;
	}
      else if (DEBUG_INSN_P (object))
	continue;
      else if (insn_invalid_p (as_a <rtx_insn *> (object), true))
	{
	  rtx pat = PATTERN (object);

	  /* Perhaps we couldn't recognize the insn because there were
	     extra CLOBBERs at the end.  If so, try to re-recognize
	     without the last CLOBBER (later iterations will cause each of
	     them to be eliminated, in turn).  But don't do this if we
	     have an ASM_OPERAND.  */
	  if (GET_CODE (pat) == PARALLEL
	      && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
	      && asm_noperands (PATTERN (object)) < 0)
	    {
	      rtx newpat;

	      if (XVECLEN (pat, 0) == 2)
		newpat = XVECEXP (pat, 0, 0);
	      else
		{
		  int j;

		  newpat
		    = gen_rtx_PARALLEL (VOIDmode,
					rtvec_alloc (XVECLEN (pat, 0) - 1));
		  for (j = 0; j < XVECLEN (newpat, 0); j++)
		    XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
		}

	      /* Add a new change to this group to replace the pattern
		 with this new pattern.  Then consider this change
		 as having succeeded.  The change we added will
		 cause the entire call to fail if things remain invalid.

		 Note that this can lose if a later change than the one
		 we are processing specified &XVECEXP (PATTERN (object), 0, X)
		 but this shouldn't occur.  */

	      validate_change (object, &PATTERN (object), newpat, 1);
	      continue;
	    }
	  else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
		   || GET_CODE (pat) == VAR_LOCATION)
	    /* If this insn is a CLOBBER or USE, it is always valid, but is
	       never recognized.  */
	    continue;
	  else
	    break;
	}
      last_validated = object;
    }

  return (i == num_changes);
}

/* A group of changes has previously been issued with validate_change
   and verified with verify_changes.  Call df_insn_rescan for each of
   the insns changed and clear num_changes.  */

void
confirm_change_group (void)
{
  int i;
  rtx last_object = NULL;

  gcc_assert (temporarily_undone_changes == 0);
  for (i = 0; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      if (changes[i].unshare)
	*changes[i].loc = copy_rtx (*changes[i].loc);

      /* Avoid unnecessary rescanning when multiple changes to same instruction
	 are made.  */
      if (object)
	{
	  if (object != last_object && last_object && INSN_P (last_object))
	    df_insn_rescan (as_a <rtx_insn *> (last_object));
	  last_object = object;
	}
    }

  if (last_object && INSN_P (last_object))
    df_insn_rescan (as_a <rtx_insn *> (last_object));
  num_changes = 0;
}

/* Apply a group of changes previously issued with `validate_change'.
   If all changes are valid, call confirm_change_group and return 1,
   otherwise, call cancel_changes and return 0.  */

int
apply_change_group (void)
{
  if (verify_changes (0))
    {
      confirm_change_group ();
      return 1;
    }
  else
    {
      cancel_changes (0);
      return 0;
    }
}
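
/* Usage sketch (illustrative addition, not original GCC code): a
   caller that may want to retract only the changes it added can drive
   the machinery directly instead of calling apply_change_group:

     int base = num_validated_changes ();
     ... queue further changes with validate_change (..., 1) ...
     if (verify_changes (base))
       confirm_change_group ();
     else
       cancel_changes (base);

   confirm_change_group commits every outstanding change, so this shape
   assumes the changes before BASE were already known to be valid.  */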

/* Return the number of changes so far in the current group.  */

int
num_validated_changes (void)
{
  return num_changes;
}

/* Retract the changes numbered NUM and up.  */

void
cancel_changes (int num)
{
  gcc_assert (temporarily_undone_changes == 0);
  int i;

  /* Back out all the changes.  Do this in the opposite order in which
     they were made.  */
  for (i = num_changes - 1; i >= num; i--)
    {
      if (changes[i].old_len >= 0)
	XVECLEN (*changes[i].loc, 0) = changes[i].old_len;
      else
	*changes[i].loc = changes[i].old;
      if (changes[i].object && !MEM_P (changes[i].object))
	INSN_CODE (changes[i].object) = changes[i].old_code;
    }
  num_changes = num;
}

/* Swap the status of change NUM from being applied to not being applied,
   or vice versa.  */

static void
swap_change (int num)
{
  if (changes[num].old_len >= 0)
    std::swap (XVECLEN (*changes[num].loc, 0), changes[num].old_len);
  else
    std::swap (*changes[num].loc, changes[num].old);
  if (changes[num].object && !MEM_P (changes[num].object))
    std::swap (INSN_CODE (changes[num].object), changes[num].old_code);
}

/* Temporarily undo all the changes numbered NUM and up, with a view
   to reapplying them later.  The next call to the changes machinery
   must be:

      redo_changes (NUM)

   otherwise things will end up in an invalid state.  */

void
temporarily_undo_changes (int num)
{
  gcc_assert (temporarily_undone_changes == 0 && num <= num_changes);
  for (int i = num_changes - 1; i >= num; i--)
    swap_change (i);
  temporarily_undone_changes = num_changes - num;
}

/* Redo the changes that were temporarily undone by:

      temporarily_undo_changes (NUM).  */

void
redo_changes (int num)
{
  gcc_assert (temporarily_undone_changes == num_changes - num);
  for (int i = num; i < num_changes; ++i)
    swap_change (i);
  temporarily_undone_changes = 0;
}
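
/* Usage sketch (illustrative addition, not original GCC code):
   briefly inspecting an insn in its pre-change form, then restoring
   the pending group; no other changes-machinery call may intervene:

     temporarily_undo_changes (0);
     ... look at the insn as it was before the group ...
     redo_changes (0);  */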

/* Reduce conditional compilation elsewhere.  */
/* A subroutine of validate_replace_rtx_1 that tries to simplify the resulting
   rtx.  */

static void
simplify_while_replacing (rtx *loc, rtx to, rtx_insn *object,
			  machine_mode op0_mode)
{
  rtx x = *loc;
  enum rtx_code code = GET_CODE (x);
  rtx new_rtx = NULL_RTX;
  scalar_int_mode is_mode;

  if (SWAPPABLE_OPERANDS_P (x)
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      validate_unshare_change (object, loc,
			       gen_rtx_fmt_ee (COMMUTATIVE_ARITH_P (x) ? code
					       : swap_condition (code),
					       GET_MODE (x), XEXP (x, 1),
					       XEXP (x, 0)), 1);
      x = *loc;
      code = GET_CODE (x);
    }

  /* Canonicalize arithmetics with all constant operands.  */
  switch (GET_RTX_CLASS (code))
    {
    case RTX_UNARY:
      if (CONSTANT_P (XEXP (x, 0)))
	new_rtx = simplify_unary_operation (code, GET_MODE (x), XEXP (x, 0),
					    op0_mode);
      break;
    case RTX_COMM_ARITH:
    case RTX_BIN_ARITH:
      if (CONSTANT_P (XEXP (x, 0)) && CONSTANT_P (XEXP (x, 1)))
	new_rtx = simplify_binary_operation (code, GET_MODE (x), XEXP (x, 0),
					     XEXP (x, 1));
      break;
    case RTX_COMPARE:
    case RTX_COMM_COMPARE:
      if (CONSTANT_P (XEXP (x, 0)) && CONSTANT_P (XEXP (x, 1)))
	new_rtx = simplify_relational_operation (code, GET_MODE (x), op0_mode,
						 XEXP (x, 0), XEXP (x, 1));
      break;
    default:
      break;
    }
  if (new_rtx)
    {
      validate_change (object, loc, new_rtx, 1);
      return;
    }

  switch (code)
    {
    case PLUS:
      /* If we have a PLUS whose second operand is now a CONST_INT, use
	 simplify_gen_binary to try to simplify it.
	 ??? We may want later to remove this, once simplification is
	 separated from this function.  */
      if (CONST_INT_P (XEXP (x, 1)) && XEXP (x, 1) == to)
	validate_change (object, loc,
			 simplify_gen_binary
			 (PLUS, GET_MODE (x), XEXP (x, 0), XEXP (x, 1)), 1);
      break;
    case MINUS:
      if (CONST_SCALAR_INT_P (XEXP (x, 1)))
	validate_change (object, loc,
			 simplify_gen_binary
			 (PLUS, GET_MODE (x), XEXP (x, 0),
			  simplify_gen_unary (NEG,
					      GET_MODE (x), XEXP (x, 1),
					      GET_MODE (x))), 1);
      break;
    case ZERO_EXTEND:
    case SIGN_EXTEND:
      if (GET_MODE (XEXP (x, 0)) == VOIDmode)
	{
	  new_rtx = simplify_gen_unary (code, GET_MODE (x), XEXP (x, 0),
					op0_mode);
	  /* If any of the above failed, substitute in something that
	     we know won't be recognized.  */
	  if (!new_rtx)
	    new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
	  validate_change (object, loc, new_rtx, 1);
	}
      break;
    case SUBREG:
      /* All subregs possible to simplify should be simplified.  */
      new_rtx = simplify_subreg (GET_MODE (x), SUBREG_REG (x), op0_mode,
				 SUBREG_BYTE (x));

      /* Subregs of VOIDmode operands are incorrect.  */
      if (!new_rtx && GET_MODE (SUBREG_REG (x)) == VOIDmode)
	new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
      if (new_rtx)
	validate_change (object, loc, new_rtx, 1);
      break;
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      /* If we are replacing a register with memory, try to change the memory
	 to be the mode required for memory in extract operations (this isn't
	 likely to be an insertion operation; if it was, nothing bad will
	 happen, we might just fail in some cases).  */

      if (MEM_P (XEXP (x, 0))
	  && is_a <scalar_int_mode> (GET_MODE (XEXP (x, 0)), &is_mode)
	  && CONST_INT_P (XEXP (x, 1))
	  && CONST_INT_P (XEXP (x, 2))
	  && !mode_dependent_address_p (XEXP (XEXP (x, 0), 0),
					MEM_ADDR_SPACE (XEXP (x, 0)))
	  && !MEM_VOLATILE_P (XEXP (x, 0)))
	{
	  int pos = INTVAL (XEXP (x, 2));
	  machine_mode new_mode = is_mode;
	  if (GET_CODE (x) == ZERO_EXTRACT && targetm.have_extzv ())
	    new_mode = insn_data[targetm.code_for_extzv].operand[1].mode;
	  else if (GET_CODE (x) == SIGN_EXTRACT && targetm.have_extv ())
	    new_mode = insn_data[targetm.code_for_extv].operand[1].mode;
	  scalar_int_mode wanted_mode = (new_mode == VOIDmode
					 ? word_mode
					 : as_a <scalar_int_mode> (new_mode));

	  /* If we have a narrower mode, we can do something.  */
	  if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
	    {
	      int offset = pos / BITS_PER_UNIT;
	      rtx newmem;

	      /* If the bytes and bits are counted differently, we
		 must adjust the offset.  */
	      if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
		offset =
		  (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode) -
		   offset);

	      gcc_assert (GET_MODE_PRECISION (wanted_mode)
			  == GET_MODE_BITSIZE (wanted_mode));
	      pos %= GET_MODE_BITSIZE (wanted_mode);

	      newmem = adjust_address_nv (XEXP (x, 0), wanted_mode, offset);

	      validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
	      validate_change (object, &XEXP (x, 0), newmem, 1);
	    }
	}

      break;

    default:
      break;
    }
}

/* Replace every occurrence of FROM in X with TO.  Mark each change with
   validate_change passing OBJECT.  */

static void
validate_replace_rtx_1 (rtx *loc, rtx from, rtx to, rtx_insn *object,
			bool simplify)
{
  int i, j;
  const char *fmt;
  rtx x = *loc;
  enum rtx_code code;
  machine_mode op0_mode = VOIDmode;
  int prev_changes = num_changes;

  if (!x)
    return;

  code = GET_CODE (x);
  fmt = GET_RTX_FORMAT (code);
  if (fmt[0] == 'e')
    op0_mode = GET_MODE (XEXP (x, 0));

  /* X matches FROM if it is the same rtx or they are both referring to the
     same register in the same mode.  Avoid calling rtx_equal_p unless the
     operands look similar.  */

  if (x == from
      || (REG_P (x) && REG_P (from)
	  && GET_MODE (x) == GET_MODE (from)
	  && REGNO (x) == REGNO (from))
      || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
	  && rtx_equal_p (x, from)))
    {
      validate_unshare_change (object, loc, to, 1);
      return;
    }

  /* Call ourself recursively to perform the replacements.
     We must not replace inside already replaced expression, otherwise we
     get infinite recursion for replacements like (reg X)->(subreg (reg X))
     so we must special case shared ASM_OPERANDS.  */

  if (GET_CODE (x) == PARALLEL)
    {
      for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
	{
	  if (j && GET_CODE (XVECEXP (x, 0, j)) == SET
	      && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == ASM_OPERANDS)
	    {
	      /* Verify that operands are really shared.  */
	      gcc_assert (ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (x, 0, 0)))
			  == ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP
							      (x, 0, j))));
	      validate_replace_rtx_1 (&SET_DEST (XVECEXP (x, 0, j)),
				      from, to, object, simplify);
	    }
	  else
	    validate_replace_rtx_1 (&XVECEXP (x, 0, j), from, to, object,
				    simplify);
	}
    }
  else
    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
	if (fmt[i] == 'e')
	  validate_replace_rtx_1 (&XEXP (x, i), from, to, object, simplify);
	else if (fmt[i] == 'E')
	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object,
				    simplify);
      }

  /* If we didn't substitute, there is nothing more to do.  */
  if (num_changes == prev_changes)
    return;

  /* ??? The regmove is no more, so is this aberration still necessary?  */
  /* Allow substituted expression to have different mode.  This is used by
     regmove to change mode of pseudo register.  */
  if (fmt[0] == 'e' && GET_MODE (XEXP (x, 0)) != VOIDmode)
    op0_mode = GET_MODE (XEXP (x, 0));

  /* Do changes needed to keep rtx consistent.  Don't do any other
     simplifications, as it is not our job.  */
  if (simplify)
    simplify_while_replacing (loc, to, object, op0_mode);
}

/* Try replacing every occurrence of FROM in subexpression LOC of INSN
   with TO.  After all changes have been made, validate by seeing
   if INSN is still valid.  */

int
validate_replace_rtx_subexp (rtx from, rtx to, rtx_insn *insn, rtx *loc)
{
  validate_replace_rtx_1 (loc, from, to, insn, true);
  return apply_change_group ();
}

/* Try replacing every occurrence of FROM in INSN with TO.  After all
   changes have been made, validate by seeing if INSN is still valid.  */

int
validate_replace_rtx (rtx from, rtx to, rtx_insn *insn)
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true);
  return apply_change_group ();
}
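
/* Usage sketch (illustrative addition, not original GCC code):
   rewrite every use of one register as another and keep the rewrite
   only if INSN still matches some pattern.  OLD_REG and NEW_REG are
   hypothetical registers of the same mode:

     if (validate_replace_rtx (old_reg, new_reg, insn))
       ... INSN now uses NEW_REG wherever OLD_REG appeared ...

   On failure, every substitution is undone automatically.  */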

/* Try replacing every occurrence of FROM in WHERE with TO.  Assume that WHERE
   is a part of INSN.  After all changes have been made, validate by seeing if
   INSN is still valid.
   validate_replace_rtx (from, to, insn) is equivalent to
   validate_replace_rtx_part (from, to, &PATTERN (insn), insn).  */

int
validate_replace_rtx_part (rtx from, rtx to, rtx *where, rtx_insn *insn)
{
  validate_replace_rtx_1 (where, from, to, insn, true);
  return apply_change_group ();
}

/* Same as above, but do not simplify rtx afterwards.  */
int
validate_replace_rtx_part_nosimplify (rtx from, rtx to, rtx *where,
				      rtx_insn *insn)
{
  validate_replace_rtx_1 (where, from, to, insn, false);
  return apply_change_group ();
}

/* Try replacing every occurrence of FROM in INSN with TO.  This also
   will replace in REG_EQUAL and REG_EQUIV notes.  */

void
validate_replace_rtx_group (rtx from, rtx to, rtx_insn *insn)
{
  rtx note;
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true);
  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_EQUAL
	|| REG_NOTE_KIND (note) == REG_EQUIV)
      validate_replace_rtx_1 (&XEXP (note, 0), from, to, insn, true);
}

/* Function called by note_uses to replace used subexpressions.  */
struct validate_replace_src_data
{
  rtx from;			/* Old RTX */
  rtx to;			/* New RTX */
  rtx_insn *insn;		/* Insn in which substitution is occurring.  */
};

static void
validate_replace_src_1 (rtx *x, void *data)
{
  struct validate_replace_src_data *d
    = (struct validate_replace_src_data *) data;

  validate_replace_rtx_1 (x, d->from, d->to, d->insn, true);
}

/* Try replacing every occurrence of FROM in INSN with TO, avoiding
   SET_DESTs.  */

void
validate_replace_src_group (rtx from, rtx to, rtx_insn *insn)
{
  struct validate_replace_src_data d;

  d.from = from;
  d.to = to;
  d.insn = insn;
  note_uses (&PATTERN (insn), validate_replace_src_1, &d);
}
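
/* Illustrative note (an addition, not original GCC code):
   validate_replace_src_group only queues changes, so a caller pairs it
   with the group machinery:

     validate_replace_src_group (old_reg, new_src, insn);
     if (!apply_change_group ())
       ... INSN is unchanged; the substitutions were rolled back ...

   Because note_uses visits only uses, a destination such as
   (set (reg R) ...) keeps R even when R equals OLD_REG.  */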

/* Try to simplify INSN.
   Invoke simplify_rtx () on every SET_SRC and SET_DEST inside the INSN's
   pattern and return true if something was simplified.  */

bool
validate_simplify_insn (rtx_insn *insn)
{
  int i;
  rtx pat = NULL;
  rtx newpat = NULL;

  pat = PATTERN (insn);

  if (GET_CODE (pat) == SET)
    {
      newpat = simplify_rtx (SET_SRC (pat));
      if (newpat && !rtx_equal_p (SET_SRC (pat), newpat))
	validate_change (insn, &SET_SRC (pat), newpat, 1);
      newpat = simplify_rtx (SET_DEST (pat));
      if (newpat && !rtx_equal_p (SET_DEST (pat), newpat))
	validate_change (insn, &SET_DEST (pat), newpat, 1);
    }
  else if (GET_CODE (pat) == PARALLEL)
    for (i = 0; i < XVECLEN (pat, 0); i++)
      {
	rtx s = XVECEXP (pat, 0, i);

	if (GET_CODE (XVECEXP (pat, 0, i)) == SET)
	  {
	    newpat = simplify_rtx (SET_SRC (s));
	    if (newpat && !rtx_equal_p (SET_SRC (s), newpat))
	      validate_change (insn, &SET_SRC (s), newpat, 1);
	    newpat = simplify_rtx (SET_DEST (s));
	    if (newpat && !rtx_equal_p (SET_DEST (s), newpat))
	      validate_change (insn, &SET_DEST (s), newpat, 1);
	  }
      }
  return ((num_changes_pending () > 0) && (apply_change_group () > 0));
}

/* Try to process the address of memory expression MEM.  Return true on
   success; leave the caller to clean up on failure.  */

bool
insn_propagation::apply_to_mem_1 (rtx mem)
{
  auto old_num_changes = num_validated_changes ();
  mem_depth += 1;
  bool res = apply_to_rvalue_1 (&XEXP (mem, 0));
  mem_depth -= 1;
  if (!res)
    return false;

  if (old_num_changes != num_validated_changes ()
      && should_check_mems
      && !check_mem (old_num_changes, mem))
    return false;

  return true;
}

/* Try to process the rvalue expression at *LOC.  Return true on success;
   leave the caller to clean up on failure.  */

bool
insn_propagation::apply_to_rvalue_1 (rtx *loc)
{
  rtx x = *loc;
  enum rtx_code code = GET_CODE (x);
  machine_mode mode = GET_MODE (x);

  auto old_num_changes = num_validated_changes ();
  if (from && GET_CODE (x) == GET_CODE (from) && rtx_equal_p (x, from))
    {
      /* Don't replace register asms in asm statements; we mustn't
	 change the user's register allocation.  */
      if (REG_P (x)
	  && HARD_REGISTER_P (x)
	  && register_asm_p (x)
	  && asm_noperands (PATTERN (insn)) > 0)
	return false;

      if (should_unshare)
	validate_unshare_change (insn, loc, to, 1);
      else
	validate_change (insn, loc, to, 1);
      if (mem_depth && !REG_P (to) && !CONSTANT_P (to))
	{
	  /* We're substituting into an address, but TO will have the
	     form expected outside an address.  Canonicalize it if
	     necessary.  */
	  insn_propagation subprop (insn);
	  subprop.mem_depth += 1;
	  if (!subprop.apply_to_rvalue (loc))
	    gcc_unreachable ();
	  if (should_unshare
	      && num_validated_changes () != old_num_changes + 1)
	    {
	      /* TO is owned by someone else, so create a copy and
		 return TO to its original form.  */
	      rtx to = copy_rtx (*loc);
	      cancel_changes (old_num_changes);
	      validate_change (insn, loc, to, 1);
	    }
	}
      num_replacements += 1;
      should_unshare = true;
      result_flags |= UNSIMPLIFIED;
      return true;
    }

  /* Recursively apply the substitution and see if we can simplify
     the result.  This specifically shouldn't use simplify_gen_* for
     speculative simplifications, since we want to avoid generating new
     expressions where possible.  */
  auto old_result_flags = result_flags;
  rtx newx = NULL_RTX;
  bool recurse_p = false;
  switch (GET_RTX_CLASS (code))
    {
    case RTX_UNARY:
      {
	machine_mode op0_mode = GET_MODE (XEXP (x, 0));
	if (!apply_to_rvalue_1 (&XEXP (x, 0)))
	  return false;
	if (from && old_num_changes == num_validated_changes ())
	  return true;

	newx = simplify_unary_operation (code, mode, XEXP (x, 0), op0_mode);
	break;
      }

    case RTX_BIN_ARITH:
    case RTX_COMM_ARITH:
      {
	if (!apply_to_rvalue_1 (&XEXP (x, 0))
	    || !apply_to_rvalue_1 (&XEXP (x, 1)))
	  return false;
	if (from && old_num_changes == num_validated_changes ())
	  return true;

	if (GET_RTX_CLASS (code) == RTX_COMM_ARITH
	    && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
	  newx = simplify_gen_binary (code, mode, XEXP (x, 1), XEXP (x, 0));
	else
	  newx = simplify_binary_operation (code, mode,
					    XEXP (x, 0), XEXP (x, 1));
	break;
      }

    case RTX_COMPARE:
    case RTX_COMM_COMPARE:
      {
	machine_mode op_mode = (GET_MODE (XEXP (x, 0)) != VOIDmode
				? GET_MODE (XEXP (x, 0))
				: GET_MODE (XEXP (x, 1)));
	if (!apply_to_rvalue_1 (&XEXP (x, 0))
	    || !apply_to_rvalue_1 (&XEXP (x, 1)))
	  return false;
	if (from && old_num_changes == num_validated_changes ())
	  return true;

	newx = simplify_relational_operation (code, mode, op_mode,
					      XEXP (x, 0), XEXP (x, 1));
	break;
      }

    case RTX_TERNARY:
    case RTX_BITFIELD_OPS:
      {
	machine_mode op0_mode = GET_MODE (XEXP (x, 0));
	if (!apply_to_rvalue_1 (&XEXP (x, 0))
	    || !apply_to_rvalue_1 (&XEXP (x, 1))
	    || !apply_to_rvalue_1 (&XEXP (x, 2)))
	  return false;
	if (from && old_num_changes == num_validated_changes ())
	  return true;

	newx = simplify_ternary_operation (code, mode, op0_mode,
					   XEXP (x, 0), XEXP (x, 1),
					   XEXP (x, 2));
	break;
      }

    case RTX_EXTRA:
      if (code == SUBREG)
	{
	  machine_mode inner_mode = GET_MODE (SUBREG_REG (x));
	  if (!apply_to_rvalue_1 (&SUBREG_REG (x)))
	    return false;
	  if (from && old_num_changes == num_validated_changes ())
	    return true;

	  rtx inner = SUBREG_REG (x);
	  newx = simplify_subreg (mode, inner, inner_mode, SUBREG_BYTE (x));
	  /* Reject the same cases that simplify_gen_subreg would.  */
	  if (!newx
	      && (GET_CODE (inner) == SUBREG
		  || GET_CODE (inner) == CONCAT
		  || GET_MODE (inner) == VOIDmode
		  || !validate_subreg (mode, inner_mode,
				       inner, SUBREG_BYTE (x))))
	    {
	      failure_reason = "would create an invalid subreg";
	      return false;
	    }
	  break;
	}
      else
	recurse_p = true;
      break;

    case RTX_OBJ:
      if (code == LO_SUM)
	{
	  if (!apply_to_rvalue_1 (&XEXP (x, 0))
	      || !apply_to_rvalue_1 (&XEXP (x, 1)))
	    return false;
	  if (from && old_num_changes == num_validated_changes ())
	    return true;

	  /* (lo_sum (high x) y) -> y where x and y have the same base.  */
	  rtx op0 = XEXP (x, 0);
	  rtx op1 = XEXP (x, 1);
	  if (GET_CODE (op0) == HIGH)
	    {
	      rtx base0, base1, offset0, offset1;
	      split_const (XEXP (op0, 0), &base0, &offset0);
	      split_const (op1, &base1, &offset1);
	      if (rtx_equal_p (base0, base1))
		newx = op1;
	    }
	}
      else if (code == REG)
	{
	  if (from && REG_P (from) && reg_overlap_mentioned_p (x, from))
	    {
	      failure_reason = "inexact register overlap";
	      return false;
	    }
	}
      else if (code == MEM)
	return apply_to_mem_1 (x);
      else
	recurse_p = true;
      break;

    case RTX_CONST_OBJ:
      break;

    case RTX_AUTOINC:
      if (from && reg_overlap_mentioned_p (XEXP (x, 0), from))
	{
	  failure_reason = "is subject to autoinc";
	  return false;
	}
      recurse_p = true;
      break;

    case RTX_MATCH:
    case RTX_INSN:
      gcc_unreachable ();
    }

  if (recurse_p)
    {
      const char *fmt = GET_RTX_FORMAT (code);
      for (int i = 0; fmt[i]; i++)
	switch (fmt[i])
	  {
	  case 'E':
	    for (int j = 0; j < XVECLEN (x, i); j++)
	      if (!apply_to_rvalue_1 (&XVECEXP (x, i, j)))
		return false;
	    break;

	  case 'e':
	    if (XEXP (x, i) && !apply_to_rvalue_1 (&XEXP (x, i)))
	      return false;
	    break;
	  }
    }
  else if (newx && !rtx_equal_p (x, newx))
    {
      /* All substitutions made by OLD_NUM_CHANGES onwards have been
	 simplified.  */
      result_flags = ((result_flags & ~UNSIMPLIFIED)
		      | (old_result_flags & UNSIMPLIFIED));

      if (should_note_simplifications)
	note_simplification (old_num_changes, old_result_flags, x, newx);

      /* There's no longer any point unsharing the substitutions made
	 for subexpressions, since we'll just copy this one instead.  */
      bool unshare = false;
      for (int i = old_num_changes; i < num_changes; ++i)
	{
	  unshare |= changes[i].unshare;
	  changes[i].unshare = false;
	}
      if (unshare)
	validate_unshare_change (insn, loc, newx, 1);
      else
	validate_change (insn, loc, newx, 1);
    }

  return true;
}
1290 | |
1291 | /* Try to process the lvalue expression at *LOC.  Return true on success;
1292 |    leave the caller to clean up on failure.  */
1293 | 
1294 | bool
1295 | insn_propagation::apply_to_lvalue_1 (rtx dest)
1296 | {
1297 |   rtx old_dest = dest;
1298 |   while (GET_CODE (dest) == SUBREG
1299 | 	 || GET_CODE (dest) == ZERO_EXTRACT
1300 | 	 || GET_CODE (dest) == STRICT_LOW_PART)
1301 |     {
1302 |       if (GET_CODE (dest) == ZERO_EXTRACT
1303 | 	  && (!apply_to_rvalue_1 (&XEXP (dest, 1))
1304 | 	      || !apply_to_rvalue_1 (&XEXP (dest, 2))))
1305 | 	return false;
1306 |       dest = XEXP (dest, 0);
1307 |     }
1308 | 
1309 |   if (MEM_P (dest))
1310 |     return apply_to_mem_1 (dest);
1311 | 
1312 |   /* Check whether the substitution is safe in the presence of this lvalue.  */
1313 |   if (!from
1314 |       || dest == old_dest
1315 |       || !REG_P (dest)
1316 |       || !reg_overlap_mentioned_p (dest, from))
1317 |     return true;
1318 | 
1319 |   if (SUBREG_P (old_dest)
1320 |       && SUBREG_REG (old_dest) == dest
1321 |       && !read_modify_subreg_p (old_dest))
1322 |     return true;
1323 | 
1324 |   failure_reason = "is part of a read-write destination";
1325 |   return false;
1326 | }
1327 | 
1328 | /* Try to process the instruction pattern at *LOC.  Return true on success;
1329 |    leave the caller to clean up on failure.  */
1330 | 
1331 | bool
1332 | insn_propagation::apply_to_pattern_1 (rtx *loc)
1333 | {
1334 |   rtx body = *loc;
1335 |   switch (GET_CODE (body))
1336 |     {
1337 |     case COND_EXEC:
1338 |       return (apply_to_rvalue_1 (&COND_EXEC_TEST (body))
1339 | 	      && apply_to_pattern_1 (&COND_EXEC_CODE (body)));
1340 | 
1341 |     case PARALLEL:
1342 |       {
1343 | 	int last = XVECLEN (body, 0) - 1;
1344 | 	for (int i = 0; i < last; ++i)
1345 | 	  if (!apply_to_pattern_1 (&XVECEXP (body, 0, i)))
1346 | 	    return false;
1347 | 	return apply_to_pattern_1 (&XVECEXP (body, 0, last));
1348 |       }
1349 | 
1350 |     case ASM_OPERANDS:
1351 |       for (int i = 0, len = ASM_OPERANDS_INPUT_LENGTH (body); i < len; ++i)
1352 | 	if (!apply_to_rvalue_1 (&ASM_OPERANDS_INPUT (body, i)))
1353 | 	  return false;
1354 |       return true;
1355 | 
1356 |     case CLOBBER:
1357 |       return apply_to_lvalue_1 (XEXP (body, 0));
1358 | 
1359 |     case SET:
1360 |       return (apply_to_lvalue_1 (SET_DEST (body))
1361 | 	      && apply_to_rvalue_1 (&SET_SRC (body)));
1362 | 
1363 |     default:
1364 |       /* All the other possibilities never store and can use a normal
1365 | 	 rtx walk.  This includes:
1366 | 
1367 | 	 - USE
1368 | 	 - TRAP_IF
1369 | 	 - PREFETCH
1370 | 	 - UNSPEC
1371 | 	 - UNSPEC_VOLATILE.  */
1372 |       return apply_to_rvalue_1 (loc);
1373 |     }
1374 | }
1375 | 
1376 | /* Apply this insn_propagation object's simplification or substitution
1377 |    to the instruction pattern at LOC.  */
1378 | 
1379 | bool
1380 | insn_propagation::apply_to_pattern (rtx *loc)
1381 | {
1382 |   unsigned int num_changes = num_validated_changes ();
1383 |   bool res = apply_to_pattern_1 (loc);
1384 |   if (!res)
1385 |     cancel_changes (num_changes);
1386 |   return res;
1387 | }
1388 | 
1389 | /* Apply this insn_propagation object's simplification or substitution
1390 |    to the rvalue expression at LOC.  */
1391 | 
1392 | bool
1393 | insn_propagation::apply_to_rvalue (rtx *loc)
1394 | {
1395 |   unsigned int num_changes = num_validated_changes ();
1396 |   bool res = apply_to_rvalue_1 (loc);
1397 |   if (!res)
1398 |     cancel_changes (num_changes);
1399 |   return res;
1400 | }
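/* A minimal usage sketch (illustrative, not part of this file): replace
   every use of register FROM inside USE_INSN with value TO, keeping the
   result only if the rewritten insn is still recognizable.  The helper
   name is hypothetical, and it assumes no change group is already
   pending.  */

static bool
propagate_into_insn (rtx_insn *use_insn, rtx from, rtx to)
{
  insn_propagation prop (use_insn, from, to);
  if (prop.apply_to_pattern (&PATTERN (use_insn))
      && prop.num_replacements > 0
      && apply_change_group ())
    return true;	/* All changes were validated and committed.  */
  cancel_changes (0);	/* Back out any tentative changes.  */
  return false;
}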
1401 | 
1402 | /* Check whether INSN matches a specific alternative of an .md pattern.  */
1403 | 
1404 | bool
1405 | valid_insn_p (rtx_insn *insn)
1406 | {
1407 |   recog_memoized (insn);
1408 |   if (INSN_CODE (insn) < 0)
1409 |     return false;
1410 |   extract_insn (insn);
1411 |   /* We don't know whether the insn will be in code that is optimized
1412 |      for size or speed, so consider all enabled alternatives.  */
1413 |   if (!constrain_operands (1, get_enabled_alternatives (insn)))
1414 |     return false;
1415 |   return true;
1416 | }
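/* A sketch of one possible caller (hypothetical helper): test whether a
   candidate pattern PAT would be accepted, without emitting it, by
   wrapping it in a raw insn first.  */

static bool
pattern_matches_p (rtx pat)
{
  rtx_insn *insn = make_insn_raw (pat);
  return valid_insn_p (insn);
}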
1417 | 
1418 | /* Return true if OP is a valid general operand for machine mode MODE.
1419 |    This is either a register reference, a memory reference,
1420 |    or a constant.  In the case of a memory reference, the address
1421 |    is checked for general validity for the target machine.
1422 | 
1423 |    Register and memory references must have mode MODE in order to be valid,
1424 |    but some constants have no machine mode and are valid for any mode.
1425 | 
1426 |    If MODE is VOIDmode, OP is checked for validity for whatever mode
1427 |    it has.
1428 | 
1429 |    The main use of this function is as a predicate in match_operand
1430 |    expressions in the machine description.  */
1431 | 
1432 | bool
1433 | general_operand (rtx op, machine_mode mode)
1434 | {
1435 |   enum rtx_code code = GET_CODE (op);
1436 | 
1437 |   if (mode == VOIDmode)
1438 |     mode = GET_MODE (op);
1439 | 
1440 |   /* Don't accept CONST_INT or anything similar
1441 |      if the caller wants something floating.  */
1442 |   if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1443 |       && GET_MODE_CLASS (mode) != MODE_INT
1444 |       && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1445 |     return false;
1446 | 
1447 |   if (CONST_INT_P (op)
1448 |       && mode != VOIDmode
1449 |       && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
1450 |     return false;
1451 | 
1452 |   if (CONSTANT_P (op))
1453 |     return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
1454 | 	     || mode == VOIDmode)
1455 | 	    && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1456 | 	    && targetm.legitimate_constant_p (mode == VOIDmode
1457 | 					      ? GET_MODE (op)
1458 | 					      : mode, op));
1459 | 
1460 |   /* Except for certain constants with VOIDmode, already checked for,
1461 |      OP's mode must match MODE if MODE specifies a mode.  */
1462 | 
1463 |   if (GET_MODE (op) != mode)
1464 |     return false;
1465 | 
1466 |   if (code == SUBREG)
1467 |     {
1468 |       rtx sub = SUBREG_REG (op);
1469 | 
1470 | #ifdef INSN_SCHEDULING
1471 |       /* On machines that have insn scheduling, we want all memory
1472 | 	 references to be explicit, so outlaw paradoxical SUBREGs.
1473 | 	 However, we must allow them after reload so that they can
1474 | 	 get cleaned up by cleanup_subreg_operands.  */
1475 |       if (!reload_completed && MEM_P (sub)
1476 | 	  && paradoxical_subreg_p (op))
1477 | 	return false;
1478 | #endif
1479 |       /* Avoid memories with nonzero SUBREG_BYTE, as offsetting the memory
1480 | 	 may result in an incorrect reference.  We should simplify all valid
1481 | 	 subregs of MEM anyway.  But allow this after reload because we
1482 | 	 might be called from cleanup_subreg_operands.
1483 | 
1484 | 	 ??? This is a kludge.  */
1485 |       if (!reload_completed
1486 | 	  && maybe_ne (SUBREG_BYTE (op), 0)
1487 | 	  && MEM_P (sub))
1488 | 	return false;
1489 | 
1490 |       if (REG_P (sub)
1491 | 	  && REGNO (sub) < FIRST_PSEUDO_REGISTER
1492 | 	  && !REG_CAN_CHANGE_MODE_P (REGNO (sub), GET_MODE (sub), mode)
1493 | 	  && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_INT
1494 | 	  && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_FLOAT
1495 | 	  /* LRA can generate some invalid SUBREGs just for matched
1496 | 	     operand reload presentation.  LRA needs to treat them as
1497 | 	     valid.  */
1498 | 	  && ! LRA_SUBREG_P (op))
1499 | 	return false;
1500 | 
1501 |       /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
1502 | 	 create such rtl, and we must reject it.  */
1503 |       if (SCALAR_FLOAT_MODE_P (GET_MODE (op))
1504 | 	  /* LRA can use a subreg to store a floating point value in an
1505 | 	     integer mode.  Although the floating point and the
1506 | 	     integer modes need the same number of hard registers, the
1507 | 	     size of the floating point mode can be less than that of
1508 | 	     the integer mode.  */
1509 | 	  && ! lra_in_progress
1510 | 	  && paradoxical_subreg_p (op))
1511 | 	return false;
1512 | 
1513 |       op = sub;
1514 |       code = GET_CODE (op);
1515 |     }
1516 | 
1517 |   if (code == REG)
1518 |     return (REGNO (op) >= FIRST_PSEUDO_REGISTER
1519 | 	    || in_hard_reg_set_p (operand_reg_set, GET_MODE (op), REGNO (op)));
1520 | 
1521 |   if (code == MEM)
1522 |     {
1523 |       rtx y = XEXP (op, 0);
1524 | 
1525 |       if (! volatile_ok && MEM_VOLATILE_P (op))
1526 | 	return false;
1527 | 
1528 |       /* Use the mem's mode, since it will be reloaded thus.  LRA can
1529 | 	 generate move insns with invalid addresses, which are made valid
1530 | 	 and efficiently calculated by LRA through further numerous
1531 | 	 transformations.  */
1532 |       if (lra_in_progress
1533 | 	  || memory_address_addr_space_p (GET_MODE (op), y, MEM_ADDR_SPACE (op)))
1534 | 	return true;
1535 |     }
1536 | 
1537 |   return false;
1538 | }
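/* Illustrative cases (a sketch, not from the original file):

     (reg:SI 100)              accepted for SImode; pseudo registers are
			       always allowed
     (mem:SI (reg:SI 101))     accepted if the address is valid for the
			       target (or while lra_in_progress)
     (const_int 70000)         rejected for HImode, because the value
			       changes under trunc_int_for_mode
     (subreg:SI (mem:DI X) 4)  rejected before reload: nonzero SUBREG_BYTE
			       of a MEM.  */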
1539 | 
1540 | /* Return true if OP is a valid memory address for a memory reference
1541 |    of mode MODE.
1542 | 
1543 |    The main use of this function is as a predicate in match_operand
1544 |    expressions in the machine description.  */
1545 | 
1546 | bool
1547 | address_operand (rtx op, machine_mode mode)
1548 | {
1549 |   /* Wrong mode for an address expr.  */
1550 |   if (GET_MODE (op) != VOIDmode
1551 |       && ! SCALAR_INT_MODE_P (GET_MODE (op)))
1552 |     return false;
1553 | 
1554 |   return memory_address_p (mode, op);
1555 | }
1556 | 
1557 | /* Return true if OP is a register reference of mode MODE.
1558 |    If MODE is VOIDmode, accept a register in any mode.
1559 | 
1560 |    The main use of this function is as a predicate in match_operand
1561 |    expressions in the machine description.  */
1562 | 
1563 | bool
1564 | register_operand (rtx op, machine_mode mode)
1565 | {
1566 |   if (GET_CODE (op) == SUBREG)
1567 |     {
1568 |       rtx sub = SUBREG_REG (op);
1569 | 
1570 |       /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
1571 | 	 because it is guaranteed to be reloaded into one.
1572 | 	 Just make sure the MEM is valid in itself.
1573 | 	 (Ideally, (SUBREG (MEM)...) should not exist after reload,
1574 | 	 but currently it does result from (SUBREG (REG)...) where the
1575 | 	 reg went on the stack.)  */
1576 |       if (!REG_P (sub) && (reload_completed || !MEM_P (sub)))
1577 | 	return false;
1578 |     }
1579 |   else if (!REG_P (op))
1580 |     return false;
1581 |   return general_operand (op, mode);
1582 | }
1583 | 
1584 | /* Return true for a register in Pmode; ignore the tested mode.  */
1585 | 
1586 | bool
1587 | pmode_register_operand (rtx op, machine_mode mode ATTRIBUTE_UNUSED)
1588 | {
1589 |   return register_operand (op, Pmode);
1590 | }
1591 | 
1592 | /* Return true if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
1593 |    or a hard register.  */
1594 | 
1595 | bool
1596 | scratch_operand (rtx op, machine_mode mode)
1597 | {
1598 |   if (GET_MODE (op) != mode && mode != VOIDmode)
1599 |     return false;
1600 | 
1601 |   return (GET_CODE (op) == SCRATCH
1602 | 	  || (REG_P (op)
1603 | 	      && (lra_in_progress
1604 | 		  || (REGNO (op) < FIRST_PSEUDO_REGISTER
1605 | 		      && REGNO_REG_CLASS (REGNO (op)) != NO_REGS))));
1606 | }
1607 | 
1608 | /* Return true if OP is a valid immediate operand for mode MODE.
1609 | 
1610 |    The main use of this function is as a predicate in match_operand
1611 |    expressions in the machine description.  */
1612 | 
1613 | bool
1614 | immediate_operand (rtx op, machine_mode mode)
1615 | {
1616 |   /* Don't accept CONST_INT or anything similar
1617 |      if the caller wants something floating.  */
1618 |   if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1619 |       && GET_MODE_CLASS (mode) != MODE_INT
1620 |       && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1621 |     return false;
1622 | 
1623 |   if (CONST_INT_P (op)
1624 |       && mode != VOIDmode
1625 |       && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
1626 |     return false;
1627 | 
1628 |   return (CONSTANT_P (op)
1629 | 	  && (GET_MODE (op) == mode || mode == VOIDmode
1630 | 	      || GET_MODE (op) == VOIDmode)
1631 | 	  && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1632 | 	  && targetm.legitimate_constant_p (mode == VOIDmode
1633 | 					    ? GET_MODE (op)
1634 | 					    : mode, op));
1635 | }
1636 | 
1637 | /* Return true if OP is an operand that is a CONST_INT of mode MODE.  */
1638 | 
1639 | bool
1640 | const_int_operand (rtx op, machine_mode mode)
1641 | {
1642 |   if (!CONST_INT_P (op))
1643 |     return false;
1644 | 
1645 |   if (mode != VOIDmode
1646 |       && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
1647 |     return false;
1648 | 
1649 |   return true;
1650 | }
1651 | 
1652 | #if TARGET_SUPPORTS_WIDE_INT
1653 | /* Return true if OP is an operand that is a CONST_INT or CONST_WIDE_INT
1654 |    of mode MODE.  */
1655 | bool
1656 | const_scalar_int_operand (rtx op, machine_mode mode)
1657 | {
1658 |   if (!CONST_SCALAR_INT_P (op))
1659 |     return false;
1660 | 
1661 |   if (CONST_INT_P (op))
1662 |     return const_int_operand (op, mode);
1663 | 
1664 |   if (mode != VOIDmode)
1665 |     {
1666 |       scalar_int_mode int_mode = as_a <scalar_int_mode> (mode);
1667 |       int prec = GET_MODE_PRECISION (int_mode);
1668 |       int bitsize = GET_MODE_BITSIZE (int_mode);
1669 | 
1670 |       if (CONST_WIDE_INT_NUNITS (op) * HOST_BITS_PER_WIDE_INT > bitsize)
1671 | 	return false;
1672 | 
1673 |       if (prec == bitsize)
1674 | 	return true;
1675 |       else
1676 | 	{
1677 | 	  /* Multiword partial int.  */
1678 | 	  HOST_WIDE_INT x
1679 | 	    = CONST_WIDE_INT_ELT (op, CONST_WIDE_INT_NUNITS (op) - 1);
1680 | 	  return (sext_hwi (x, prec & (HOST_BITS_PER_WIDE_INT - 1)) == x);
1681 | 	}
1682 |     }
1683 |   return true;
1684 | }
1685 | 
1686 | /* Return true if OP is an operand that is a constant integer or constant
1687 |    floating-point number of MODE.  */
1688 | 
1689 | bool
1690 | const_double_operand (rtx op, machine_mode mode)
1691 | {
1692 |   return (GET_CODE (op) == CONST_DOUBLE)
1693 | 	  && (GET_MODE (op) == mode || mode == VOIDmode);
1694 | }
1695 | #else
1696 | /* Return true if OP is an operand that is a constant integer or constant
1697 |    floating-point number of MODE.  */
1698 | 
1699 | bool
1700 | const_double_operand (rtx op, machine_mode mode)
1701 | {
1702 |   /* Don't accept CONST_INT or anything similar
1703 |      if the caller wants something floating.  */
1704 |   if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1705 |       && GET_MODE_CLASS (mode) != MODE_INT
1706 |       && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1707 |     return false;
1708 | 
1709 |   return ((CONST_DOUBLE_P (op) || CONST_INT_P (op))
1710 | 	  && (mode == VOIDmode || GET_MODE (op) == mode
1711 | 	      || GET_MODE (op) == VOIDmode));
1712 | }
1713 | #endif
1714 | /* Return true if OP is a general operand that is not an immediate
1715 |    operand of mode MODE.  */
1716 | 
1717 | bool
1718 | nonimmediate_operand (rtx op, machine_mode mode)
1719 | {
1720 |   return (general_operand (op, mode) && ! CONSTANT_P (op));
1721 | }
1722 | 
1723 | /* Return true if OP is a register reference or
1724 |    immediate value of mode MODE.  */
1725 | 
1726 | bool
1727 | nonmemory_operand (rtx op, machine_mode mode)
1728 | {
1729 |   if (CONSTANT_P (op))
1730 |     return immediate_operand (op, mode);
1731 |   return register_operand (op, mode);
1732 | }
1733 | 
1734 | /* Return true if OP is a valid operand that stands for pushing a
1735 |    value of mode MODE onto the stack.
1736 | 
1737 |    The main use of this function is as a predicate in match_operand
1738 |    expressions in the machine description.  */
1739 | 
1740 | bool
1741 | push_operand (rtx op, machine_mode mode)
1742 | {
1743 |   if (!MEM_P (op))
1744 |     return false;
1745 | 
1746 |   if (mode != VOIDmode && GET_MODE (op) != mode)
1747 |     return false;
1748 | 
1749 |   poly_int64 rounded_size = GET_MODE_SIZE (mode);
1750 | 
1751 | #ifdef PUSH_ROUNDING
1752 |   rounded_size = PUSH_ROUNDING (MACRO_INT (rounded_size));
1753 | #endif
1754 | 
1755 |   op = XEXP (op, 0);
1756 | 
1757 |   if (known_eq (rounded_size, GET_MODE_SIZE (mode)))
1758 |     {
1759 |       if (GET_CODE (op) != STACK_PUSH_CODE)
1760 | 	return false;
1761 |     }
1762 |   else
1763 |     {
1764 |       poly_int64 offset;
1765 |       if (GET_CODE (op) != PRE_MODIFY
1766 | 	  || GET_CODE (XEXP (op, 1)) != PLUS
1767 | 	  || XEXP (XEXP (op, 1), 0) != XEXP (op, 0)
1768 | 	  || !poly_int_rtx_p (XEXP (XEXP (op, 1), 1), &offset)
1769 | 	  || (STACK_GROWS_DOWNWARD
1770 | 	      ? maybe_ne (offset, -rounded_size)
1771 | 	      : maybe_ne (offset, rounded_size)))
1772 | 	return false;
1773 |     }
1774 | 
1775 |   return XEXP (op, 0) == stack_pointer_rtx;
1776 | }
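/* Illustrative shapes (a sketch, assuming STACK_GROWS_DOWNWARD and
   STACK_PUSH_CODE == PRE_DEC):

     (mem:SI (pre_dec:P (reg sp)))            accepted when SImode's size
					      is not rounded
     (mem:HI (pre_modify:P (reg sp)
	      (plus:P (reg sp) (const_int -4))))
					      accepted if PUSH_ROUNDING
					      rounds HImode's size up to 4
     (mem:SI (post_inc:P (reg sp)))           rejected: wrong push code.  */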
1777 | 
1778 | /* Return true if OP is a valid operand that stands for popping a
1779 |    value of mode MODE off the stack.
1780 | 
1781 |    The main use of this function is as a predicate in match_operand
1782 |    expressions in the machine description.  */
1783 | 
1784 | bool
1785 | pop_operand (rtx op, machine_mode mode)
1786 | {
1787 |   if (!MEM_P (op))
1788 |     return false;
1789 | 
1790 |   if (mode != VOIDmode && GET_MODE (op) != mode)
1791 |     return false;
1792 | 
1793 |   op = XEXP (op, 0);
1794 | 
1795 |   if (GET_CODE (op) != STACK_POP_CODE)
1796 |     return false;
1797 | 
1798 |   return XEXP (op, 0) == stack_pointer_rtx;
1799 | }
1800 | 
1801 | /* Return true if ADDR is a valid memory address
1802 |    for mode MODE in address space AS.  */
1803 | 
1804 | bool
1805 | memory_address_addr_space_p (machine_mode mode ATTRIBUTE_UNUSED,
1806 | 			     rtx addr, addr_space_t as)
1807 | {
1808 | #ifdef GO_IF_LEGITIMATE_ADDRESS
1809 |   gcc_assert (ADDR_SPACE_GENERIC_P (as));
1810 |   GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
1811 |   return false;
1812 | 
1813 |  win:
1814 |   return true;
1815 | #else
1816 |   return targetm.addr_space.legitimate_address_p (mode, addr, 0, as);
1817 | #endif
1818 | }
1819 | 
1820 | /* Return true if OP is a valid memory reference with mode MODE,
1821 |    including a valid address.
1822 | 
1823 |    The main use of this function is as a predicate in match_operand
1824 |    expressions in the machine description.  */
1825 | 
1826 | bool
1827 | memory_operand (rtx op, machine_mode mode)
1828 | {
1829 |   rtx inner;
1830 | 
1831 |   if (! reload_completed)
1832 |     /* Note that no SUBREG is a memory operand before end of reload pass,
1833 |        because (SUBREG (MEM...)) forces reloading into a register.  */
1834 |     return MEM_P (op) && general_operand (op, mode);
1835 | 
1836 |   if (mode != VOIDmode && GET_MODE (op) != mode)
1837 |     return false;
1838 | 
1839 |   inner = op;
1840 |   if (GET_CODE (inner) == SUBREG)
1841 |     inner = SUBREG_REG (inner);
1842 | 
1843 |   return (MEM_P (inner) && general_operand (op, mode));
1844 | }
1845 | 
1846 | /* Return true if OP is a valid indirect memory reference with mode MODE;
1847 |    that is, a memory reference whose address is a general_operand.  */
1848 | 
1849 | bool
1850 | indirect_operand (rtx op, machine_mode mode)
1851 | {
1852 |   /* Before reload, a SUBREG isn't in memory (see memory_operand, above).  */
1853 |   if (! reload_completed
1854 |       && GET_CODE (op) == SUBREG && MEM_P (SUBREG_REG (op)))
1855 |     {
1856 |       if (mode != VOIDmode && GET_MODE (op) != mode)
1857 | 	return false;
1858 | 
1859 |       /* The only way that we can have a general_operand as the resulting
1860 | 	 address is if OFFSET is zero and the address already is an operand
1861 | 	 or if the address is (plus Y (const_int -OFFSET)) and Y is an
1862 | 	 operand.  */
1863 |       poly_int64 offset;
1864 |       rtx addr = strip_offset (XEXP (SUBREG_REG (op), 0), &offset);
1865 |       return (known_eq (offset + SUBREG_BYTE (op), 0)
1866 | 	      && general_operand (addr, Pmode));
1867 |     }
1868 | 
1869 |   return (MEM_P (op)
1870 | 	  && memory_operand (op, mode)
1871 | 	  && general_operand (XEXP (op, 0), Pmode));
1872 | }
1873 | 
1874 | /* Return true if this is an ordered comparison operator (not including
1875 |    ORDERED and UNORDERED).  */
1876 | 
1877 | bool
1878 | ordered_comparison_operator (rtx op, machine_mode mode)
1879 | {
1880 |   if (mode != VOIDmode && GET_MODE (op) != mode)
1881 |     return false;
1882 |   switch (GET_CODE (op))
1883 |     {
1884 |     case EQ:
1885 |     case NE:
1886 |     case LT:
1887 |     case LTU:
1888 |     case LE:
1889 |     case LEU:
1890 |     case GT:
1891 |     case GTU:
1892 |     case GE:
1893 |     case GEU:
1894 |       return true;
1895 |     default:
1896 |       return false;
1897 |     }
1898 | }
1899 | 
1900 | /* Return true if this is a comparison operator.  This allows the use of
1901 |    MATCH_OPERATOR to recognize all the branch insns.  */
1902 | 
1903 | bool
1904 | comparison_operator (rtx op, machine_mode mode)
1905 | {
1906 |   return ((mode == VOIDmode || GET_MODE (op) == mode)
1907 | 	  && COMPARISON_P (op));
1908 | }
1909 | 
1910 | /* If BODY is an insn body that uses ASM_OPERANDS, return it.  */
1911 | 
1912 | rtx
1913 | extract_asm_operands (rtx body)
1914 | {
1915 |   rtx tmp;
1916 |   switch (GET_CODE (body))
1917 |     {
1918 |     case ASM_OPERANDS:
1919 |       return body;
1920 | 
1921 |     case SET:
1922 |       /* Single output operand: BODY is (set OUTPUT (asm_operands ...)).  */
1923 |       tmp = SET_SRC (body);
1924 |       if (GET_CODE (tmp) == ASM_OPERANDS)
1925 | 	return tmp;
1926 |       break;
1927 | 
1928 |     case PARALLEL:
1929 |       tmp = XVECEXP (body, 0, 0);
1930 |       if (GET_CODE (tmp) == ASM_OPERANDS)
1931 | 	return tmp;
1932 |       if (GET_CODE (tmp) == SET)
1933 | 	{
1934 | 	  tmp = SET_SRC (tmp);
1935 | 	  if (GET_CODE (tmp) == ASM_OPERANDS)
1936 | 	    return tmp;
1937 | 	}
1938 |       break;
1939 | 
1940 |     default:
1941 |       break;
1942 |     }
1943 |   return NULL;
1944 | }
1945 | 
1946 | /* If BODY is an insn body that uses ASM_OPERANDS,
1947 |    return the number of operands (both input and output) in the insn.
1948 |    If BODY is an insn body that uses ASM_INPUT with CLOBBERS in PARALLEL,
1949 |    return 0.
1950 |    Otherwise return -1.  */
1951 | 
1952 | int
1953 | asm_noperands (const_rtx body)
1954 | {
1955 |   rtx asm_op = extract_asm_operands (CONST_CAST_RTX (body));
1956 |   int i, n_sets = 0;
1957 | 
1958 |   if (asm_op == NULL)
1959 |     {
1960 |       if (GET_CODE (body) == PARALLEL && XVECLEN (body, 0) >= 2
1961 | 	  && GET_CODE (XVECEXP (body, 0, 0)) == ASM_INPUT)
1962 | 	{
1963 | 	  /* body is [(asm_input ...) (clobber (reg ...))...].  */
1964 | 	  for (i = XVECLEN (body, 0) - 1; i > 0; i--)
1965 | 	    if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
1966 | 	      return -1;
1967 | 	  return 0;
1968 | 	}
1969 |       return -1;
1970 |     }
1971 | 
1972 |   if (GET_CODE (body) == SET)
1973 |     n_sets = 1;
1974 |   else if (GET_CODE (body) == PARALLEL)
1975 |     {
1976 |       if (GET_CODE (XVECEXP (body, 0, 0)) == SET)
1977 | 	{
1978 | 	  /* Multiple output operands, or 1 output plus some clobbers:
1979 | 	     body is
1980 | 	     [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...].  */
1981 | 	  /* Count backwards through CLOBBERs to determine number of SETs.  */
1982 | 	  for (i = XVECLEN (body, 0); i > 0; i--)
1983 | 	    {
1984 | 	      if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
1985 | 		break;
1986 | 	      if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
1987 | 		return -1;
1988 | 	    }
1989 | 
1990 | 	  /* N_SETS is now number of output operands.  */
1991 | 	  n_sets = i;
1992 | 
1993 | 	  /* Verify that all the SETs we have
1994 | 	     came from a single original asm_operands insn
1995 | 	     (so that invalid combinations are blocked).  */
1996 | 	  for (i = 0; i < n_sets; i++)
1997 | 	    {
1998 | 	      rtx elt = XVECEXP (body, 0, i);
1999 | 	      if (GET_CODE (elt) != SET)
2000 | 		return -1;
2001 | 	      if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
2002 | 		return -1;
2003 | 	      /* If these ASM_OPERANDS rtx's came from different original insns
2004 | 		 then they aren't allowed together.  */
2005 | 	      if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
2006 | 		  != ASM_OPERANDS_INPUT_VEC (asm_op))
2007 | 		return -1;
2008 | 	    }
2009 | 	}
2010 |       else
2011 | 	{
2012 | 	  /* 0 outputs, but some clobbers:
2013 | 	     body is [(asm_operands ...) (clobber (reg ...))...].  */
2014 | 	  /* Make sure all the other parallel things really are clobbers.  */
2015 | 	  for (i = XVECLEN (body, 0) - 1; i > 0; i--)
2016 | 	    if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
2017 | 	      return -1;
2018 | 	}
2019 |     }
2020 | 
2021 |   return (ASM_OPERANDS_INPUT_LENGTH (asm_op)
2022 | 	  + ASM_OPERANDS_LABEL_LENGTH (asm_op) + n_sets);
2023 | }
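/* For example (a sketch): given the body

     (parallel [(set (reg:SI 90) (asm_operands ... <two inputs> ...))
		(set (reg:SI 91) (asm_operands ...))
		(clobber (reg:CC 17))])

   there are two SETs before the CLOBBER, so the result is
   2 inputs + 0 labels + 2 outputs = 4.  */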
2024 | 
2025 | /* Assuming BODY is an insn body that uses ASM_OPERANDS,
2026 |    copy its operands (both input and output) into the vector OPERANDS,
2027 |    the locations of the operands within the insn into the vector OPERAND_LOCS,
2028 |    and the constraints for the operands into CONSTRAINTS.
2029 |    Write the modes of the operands into MODES.
2030 |    Write the location info into LOC.
2031 |    Return the assembler-template.
2032 |    If BODY is an insn body that uses ASM_INPUT with CLOBBERS in PARALLEL,
2033 |    return the basic assembly string.
2034 | 
2035 |    If LOC, MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
2036 |    we don't store that info.  */
2037 | 
2038 | const char *
2039 | decode_asm_operands (rtx body, rtx *operands, rtx **operand_locs,
2040 | 		     const char **constraints, machine_mode *modes,
2041 | 		     location_t *loc)
2042 | {
2043 |   int nbase = 0, n, i;
2044 |   rtx asmop;
2045 | 
2046 |   switch (GET_CODE (body))
2047 |     {
2048 |     case ASM_OPERANDS:
2049 |       /* Zero output asm: BODY is (asm_operands ...).  */
2050 |       asmop = body;
2051 |       break;
2052 | 
2053 |     case SET:
2054 |       /* Single output asm: BODY is (set OUTPUT (asm_operands ...)).  */
2055 |       asmop = SET_SRC (body);
2056 | 
2057 |       /* The output is in the SET.
2058 | 	 Its constraint is in the ASM_OPERANDS itself.  */
2059 |       if (operands)
2060 | 	operands[0] = SET_DEST (body);
2061 |       if (operand_locs)
2062 | 	operand_locs[0] = &SET_DEST (body);
2063 |       if (constraints)
2064 | 	constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
2065 |       if (modes)
2066 | 	modes[0] = GET_MODE (SET_DEST (body));
2067 |       nbase = 1;
2068 |       break;
2069 | 
2070 |     case PARALLEL:
2071 |       {
2072 | 	int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs.  */
2073 | 
2074 | 	asmop = XVECEXP (body, 0, 0);
2075 | 	if (GET_CODE (asmop) == SET)
2076 | 	  {
2077 | 	    asmop = SET_SRC (asmop);
2078 | 
2079 | 	    /* At least one output, plus some CLOBBERs.  The outputs are in
2080 | 	       the SETs.  Their constraints are in the ASM_OPERANDS itself.  */
2081 | 	    for (i = 0; i < nparallel; i++)
2082 | 	      {
2083 | 		if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
2084 | 		  break;	/* Past last SET */
2085 | 		gcc_assert (GET_CODE (XVECEXP (body, 0, i)) == SET);
2086 | 		if (operands)
2087 | 		  operands[i] = SET_DEST (XVECEXP (body, 0, i));
2088 | 		if (operand_locs)
2089 | 		  operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
2090 | 		if (constraints)
2091 | 		  constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
2092 | 		if (modes)
2093 | 		  modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
2094 | 	      }
2095 | 	    nbase = i;
2096 | 	  }
2097 | 	else if (GET_CODE (asmop) == ASM_INPUT)
2098 | 	  {
2099 | 	    if (loc)
2100 | 	      *loc = ASM_INPUT_SOURCE_LOCATION (asmop);
2101 | 	    return XSTR (asmop, 0);
2102 | 	  }
2103 | 	break;
2104 |       }
2105 | 
2106 |     default:
2107 |       gcc_unreachable ();
2108 |     }
2109 | 
2110 |   n = ASM_OPERANDS_INPUT_LENGTH (asmop);
2111 |   for (i = 0; i < n; i++)
2112 |     {
2113 |       if (operand_locs)
2114 | 	operand_locs[nbase + i] = &ASM_OPERANDS_INPUT (asmop, i);
2115 |       if (operands)
2116 | 	operands[nbase + i] = ASM_OPERANDS_INPUT (asmop, i);
2117 |       if (constraints)
2118 | 	constraints[nbase + i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
2119 |       if (modes)
2120 | 	modes[nbase + i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
2121 |     }
2122 |   nbase += n;
2123 | 
2124 |   n = ASM_OPERANDS_LABEL_LENGTH (asmop);
2125 |   for (i = 0; i < n; i++)
2126 |     {
2127 |       if (operand_locs)
2128 | 	operand_locs[nbase + i] = &ASM_OPERANDS_LABEL (asmop, i);
2129 |       if (operands)
2130 | 	operands[nbase + i] = ASM_OPERANDS_LABEL (asmop, i);
2131 |       if (constraints)
2132 | 	constraints[nbase + i] = "";
2133 |       if (modes)
2134 | 	modes[nbase + i] = Pmode;
2135 |     }
2136 | 
2137 |   if (loc)
2138 |     *loc = ASM_OPERANDS_SOURCE_LOCATION (asmop);
2139 | 
2140 |   return ASM_OPERANDS_TEMPLATE (asmop);
2141 | }
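/* A minimal calling sketch (hypothetical helper): BODY is known to be an
   asm because asm_noperands returned a nonnegative count, and the caller
   supplies arrays of at least MAX_RECOG_OPERANDS elements.  */

static const char *
get_asm_template_and_operands (rtx body, rtx *operands,
			       const char **constraints)
{
  machine_mode modes[MAX_RECOG_OPERANDS];
  location_t loc;
  /* Pass a null OPERAND_LOCS because this caller doesn't need it.  */
  return decode_asm_operands (body, operands, NULL, constraints,
			      modes, &loc);
}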
2142 | 
2143 | /* Parse inline assembly string STRING and determine which operands are
2144 |    referenced by % markers.  For the first NOPERANDS operands, set USED[I]
2145 |    to true if operand I is referenced.
2146 | 
2147 |    This is intended to distinguish barrier-like asms such as:
2148 | 
2149 |       asm ("" : "=m" (...));
2150 | 
2151 |    from real references such as:
2152 | 
2153 |       asm ("sw\t$0, %0" : "=m" (...));  */
2154 | 
2155 | void
2156 | get_referenced_operands (const char *string, bool *used,
2157 | 			 unsigned int noperands)
2158 | {
2159 |   memset (used, 0, sizeof (bool) * noperands);
2160 |   const char *p = string;
2161 |   while (*p)
2162 |     switch (*p)
2163 |       {
2164 |       case '%':
2165 | 	p += 1;
2166 | 	/* A letter followed by a digit indicates an operand number.  */
2167 | 	if (ISALPHA (p[0]) && ISDIGIT (p[1]))
2168 | 	  p += 1;
2169 | 	if (ISDIGIT (*p))
2170 | 	  {
2171 | 	    char *endptr;
2172 | 	    unsigned long opnum = strtoul (p, &endptr, 10);
2173 | 	    if (endptr != p && opnum < noperands)
2174 | 	      used[opnum] = true;
2175 | 	    p = endptr;
2176 | 	  }
2177 | 	else
2178 | 	  p += 1;
2179 | 	break;
2180 | 
2181 |       default:
2182 | 	p++;
2183 | 	break;
2184 |       }
2185 | }
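/* A minimal sketch (hypothetical helper): does the template TMPL of an
   asm with NOPERANDS operands (at most MAX_RECOG_OPERANDS) actually
   reference any of them?  */

static bool
asm_references_operand_p (const char *tmpl, unsigned int noperands)
{
  bool used[MAX_RECOG_OPERANDS];
  get_referenced_operands (tmpl, used, noperands);
  for (unsigned int i = 0; i < noperands; i++)
    if (used[i])
      return true;
  return false;
}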
2186 | 
2187 | /* Check if an asm_operand matches its constraints.
2188 |    Return > 0 if ok, = 0 if bad, < 0 if inconclusive.  */
2189 | 
2190 | int
2191 | asm_operand_ok (rtx op, const char *constraint, const char **constraints)
2192 | {
2193 |   int result = 0;
2194 |   bool incdec_ok = false;
2195 | 
2196 |   /* Use constrain_operands after reload.  */
2197 |   gcc_assert (!reload_completed);
2198 | 
2199 |   /* Empty constraint string is the same as "X,...,X", i.e. X for as
2200 |      many alternatives as required to match the other operands.  */
2201 |   if (*constraint == '\0')
2202 |     result = 1;
2203 | 
2204 |   while (*constraint)
2205 |     {
2206 |       enum constraint_num cn;
2207 |       char c = *constraint;
2208 |       int len;
2209 |       switch (c)
2210 | 	{
2211 | 	case ',':
2212 | 	  constraint++;
2213 | 	  continue;
2214 | 
2215 | 	case '0': case '1': case '2': case '3': case '4':
2216 | 	case '5': case '6': case '7': case '8': case '9':
2217 | 	  /* If caller provided constraints pointer, look up
2218 | 	     the matching constraint.  Otherwise, our caller should have
2219 | 	     given us the proper matching constraint, but we can't
2220 | 	     actually fail the check if they didn't.  Indicate that
2221 | 	     results are inconclusive.  */
2222 | 	  if (constraints)
2223 | 	    {
2224 | 	      char *end;
2225 | 	      unsigned long match;
2226 | 
2227 | 	      match = strtoul (constraint, &end, 10);
2228 | 	      if (!result)
2229 | 		result = asm_operand_ok (op, constraints[match], NULL);
2230 | 	      constraint = (const char *) end;
2231 | 	    }
2232 | 	  else
2233 | 	    {
2234 | 	      do
2235 | 		constraint++;
2236 | 	      while (ISDIGIT (*constraint));
2237 | 	      if (! result)
2238 | 		result = -1;
2239 | 	    }
2240 | 	  continue;
2241 | 
2242 | 	  /* The rest of the compiler assumes that reloading the address
2243 | 	     of a MEM into a register will make it fit an 'o' constraint.
2244 | 	     That is, if it sees a MEM operand for an 'o' constraint,
2245 | 	     it assumes that (mem (base-reg)) will fit.
2246 | 
2247 | 	     That assumption fails on targets that don't have offsettable
2248 | 	     addresses at all.  We therefore need to treat 'o' asm
2249 | 	     constraints as a special case and only accept operands that
2250 | 	     are already offsettable, thus proving that at least one
2251 | 	     offsettable address exists.  */
2252 | 	case 'o': /* offsettable */
2253 | 	  if (offsettable_nonstrict_memref_p (op))
2254 | 	    result = 1;
2255 | 	  break;
2256 | 
2257 | 	case 'g':
2258 | 	  if (general_operand (op, VOIDmode))
2259 | 	    result = 1;
2260 | 	  break;
2261 | 
2262 | 	case '<':
2263 | 	case '>':
2264 | 	  /* ??? Before auto-inc-dec, auto inc/dec insns are not supposed
2265 | 	     to exist, excepting those that expand_call created.  Further,
2266 | 	     on some machines which do not have generalized auto inc/dec,
2267 | 	     an inc/dec is not a memory_operand.
2268 | 
2269 | 	     Match any memory and hope things are resolved after reload.  */
2270 | 	  incdec_ok = true;
	  Value stored to 'incdec_ok' is never read
2271 | 	  /* FALLTHRU */
2272 | 	default:
2273 | 	  cn = lookup_constraint (constraint);
2274 | 	  rtx mem = NULL;
2275 | 	  switch (get_constraint_type (cn))
2276 | 	    {
2277 | 	    case CT_REGISTER:
2278 | 	      if (!result
2279 | 		  && reg_class_for_constraint (cn) != NO_REGS
2280 | 		  && GET_MODE (op) != BLKmode
2281 | 		  && register_operand (op, VOIDmode))
2282 | 		result = 1;
2283 | 	      break;
2284 | 
2285 | 	    case CT_CONST_INT:
2286 | 	      if (!result
2287 | 		  && CONST_INT_P (op)
2288 | 		  && insn_const_int_ok_for_constraint (INTVAL (op), cn))
2289 | 		result = 1;
2290 | 	      break;
2291 | 
2292 | 	    case CT_MEMORY:
2293 | 	    case CT_RELAXED_MEMORY:
2294 | 	      mem = op;
2295 | 	      /* Fall through.  */
2296 | 	    case CT_SPECIAL_MEMORY:
2297 | 	      /* Every memory operand can be reloaded to fit.  */
2298 | 	      if (!mem)
2299 | 		mem = extract_mem_from_operand (op);
2300 | 	      result = result || memory_operand (mem, VOIDmode);
2301 | 	      break;
2302 | 
2303 | 	    case CT_ADDRESS:
2304 | 	      /* Every address operand can be reloaded to fit.  */
2305 | 	      result = result || address_operand (op, VOIDmode);
2306 | 	      break;
2307 | 
2308 | 	    case CT_FIXED_FORM:
2309 | 	      result = result || constraint_satisfied_p (op, cn);
2310 | 	      break;
2311 | 	    }
2312 | 	  break;
2313 | 	}
2314 |       len = CONSTRAINT_LEN (c, constraint);
2315 |       do
2316 | 	constraint++;
2317 |       while (--len && *constraint && *constraint != ',');
2318 |       if (len)
2319 | 	return 0;
2320 |     }
2321 | 
2322 |   /* For operands without < or > constraints reject side-effects.  */
2323 |   if (AUTO_INC_DEC && !incdec_ok && result && MEM_P (op))
2324 |     switch (GET_CODE (XEXP (op, 0)))
2325 |       {
2326 |       case PRE_INC:
2327 |       case POST_INC:
2328 |       case PRE_DEC:
2329 |       case POST_DEC:
2330 |       case PRE_MODIFY:
2331 |       case POST_MODIFY:
2332 | 	return 0;
2333 |       default:
2334 | 	break;
2335 |       }
2336 | 
2337 |   return result;
2338 | }
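/* A sketch of handling the tri-state result (illustrative), with OP and
   CONSTRAINT as produced by decode_asm_operands:

     int ok = asm_operand_ok (op, constraint, NULL);
     if (ok > 0)       ... OP is known to satisfy CONSTRAINT ...
     else if (ok == 0) ... known mismatch; diagnose or reload ...
     else              ... inconclusive, e.g. a matching constraint
			   checked without the CONSTRAINTS array ...  */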
2339 | 
2340 | /* Given an rtx *P, if it is a sum containing an integer constant term,
2341 |    return the location (type rtx *) of the pointer to that constant term.
2342 |    Otherwise, return a null pointer.  */
2343 | 
2344 | rtx *
2345 | find_constant_term_loc (rtx *p)
2346 | {
2347 |   rtx *tem;
2348 |   enum rtx_code code = GET_CODE (*p);
2349 | 
2350 |   /* If *P IS such a constant term, P is its location.  */
2351 | 
2352 |   if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
2353 |       || code == CONST)
2354 |     return p;
2355 | 
2356 |   /* Otherwise, if not a sum, it has no constant term.  */
2357 | 
2358 |   if (GET_CODE (*p) != PLUS)
2359 |     return 0;
2360 | 
2361 |   /* If one of the summands is constant, return its location.  */
2362 | 
2363 |   if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
2364 |       && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
2365 |     return p;
2366 | 
2367 |   /* Otherwise, check each summand for containing a constant term.  */
2368 | 
2369 |   if (XEXP (*p, 0) != 0)
2370 |     {
2371 |       tem = find_constant_term_loc (&XEXP (*p, 0));
2372 |       if (tem != 0)
2373 | 	return tem;
2374 |     }
2375 | 
2376 |   if (XEXP (*p, 1) != 0)
2377 |     {
2378 |       tem = find_constant_term_loc (&XEXP (*p, 1));
2379 |       if (tem != 0)
2380 | 	return tem;
2381 |     }
2382 | 
2383 |   return 0;
2384 | }
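/* For example (a sketch): if *P is (plus:SI (reg:SI 100) (const_int 8)),
   the recursive walk returns &XEXP (*p, 1), the location of the
   const_int; for a bare (reg:SI 100) the result is a null pointer.  */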
2385 | 
2386 | /* Return true if OP is a memory reference whose address contains
2387 |    no side effects and remains valid after the addition of a positive
2388 |    integer less than the size of the object being referenced.
2389 | 
2390 |    We assume that the original address is valid and do not check it.
2391 | 
2392 |    This uses strict_memory_address_p as a subroutine, so
2393 |    don't use it before reload.  */
2394 | 
2395 | bool
2396 | offsettable_memref_p (rtx op)
2397 | {
2398 |   return ((MEM_P (op))
2399 | 	  && offsettable_address_addr_space_p (1, GET_MODE (op), XEXP (op, 0),
2400 | 					       MEM_ADDR_SPACE (op)));
2401 | }
2402 | 
2403 | /* Similar, but don't require a strictly valid mem ref:
2404 |    consider pseudo-regs valid as index or base regs.  */
2405 | 
2406 | bool
2407 | offsettable_nonstrict_memref_p (rtx op)
2408 | {
2409 |   return ((MEM_P (op))
2410 | 	  && offsettable_address_addr_space_p (0, GET_MODE (op), XEXP (op, 0),
2411 | 					       MEM_ADDR_SPACE (op)));
2412 | }
2413 | 
2414 | /* Return true if Y is a memory address which contains no side effects
2415 |    and would remain valid for address space AS after the addition of
2416 |    a positive integer less than the size of that mode.
2417 | 
2418 |    We assume that the original address is valid and do not check it.
2419 |    We do check that it is valid for narrower modes.
2420 | 
2421 |    If STRICTP is nonzero, we require a strictly valid address,
2422 |    for the sake of use in reload.cc.  */
2423 | 
2424 | bool
2425 | offsettable_address_addr_space_p (int strictp, machine_mode mode, rtx y,
2426 | 				  addr_space_t as)
2427 | {
2428 |   enum rtx_code ycode = GET_CODE (y);
2429 |   rtx z;
2430 |   rtx y1 = y;
2431 |   rtx *y2;
2432 |   bool (*addressp) (machine_mode, rtx, addr_space_t) =
2433 |     (strictp ? strict_memory_address_addr_space_p
2434 |      : memory_address_addr_space_p);
2435 |   poly_int64 mode_sz = GET_MODE_SIZE (mode);
2436 | 
2437 |   if (CONSTANT_ADDRESS_P (y))
2438 |     return true;
2439 | 
2440 |   /* Adjusting an offsettable address involves changing to a narrower mode.
2441 |      Make sure that's OK.  */
2442 | 
2443 |   if (mode_dependent_address_p (y, as))
2444 |     return false;
2445 | 
2446 |   machine_mode address_mode = GET_MODE (y);
2447 |   if (address_mode == VOIDmode)
2448 |     address_mode = targetm.addr_space.address_mode (as);
2449 | #ifdef POINTERS_EXTEND_UNSIGNED
2450 |   machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
2451 | #endif
2452 | 
2453 |   /* ??? How much offset does an offsettable BLKmode reference need?
2454 |      Clearly that depends on the situation in which it's being used.
2455 |      However, the current situation in which we test 0xffffffff is
2456 |      less than ideal.  Caveat user.  */
2457 |   if (known_eq (mode_sz, 0))
2458 |     mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
2459 | 
2460 |   /* If the expression contains a constant term,
2461 |      see if it remains valid when max possible offset is added.  */
2462 | 
2463 |   if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
2464 |     {
2465 |       bool good;
2466 | 
2467 |       y1 = *y2;
2468 |       *y2 = plus_constant (address_mode, *y2, mode_sz - 1);
2469 |       /* Use QImode because an odd displacement may be automatically invalid
2470 | 	 for any wider mode.  But it should be valid for a single byte.  */
2471 |       good = (*addressp) (QImode, y, as);
2472 | 
2473 |       /* In any case, restore old contents of memory.  */
2474 |       *y2 = y1;
2475 |       return good;
2476 |     }
2477 | 
2478 |   if (GET_RTX_CLASS (ycode) == RTX_AUTOINC)
2479 |     return false;
2480 | 
2481 |   /* The offset added here is chosen as the maximum offset that
2482 |      any instruction could need to add when operating on something
2483 |      of the specified mode.  We assume that if Y and Y+c are
2484 |      valid addresses then so is Y+d for all 0<d<c.  adjust_address will
2485 |      go inside a LO_SUM here, so we do so as well.  */
2486 |   if (GET_CODE (y) == LO_SUM
2487 |       && mode != BLKmode
2488 |       && known_le (mode_sz, GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT))
2489 |     z = gen_rtx_LO_SUM (address_mode, XEXP (y, 0),
2490 | 			plus_constant (address_mode, XEXP (y, 1),
2491 | 				       mode_sz - 1));
2492 | #ifdef POINTERS_EXTEND_UNSIGNED
2493 |   /* Likewise for a ZERO_EXTEND from pointer_mode.  */
2494 |   else if (POINTERS_EXTEND_UNSIGNED > 0
2495 | 	   && GET_CODE (y) == ZERO_EXTEND
2496 | 	   && GET_MODE (XEXP (y, 0)) == pointer_mode)
2497 |     z = gen_rtx_ZERO_EXTEND (address_mode,
2498 | 			     plus_constant (pointer_mode, XEXP (y, 0),
2499 | 					    mode_sz - 1));
2500 | #endif
2501 |   else
2502 |     z = plus_constant (address_mode, y, mode_sz - 1);
2503 | 
2504 |   /* Use QImode because an odd displacement may be automatically invalid
2505 |      for any wider mode.  But it should be valid for a single byte.  */
2506 |   return (*addressp) (QImode, z, as);
2507 | }
2508 | 
2509 | /* Return true if ADDR is an address-expression whose effect depends
2510 |    on the mode of the memory reference it is used in.
2511 | 
2512 |    ADDRSPACE is the address space associated with the address.
2513 | 
2514 |    Autoincrement addressing is a typical example of mode-dependence
2515 |    because the amount of the increment depends on the mode.  */
2516 | 
2517 | bool
2518 | mode_dependent_address_p (rtx addr, addr_space_t addrspace)
2519 | {
2520 |   /* Auto-increment addressing with anything other than post_modify
2521 |      or pre_modify always introduces a mode dependency.  Catch such
2522 |      cases now instead of deferring to the target.  */
2523 |   if (GET_CODE (addr) == PRE_INC
2524 |       || GET_CODE (addr) == POST_INC
2525 |       || GET_CODE (addr) == PRE_DEC
2526 |       || GET_CODE (addr) == POST_DEC)
2527 |     return true;
2528 | 
2529 |   return targetm.mode_dependent_address_p (addr, addrspace);
2530 | }
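/* For example (a sketch): (post_inc:SI (reg:SI 1)) is always
   mode-dependent, because the increment equals the size of the mode
   being accessed, whereas (plus:SI (reg:SI 100) (const_int 4)) is
   mode-dependent only if the target's hook says so.  */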
2531 | 
2532 | /* Return true if boolean attribute ATTR is supported.  */
2533 | 
2534 | static bool
2535 | have_bool_attr (bool_attr attr)
2536 | {
2537 |   switch (attr)
2538 |     {
2539 |     case BA_ENABLED:
2540 |       return HAVE_ATTR_enabled;
2541 |     case BA_PREFERRED_FOR_SIZE:
2542 |       return HAVE_ATTR_enabled || HAVE_ATTR_preferred_for_size;
2543 |     case BA_PREFERRED_FOR_SPEED:
2544 |       return HAVE_ATTR_enabled || HAVE_ATTR_preferred_for_speed;
2545 |     }
2546 |   gcc_unreachable ();
2547 | }
2548 | 
2549 | /* Return the value of ATTR for instruction INSN.  */
2550 | 
2551 | static bool
2552 | get_bool_attr (rtx_insn *insn, bool_attr attr)
2553 | {
2554 |   switch (attr)
2555 |     {
2556 |     case BA_ENABLED:
2557 |       return get_attr_enabled (insn);
2558 |     case BA_PREFERRED_FOR_SIZE:
2559 |       return get_attr_enabled (insn) && get_attr_preferred_for_size (insn);
2560 |     case BA_PREFERRED_FOR_SPEED:
2561 |       return get_attr_enabled (insn) && get_attr_preferred_for_speed (insn);
2562 |     }
2563 |   gcc_unreachable ();
2564 | }
2565 | 
2566 | /* Like get_bool_attr_mask, but don't use the cache.  */
2567 | 
2568 | static alternative_mask
2569 | get_bool_attr_mask_uncached (rtx_insn *insn, bool_attr attr)
2570 | {
2571 |   /* Temporarily install enough information for get_attr_<foo> to assume
2572 |      that the insn operands are already cached.  As above, the attribute
2573 |      mustn't depend on the values of operands, so we don't provide their
2574 |      real values here.  */
2575 |   rtx_insn *old_insn = recog_data.insn;
2576 |   int old_alternative = which_alternative;
2577 | 
2578 |   recog_data.insn = insn;
2579 |   alternative_mask mask = ALL_ALTERNATIVES;
2580 |   int n_alternatives = insn_data[INSN_CODE (insn)].n_alternatives;
2581 |   for (int i = 0; i < n_alternatives; i++)
2582 |     {
2583 |       which_alternative = i;
2584 |       if (!get_bool_attr (insn, attr))
2585 | 	mask &= ~ALTERNATIVE_BIT (i);
2586 |     }
2587 | 
2588 |   recog_data.insn = old_insn;
2589 |   which_alternative = old_alternative;
2590 |   return mask;
2591 | }
2592 | |
2593 | /* Return the mask of operand alternatives that are allowed for INSN |
2594 | by boolean attribute ATTR. This mask depends only on INSN and on |
2595 | the current target; it does not depend on things like the values of |
2596 | operands. */ |
2597 | |
2598 | static alternative_mask |
2599 | get_bool_attr_mask (rtx_insn *insn, bool_attr attr) |
2600 | { |
2601 | /* Quick exit for asms and for targets that don't use these attributes. */ |
2602 | int code = INSN_CODE (insn);
2603 | if (code < 0 || !have_bool_attr (attr))
2604 | return ALL_ALTERNATIVES;
2605 | |
2606 | /* Calling get_attr_<foo> can be expensive, so cache the mask |
2607 | for speed. */ |
2608 | if (!this_target_recog->x_bool_attr_masks[code][attr]) |
2609 | this_target_recog->x_bool_attr_masks[code][attr] |
2610 | = get_bool_attr_mask_uncached (insn, attr); |
2611 | return this_target_recog->x_bool_attr_masks[code][attr]; |
2612 | } |
2613 | |
2614 | /* Return the set of alternatives of INSN that are allowed by the current |
2615 | target. */ |
2616 | |
2617 | alternative_mask |
2618 | get_enabled_alternatives (rtx_insn *insn) |
2619 | { |
2620 | return get_bool_attr_mask (insn, BA_ENABLED); |
2621 | } |
2622 | |
2623 | /* Return the set of alternatives of INSN that are allowed by the current |
2624 | target and are preferred for the current size/speed optimization |
2625 | choice. */ |
2626 | |
2627 | alternative_mask |
2628 | get_preferred_alternatives (rtx_insn *insn) |
2629 | { |
2630 | if (optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn))) |
2631 | return get_bool_attr_mask (insn, BA_PREFERRED_FOR_SPEED); |
2632 | else |
2633 | return get_bool_attr_mask (insn, BA_PREFERRED_FOR_SIZE); |
2634 | } |
2635 | |
2636 | /* Return the set of alternatives of INSN that are allowed by the current |
2637 | target and are preferred for the size/speed optimization choice |
2638 | associated with BB. Passing a separate BB is useful if INSN has not |
2639 | been emitted yet or if we are considering moving it to a different |
2640 | block. */ |
2641 | |
2642 | alternative_mask |
2643 | get_preferred_alternatives (rtx_insn *insn, basic_block bb) |
2644 | { |
2645 | if (optimize_bb_for_speed_p (bb)) |
2646 | return get_bool_attr_mask (insn, BA_PREFERRED_FOR_SPEED); |
2647 | else |
2648 | return get_bool_attr_mask (insn, BA_PREFERRED_FOR_SIZE); |
2649 | } |
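/* [Editorial sketch -- not part of recog.cc.  A pass that wants to scan
   only the usable alternatives of a recognized, extracted insn might
   combine these masks with TEST_BIT; `insn' and the loop body are
   assumptions for illustration.

   alternative_mask preferred = get_preferred_alternatives (insn);
   for (int alt = 0; alt < recog_data.n_alternatives; alt++)
     if (TEST_BIT (preferred, alt))
       {
	 // Alternative ALT is enabled and preferred for this block's
	 // size/speed trade-off; examine its constraints here.
       }  */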
2650 | |
2651 | /* Assert that the cached boolean attributes for INSN are still accurate. |
2652 | The backend is required to define these attributes in a way that only |
2653 | depends on the current target (rather than operands, compiler phase, |
2654 | etc.). */ |
2655 | |
2656 | bool |
2657 | check_bool_attrs (rtx_insn *insn) |
2658 | { |
2659 | int code = INSN_CODE (insn);
2660 | if (code >= 0)
2661 | for (int i = 0; i <= BA_LAST; ++i)
2662 | {
2663 | enum bool_attr attr = (enum bool_attr) i;
2664 | if (this_target_recog->x_bool_attr_masks[code][attr])
2665 | gcc_assert (this_target_recog->x_bool_attr_masks[code][attr]
2666 | == get_bool_attr_mask_uncached (insn, attr));
2667 | }
2668 | return true;
2669 | } |
2670 | |
2671 | /* Like extract_insn, but save the extracted insn and don't extract it
2672 | again when called again for the same insn, expecting that recog_data
2673 | still contains the valid information.  This is used primarily by the
2674 | gen_attr infrastructure, which often extracts the same insn repeatedly. */
2675 | void
2676 | extract_insn_cached (rtx_insn *insn)
2677 | {
2678 | if (recog_data.insn == insn && INSN_CODE (insn) >= 0)
2679 | return;
2680 | extract_insn (insn);
2681 | recog_data.insn = insn;
2682 | }
2683 | |
2684 | /* Do uncached extract_insn, constrain_operands and complain about failures. |
2685 | This should be used when extracting a pre-existing constrained instruction |
2686 | if the caller wants to know which alternative was chosen. */ |
2687 | void |
2688 | extract_constrain_insn (rtx_insn *insn) |
2689 | { |
2690 | extract_insn (insn); |
2691 | if (!constrain_operands (reload_completed, get_enabled_alternatives (insn))) |
2692 | fatal_insn_not_found (insn);
2693 | } |
2694 | |
2695 | /* Do cached extract_insn, constrain_operands and complain about failures. |
2696 | Used by insn_attrtab. */ |
2697 | void |
2698 | extract_constrain_insn_cached (rtx_insn *insn) |
2699 | { |
2700 | extract_insn_cached (insn); |
2701 | if (which_alternative == -1 |
2702 | && !constrain_operands (reload_completed, |
2703 | get_enabled_alternatives (insn))) |
2704 | fatal_insn_not_found (insn);
2705 | } |
2706 | |
2707 | /* Do cached constrain_operands on INSN and complain about failures. */ |
2708 | int |
2709 | constrain_operands_cached (rtx_insn *insn, int strict) |
2710 | { |
2711 | if (which_alternative == -1) |
2712 | return constrain_operands (strict, get_enabled_alternatives (insn)); |
2713 | else |
2714 | return 1; |
2715 | } |
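/* [Editorial sketch -- not part of recog.cc.  The usual calling pattern
   for these helpers, e.g. in a post-reload pass; `insn' is assumed to
   be a recognized instruction.

   extract_constrain_insn_cached (insn);   // fills recog_data and sets
					   // which_alternative
   int alt = which_alternative;            // >= 0 after a successful match
  */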
2716 | |
2717 | /* Analyze INSN and fill in recog_data. */ |
2718 | |
2719 | void |
2720 | extract_insn (rtx_insn *insn) |
2721 | { |
2722 | int i; |
2723 | int icode; |
2724 | int noperands; |
2725 | rtx body = PATTERN (insn); |
2726 | |
2727 | recog_data.n_operands = 0; |
2728 | recog_data.n_alternatives = 0; |
2729 | recog_data.n_dups = 0; |
2730 | recog_data.is_asm = false; |
2731 | |
2732 | switch (GET_CODE (body))
2733 | { |
2734 | case USE: |
2735 | case CLOBBER: |
2736 | case ASM_INPUT: |
2737 | case ADDR_VEC: |
2738 | case ADDR_DIFF_VEC: |
2739 | case VAR_LOCATION: |
2740 | case DEBUG_MARKER: |
2741 | return; |
2742 | |
2743 | case SET: |
2744 | if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
2745 | goto asm_insn; |
2746 | else |
2747 | goto normal_insn; |
2748 | case PARALLEL: |
2749 | if ((GET_CODE (XVECEXP (body, 0, 0)) == SET
2750 | && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
2751 | || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS
2752 | || GET_CODE (XVECEXP (body, 0, 0)) == ASM_INPUT)
2753 | goto asm_insn; |
2754 | else |
2755 | goto normal_insn; |
2756 | case ASM_OPERANDS: |
2757 | asm_insn: |
2758 | recog_data.n_operands = noperands = asm_noperands (body); |
2759 | if (noperands >= 0) |
2760 | { |
2761 | /* This insn is an `asm' with operands. */ |
2762 | |
2763 | /* expand_asm_operands makes sure there aren't too many operands. */ |
2764 | gcc_assert (noperands <= MAX_RECOG_OPERANDS);
2765 | |
2766 | /* Now get the operand values and constraints out of the insn. */ |
2767 | decode_asm_operands (body, recog_data.operand, |
2768 | recog_data.operand_loc, |
2769 | recog_data.constraints, |
2770 | recog_data.operand_mode, NULL);
2771 | memset (recog_data.is_operator, 0, sizeof recog_data.is_operator); |
2772 | if (noperands > 0) |
2773 | { |
2774 | const char *p = recog_data.constraints[0]; |
2775 | recog_data.n_alternatives = 1; |
2776 | while (*p) |
2777 | recog_data.n_alternatives += (*p++ == ','); |
2778 | } |
2779 | recog_data.is_asm = true; |
2780 | break; |
2781 | } |
2782 | fatal_insn_not_found (insn);
2783 | |
2784 | default: |
2785 | normal_insn: |
2786 | /* Ordinary insn: recognize it, get the operands via insn_extract |
2787 | and get the constraints. */ |
2788 | |
2789 | icode = recog_memoized (insn); |
2790 | if (icode < 0) |
2791 | fatal_insn_not_found (insn);
2792 | |
2793 | recog_data.n_operands = noperands = insn_data[icode].n_operands; |
2794 | recog_data.n_alternatives = insn_data[icode].n_alternatives; |
2795 | recog_data.n_dups = insn_data[icode].n_dups; |
2796 | |
2797 | insn_extract (insn); |
2798 | |
2799 | for (i = 0; i < noperands; i++) |
2800 | { |
2801 | recog_data.constraints[i] = insn_data[icode].operand[i].constraint; |
2802 | recog_data.is_operator[i] = insn_data[icode].operand[i].is_operator; |
2803 | recog_data.operand_mode[i] = insn_data[icode].operand[i].mode; |
2804 | /* A VOIDmode match_operand gets its mode from the real operand. */
2805 | if (recog_data.operand_mode[i] == VOIDmode)
2806 | recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]);
2807 | }
2808 | }
2809 | for (i = 0; i < noperands; i++)
2810 | recog_data.operand_type[i]
2811 | = (recog_data.constraints[i][0] == '=' ? OP_OUT
2812 | : recog_data.constraints[i][0] == '+' ? OP_INOUT
2813 | : OP_IN);
2814 | 
2815 | gcc_assert (recog_data.n_alternatives <= MAX_RECOG_ALTERNATIVES);
2816 | 
2817 | recog_data.insn = NULL;
2818 | which_alternative = -1;
2819 | } |
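/* [Editorial sketch -- not part of recog.cc.  After extract_insn, the
   operands, modes and constraint strings are available through the
   global recog_data; `insn' and an open dump_file are assumptions.

   extract_insn (insn);
   for (int op = 0; op < recog_data.n_operands; op++)
     fprintf (dump_file, "op %d: mode %s, constraint \"%s\"\n", op,
	      GET_MODE_NAME (recog_data.operand_mode[op]),
	      recog_data.constraints[op]);
  */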
2820 | |
2821 | /* Fill in OP_ALT_BASE for an instruction that has N_OPERANDS |
2822 | operands, N_ALTERNATIVES alternatives and constraint strings |
2823 | CONSTRAINTS. OP_ALT_BASE has N_ALTERNATIVES * N_OPERANDS entries |
2824 | and CONSTRAINTS has N_OPERANDS entries. OPLOC should be passed in |
2825 | if the insn is an asm statement and preprocessing should take the |
2826 | asm operands into account, e.g. to determine whether they could be |
2827 | addresses in constraints that require addresses; it should then |
2828 | point to an array of pointers to each operand. */ |
2829 | |
2830 | void |
2831 | preprocess_constraints (int n_operands, int n_alternatives, |
2832 | const char **constraints, |
2833 | operand_alternative *op_alt_base, |
2834 | rtx **oploc) |
2835 | { |
2836 | for (int i = 0; i < n_operands; i++) |
2837 | { |
2838 | int j; |
2839 | struct operand_alternative *op_alt; |
2840 | const char *p = constraints[i]; |
2841 | |
2842 | op_alt = op_alt_base; |
2843 | |
2844 | for (j = 0; j < n_alternatives; j++, op_alt += n_operands) |
2845 | { |
2846 | op_alt[i].cl = NO_REGS; |
2847 | op_alt[i].constraint = p; |
2848 | op_alt[i].matches = -1; |
2849 | op_alt[i].matched = -1; |
2850 | |
2851 | if (*p == '\0' || *p == ',') |
2852 | { |
2853 | op_alt[i].anything_ok = 1; |
2854 | continue; |
2855 | } |
2856 | |
2857 | for (;;) |
2858 | { |
2859 | char c = *p; |
2860 | if (c == '#') |
2861 | do |
2862 | c = *++p; |
2863 | while (c != ',' && c != '\0'); |
2864 | if (c == ',' || c == '\0') |
2865 | { |
2866 | p++; |
2867 | break; |
2868 | } |
2869 | |
2870 | switch (c) |
2871 | { |
2872 | case '?': |
2873 | op_alt[i].reject += 6; |
2874 | break; |
2875 | case '!': |
2876 | op_alt[i].reject += 600; |
2877 | break; |
2878 | case '&': |
2879 | op_alt[i].earlyclobber = 1; |
2880 | break; |
2881 | |
2882 | case '0': case '1': case '2': case '3': case '4': |
2883 | case '5': case '6': case '7': case '8': case '9': |
2884 | { |
2885 | char *end; |
2886 | op_alt[i].matches = strtoul (p, &end, 10); |
2887 | op_alt[op_alt[i].matches].matched = i; |
2888 | p = end; |
2889 | } |
2890 | continue; |
2891 | |
2892 | case 'X': |
2893 | op_alt[i].anything_ok = 1; |
2894 | break; |
2895 | |
2896 | case 'g': |
2897 | op_alt[i].cl = |
2898 | reg_class_subunion[(int) op_alt[i].cl][(int) GENERAL_REGS];
2899 | break; |
2900 | |
2901 | default: |
2902 | enum constraint_num cn = lookup_constraint (p); |
2903 | enum reg_class cl; |
2904 | switch (get_constraint_type (cn)) |
2905 | { |
2906 | case CT_REGISTER: |
2907 | cl = reg_class_for_constraint (cn); |
2908 | if (cl != NO_REGS) |
2909 | op_alt[i].cl = reg_class_subunion[op_alt[i].cl][cl];
2910 | break; |
2911 | |
2912 | case CT_CONST_INT: |
2913 | break; |
2914 | |
2915 | case CT_MEMORY: |
2916 | case CT_SPECIAL_MEMORY: |
2917 | case CT_RELAXED_MEMORY: |
2918 | op_alt[i].memory_ok = 1; |
2919 | break; |
2920 | |
2921 | case CT_ADDRESS: |
2922 | if (oploc && !address_operand (*oploc[i], VOIDmode))
2923 | break; |
2924 | |
2925 | op_alt[i].is_address = 1; |
2926 | op_alt[i].cl
2927 | = (reg_class_subunion
2928 | [(int) op_alt[i].cl]
2929 | [(int) base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
2930 | ADDRESS, SCRATCH)]);
2931 | break; |
2932 | |
2933 | case CT_FIXED_FORM: |
2934 | break; |
2935 | } |
2936 | break; |
2937 | } |
2938 | p += CONSTRAINT_LEN (c, p);
2939 | } |
2940 | } |
2941 | } |
2942 | } |
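/* [Editorial sketch -- not part of recog.cc.  Indexing example, under
   assumptions: for a hypothetical insn with two operands whose
   constraint strings are "=r,m" and "r,r", the table filled in above
   is laid out as alt * n_operands + op, so:

   const operand_alternative *a1_op0 = &op_alt_base[1 * 2 + 0];
   // a1_op0->memory_ok == 1: alternative 1 of operand 0 is "m".
   // op_alt_base[0 * 2 + 0].cl is the register class for "r".
  */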
2943 | |
2944 | /* Return an array of operand_alternative entries for
2945 | instruction ICODE. */
2946 | |
2947 | const operand_alternative * |
2948 | preprocess_insn_constraints (unsigned int icode) |
2949 | { |
2950 | gcc_checking_assert (IN_RANGE (icode, 0, NUM_INSN_CODES - 1));
2951 | if (this_target_recog->x_op_alt[icode]) |
2952 | return this_target_recog->x_op_alt[icode]; |
2953 | |
2954 | int n_operands = insn_data[icode].n_operands; |
2955 | if (n_operands == 0) |
2956 | return 0; |
2957 | /* Always provide at least one alternative so that which_op_alt () |
2958 | works correctly. If the instruction has 0 alternatives (i.e. all |
2959 | constraint strings are empty) then each operand in this alternative |
2960 | will have anything_ok set. */ |
2961 | int n_alternatives = MAX (insn_data[icode].n_alternatives, 1);
2962 | int n_entries = n_operands * n_alternatives;
2963 | 
2964 | operand_alternative *op_alt = XCNEWVEC (operand_alternative, n_entries);
2965 | const char **constraints = XALLOCAVEC (const char *, n_operands);
2966 | |
2967 | for (int i = 0; i < n_operands; ++i) |
2968 | constraints[i] = insn_data[icode].operand[i].constraint; |
2969 | preprocess_constraints (n_operands, n_alternatives, constraints, op_alt, |
2970 | NULL);
2971 | |
2972 | this_target_recog->x_op_alt[icode] = op_alt; |
2973 | return op_alt; |
2974 | } |
2975 | |
2976 | /* After calling extract_insn, you can use this function to extract some |
2977 | information from the constraint strings into a more usable form. |
2978 | The collected data is stored in recog_op_alt. */ |
2979 | |
2980 | void |
2981 | preprocess_constraints (rtx_insn *insn) |
2982 | { |
2983 | int icode = INSN_CODE (insn);
2984 | if (icode >= 0) |
2985 | recog_op_alt = preprocess_insn_constraints (icode); |
2986 | else |
2987 | { |
2988 | int n_operands = recog_data.n_operands; |
2989 | int n_alternatives = recog_data.n_alternatives; |
2990 | int n_entries = n_operands * n_alternatives; |
2991 | memset (asm_op_alt, 0, n_entries * sizeof (operand_alternative)); |
2992 | preprocess_constraints (n_operands, n_alternatives, |
2993 | recog_data.constraints, asm_op_alt, |
2994 | NULL);
2995 | recog_op_alt = asm_op_alt; |
2996 | } |
2997 | } |
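/* [Editorial sketch -- not part of recog.cc.  Typical pairing, under
   assumptions: constrain the insn first so which_alternative is set,
   then use which_op_alt () from recog.h to get the preprocessed row
   for the chosen alternative.

   extract_constrain_insn_cached (insn);
   preprocess_constraints (insn);
   const operand_alternative *op_alt = which_op_alt ();
  */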
2998 | |
2999 | /* Check the operands of an insn against the insn's operand constraints |
3000 | and return 1 if they match any of the alternatives in ALTERNATIVES. |
3001 | |
3002 | The information about the insn's operands, constraints, operand modes |
3003 | etc. is obtained from the global variables set up by extract_insn. |
3004 | |
3005 | WHICH_ALTERNATIVE is set to a number which indicates which |
3006 | alternative of constraints was matched: 0 for the first alternative, |
3007 | 1 for the next, etc. |
3008 | |
3009 | In addition, when two operands are required to match |
3010 | and it happens that the output operand is (reg) while the |
3011 | input operand is --(reg) or ++(reg) (a pre-inc or pre-dec), |
3012 | make the output operand look like the input. |
3013 | This is because the output operand is the one the template will print. |
3014 | |
3015 | This is used in final, just before printing the assembler code and by |
3016 | the routines that determine an insn's attribute. |
3017 | |
3018 | If STRICT is a positive nonzero value, it means that we have been |
3019 | called after reload has been completed. In that case, we must |
3020 | do all checks strictly. If it is zero, it means that we have been called |
3021 | before reload has completed. In that case, we first try to see if we can |
3022 | find an alternative that matches strictly. If not, we try again, this |
3023 | time assuming that reload will fix up the insn. This provides a "best |
3024 | guess" for the alternative and is used to compute attributes of insns prior |
3025 | to reload. A negative value of STRICT is used for this internal call. */ |
3026 | |
3027 | struct funny_match |
3028 | { |
3029 | int this_op, other; |
3030 | }; |
3031 | |
3032 | int |
3033 | constrain_operands (int strict, alternative_mask alternatives) |
3034 | { |
3035 | const char *constraints[MAX_RECOG_OPERANDS];
3036 | int matching_operands[MAX_RECOG_OPERANDS];
3037 | int earlyclobber[MAX_RECOG_OPERANDS];
3038 | int c;
3039 | 
3040 | struct funny_match funny_match[MAX_RECOG_OPERANDS];
3041 | int funny_match_index; |
3042 | |
3043 | which_alternative = 0; |
3044 | if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0) |
3045 | return 1; |
3046 | |
3047 | for (c = 0; c < recog_data.n_operands; c++) |
3048 | constraints[c] = recog_data.constraints[c]; |
3049 | |
3050 | do |
3051 | { |
3052 | int seen_earlyclobber_at = -1; |
3053 | int opno; |
3054 | int lose = 0; |
3055 | funny_match_index = 0; |
3056 | |
3057 | if (!TEST_BIT (alternatives, which_alternative))
3058 | { |
3059 | int i; |
3060 | |
3061 | for (i = 0; i < recog_data.n_operands; i++) |
3062 | constraints[i] = skip_alternative (constraints[i]); |
3063 | |
3064 | which_alternative++; |
3065 | continue; |
3066 | } |
3067 | |
3068 | for (opno = 0; opno < recog_data.n_operands; opno++) |
3069 | matching_operands[opno] = -1; |
3070 | |
3071 | for (opno = 0; opno < recog_data.n_operands; opno++) |
3072 | { |
3073 | rtx op = recog_data.operand[opno]; |
3074 | machine_mode mode = GET_MODE (op);
3075 | const char *p = constraints[opno]; |
3076 | int offset = 0; |
3077 | int win = 0; |
3078 | int val; |
3079 | int len; |
3080 | |
3081 | earlyclobber[opno] = 0; |
3082 | |
3083 | /* A unary operator may be accepted by the predicate, but it |
3084 | is irrelevant for matching constraints. */ |
3085 | /* For special_memory_operand, there could be a memory operand inside, |
3086 | and it would cause a mismatch for constraint_satisfied_p. */ |
3087 | if (UNARY_P (op) && op == extract_mem_from_operand (op))
3088 | op = XEXP (op, 0);
3089 | 
3090 | if (GET_CODE (op) == SUBREG)
3091 | {
3092 | if (REG_P (SUBREG_REG (op))
3093 | && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
3094 | offset = subreg_regno_offset (REGNO (SUBREG_REG (op)),
3095 | GET_MODE (SUBREG_REG (op)),
3096 | SUBREG_BYTE (op),
3097 | GET_MODE (op));
3098 | op = SUBREG_REG (op);
3099 | }
3100 | 
3101 | /* An empty constraint or empty alternative
3102 | allows anything which matched the pattern. */
3103 | if (*p == 0 || *p == ',')
3104 | win = 1;
3105 | 
3106 | do
3107 | switch (c = *p, len = CONSTRAINT_LEN (c, p), c)
3108 | { |
3109 | case '\0': |
3110 | len = 0; |
3111 | break; |
3112 | case ',': |
3113 | c = '\0'; |
3114 | break; |
3115 | |
3116 | case '#': |
3117 | /* Ignore rest of this alternative as far as |
3118 | constraint checking is concerned. */ |
3119 | do |
3120 | p++; |
3121 | while (*p && *p != ','); |
3122 | len = 0; |
3123 | break; |
3124 | |
3125 | case '&': |
3126 | earlyclobber[opno] = 1; |
3127 | if (seen_earlyclobber_at < 0) |
3128 | seen_earlyclobber_at = opno; |
3129 | break; |
3130 | |
3131 | case '0': case '1': case '2': case '3': case '4': |
3132 | case '5': case '6': case '7': case '8': case '9': |
3133 | { |
3134 | /* This operand must be the same as a previous one. |
3135 | This kind of constraint is used for instructions such |
3136 | as add when they take only two operands. |
3137 | |
3138 | Note that the lower-numbered operand is passed first. |
3139 | |
3140 | If we are not testing strictly, assume that this |
3141 | constraint will be satisfied. */ |
3142 | |
3143 | char *end; |
3144 | int match; |
3145 | |
3146 | match = strtoul (p, &end, 10); |
3147 | p = end; |
3148 | |
3149 | if (strict < 0) |
3150 | val = 1; |
3151 | else |
3152 | { |
3153 | rtx op1 = recog_data.operand[match]; |
3154 | rtx op2 = recog_data.operand[opno]; |
3155 | |
3156 | /* A unary operator may be accepted by the predicate, |
3157 | but it is irrelevant for matching constraints. */ |
3158 | if (UNARY_P (op1))
3159 | op1 = XEXP (op1, 0);
3160 | if (UNARY_P (op2))
3161 | op2 = XEXP (op2, 0);
3162 | |
3163 | val = operands_match_p (op1, op2); |
3164 | } |
3165 | |
3166 | matching_operands[opno] = match; |
3167 | matching_operands[match] = opno; |
3168 | |
3169 | if (val != 0) |
3170 | win = 1; |
3171 | |
3172 | /* If output is *x and input is *--x, arrange later |
3173 | to change the output to *--x as well, since the |
3174 | output op is the one that will be printed. */ |
3175 | if (val == 2 && strict > 0) |
3176 | { |
3177 | funny_match[funny_match_index].this_op = opno; |
3178 | funny_match[funny_match_index++].other = match; |
3179 | } |
3180 | } |
3181 | len = 0; |
3182 | break; |
3183 | |
3184 | case 'p': |
3185 | /* p is used for address_operands. When we are called by |
3186 | gen_reload, no one will have checked that the address is |
3187 | strictly valid, i.e., that all pseudos requiring hard regs |
3188 | have gotten them. We also want to make sure we have a |
3189 | valid mode. */ |
3190 | if ((GET_MODE (op) == VOIDmode
3191 | || SCALAR_INT_MODE_P (GET_MODE (op)))
3192 | && (strict <= 0
3193 | || (strict_memory_address_p
3194 | (recog_data.operand_mode[opno], op))))
3195 | win = 1; |
3196 | break; |
3197 | |
3198 | /* No need to check general_operand again; |
3199 | it was done in insn-recog.cc. Well, except that reload |
3200 | doesn't check the validity of its replacements, but |
3201 | that should only matter when there's a bug. */ |
3202 | case 'g': |
3203 | /* Anything goes unless it is a REG and really has a hard reg |
3204 | but the hard reg is not in the class GENERAL_REGS. */ |
3205 | if (REG_P (op))
3206 | {
3207 | if (strict < 0
3208 | || GENERAL_REGS == ALL_REGS
3209 | || (reload_in_progress
3210 | && REGNO (op) >= FIRST_PSEUDO_REGISTER)
3211 | || reg_fits_class_p (op, GENERAL_REGS, offset, mode))
3212 | win = 1;
3213 | }
3214 | else if (strict < 0 || general_operand (op, mode))
3215 | win = 1;
3216 | break; |
3217 | |
3218 | default: |
3219 | { |
3220 | enum constraint_num cn = lookup_constraint (p); |
3221 | enum reg_class cl = reg_class_for_constraint (cn); |
3222 | if (cl != NO_REGS) |
3223 | { |
3224 | if (strict < 0 |
3225 | || (strict == 0 |
3226 | && REG_P (op)
3227 | && REGNO (op) >= FIRST_PSEUDO_REGISTER)
3228 | || (strict == 0 && GET_CODE (op) == SCRATCH)
3229 | || (REG_P (op)
3230 | && reg_fits_class_p (op, cl, offset, mode)))
3231 | win = 1;
3232 | }
3233 | 
3234 | else if (constraint_satisfied_p (op, cn))
3235 | win = 1;
3236 | 
3237 | else if (insn_extra_memory_constraint (cn)
3238 | /* Every memory operand can be reloaded to fit. */
3239 | && ((strict < 0 && MEM_P (op))
3240 | /* Before reload, accept what reload can turn
3241 | into a mem. */
3242 | || (strict < 0 && CONSTANT_P (op))
3243 | /* Before reload, accept a pseudo or hard register,
3244 | since LRA can turn it into a mem. */
3245 | || (strict < 0 && targetm.lra_p () && REG_P (op))
3246 | /* During reload, accept a pseudo. */
3247 | || (reload_in_progress && REG_P (op)
3248 | && REGNO (op) >= FIRST_PSEUDO_REGISTER)))
3249 | win = 1;
3250 | else if (insn_extra_address_constraint (cn)
3251 | /* Every address operand can be reloaded to fit. */
3252 | && strict < 0)
3253 | win = 1;
3254 | /* Cater to architectures like IA-64 that define extra memory
3255 | constraints without using define_memory_constraint. */
3256 | else if (reload_in_progress
3257 | && REG_P (op)
3258 | && REGNO (op) >= FIRST_PSEUDO_REGISTER
3259 | && reg_renumber[REGNO (op)] < 0
3260 | && reg_equiv_mem (REGNO (op)) != 0
3261 | && constraint_satisfied_p
3262 | (reg_equiv_mem (REGNO (op)), cn))
3263 | win = 1; |
3264 | break; |
3265 | } |
3266 | } |
3267 | while (p += len, c); |
3268 | |
3269 | constraints[opno] = p; |
3270 | /* If this operand did not win somehow, |
3271 | this alternative loses. */ |
3272 | if (! win) |
3273 | lose = 1; |
3274 | } |
3275 | /* This alternative won; the operands are ok. |
3276 | Change whichever operands this alternative says to change. */ |
3277 | if (! lose) |
3278 | { |
3279 | int opno, eopno; |
3280 | |
3281 | /* See if any earlyclobber operand conflicts with some other |
3282 | operand. */ |
3283 | |
3284 | if (strict > 0 && seen_earlyclobber_at >= 0) |
3285 | for (eopno = seen_earlyclobber_at; |
3286 | eopno < recog_data.n_operands; |
3287 | eopno++) |
3288 | /* Ignore earlyclobber operands now in memory, |
3289 | because we would often report failure when we have |
3290 | two memory operands, one of which was formerly a REG. */ |
3291 | if (earlyclobber[eopno]
3292 | && REG_P (recog_data.operand[eopno]))
3293 | for (opno = 0; opno < recog_data.n_operands; opno++)
3294 | if ((MEM_P (recog_data.operand[opno])
3295 | || recog_data.operand_type[opno] != OP_OUT) |
3296 | && opno != eopno |
3297 | /* Ignore things like match_operator operands. */ |
3298 | && *recog_data.constraints[opno] != 0 |
3299 | && ! (matching_operands[opno] == eopno |
3300 | && operands_match_p (recog_data.operand[opno], |
3301 | recog_data.operand[eopno])) |
3302 | && ! safe_from_earlyclobber (recog_data.operand[opno], |
3303 | recog_data.operand[eopno])) |
3304 | lose = 1; |
3305 | |
3306 | if (! lose) |
3307 | { |
3308 | while (--funny_match_index >= 0) |
3309 | { |
3310 | recog_data.operand[funny_match[funny_match_index].other] |
3311 | = recog_data.operand[funny_match[funny_match_index].this_op]; |
3312 | } |
3313 | |
3314 | /* For operands without < or > constraints reject side-effects. */ |
3315 | if (AUTO_INC_DEC && recog_data.is_asm)
3316 | {
3317 | for (opno = 0; opno < recog_data.n_operands; opno++)
3318 | if (MEM_P (recog_data.operand[opno]))
3319 | switch (GET_CODE (XEXP (recog_data.operand[opno], 0)))
3320 | { |
3321 | case PRE_INC: |
3322 | case POST_INC: |
3323 | case PRE_DEC: |
3324 | case POST_DEC: |
3325 | case PRE_MODIFY: |
3326 | case POST_MODIFY: |
3327 | if (strchr (recog_data.constraints[opno], '<') == NULL
3328 | && strchr (recog_data.constraints[opno], '>')
3329 | == NULL)
3330 | return 0; |
3331 | break; |
3332 | default: |
3333 | break; |
3334 | } |
3335 | } |
3336 | |
3337 | return 1; |
3338 | } |
3339 | } |
3340 | |
3341 | which_alternative++; |
3342 | } |
3343 | while (which_alternative < recog_data.n_alternatives); |
3344 | |
3345 | which_alternative = -1; |
3346 | /* If we are about to reject this, but we are not to test strictly, |
3347 | try a very loose test. Only return failure if it fails also. */ |
3348 | if (strict == 0) |
3349 | return constrain_operands (-1, alternatives); |
3350 | else |
3351 | return 0; |
3352 | } |
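/* [Editorial sketch -- not part of recog.cc.  Summary of the STRICT
   protocol implemented above, for reference:

   constrain_operands (1, alts);   // post-reload: constraints must be
				   // satisfied literally
   constrain_operands (0, alts);   // pre-reload: try strictly first, then
				   // retry internally with STRICT == -1
   constrain_operands (-1, alts);  // loose: assume reload/LRA can fix up
				   // anything reloadable
  */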
3353 | |
3354 | /* Return true iff OPERAND (assumed to be a REG rtx) |
3355 | is a hard reg in class CLASS when its regno is offset by OFFSET |
3356 | and changed to mode MODE. |
3357 | If REG occupies multiple hard regs, all of them must be in CLASS. */ |
3358 | |
3359 | bool |
3360 | reg_fits_class_p (const_rtx operand, reg_class_t cl, int offset, |
3361 | machine_mode mode) |
3362 | { |
3363 | unsigned int regno = REGNO (operand);
3364 | |
3365 | if (cl == NO_REGS) |
3366 | return false; |
3367 | |
3368 | /* Regno must not be a pseudo register. Offset may be negative. */ |
3369 | return (HARD_REGISTER_NUM_P (regno)
3370 | && HARD_REGISTER_NUM_P (regno + offset)
3371 | && in_hard_reg_set_p (reg_class_contents[(int) cl], mode,
3372 | regno + offset));
3373 | } |
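/* [Editorial sketch -- not part of recog.cc.  For a multi-word mode the
   whole span of hard registers must sit in the class; `operand' here is
   a hypothetical hard REG rtx and DImode an illustrative choice.

   bool ok = reg_fits_class_p (operand, GENERAL_REGS, 0, DImode);
   // False if REGNO (operand) + 1 falls outside GENERAL_REGS.
  */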
3374 | |
3375 | /* Split single instruction. Helper function for split_all_insns and |
3376 | split_all_insns_noflow. Return last insn in the sequence if successful, |
3377 | or NULL if unsuccessful. */ |
3378 | |
3379 | static rtx_insn * |
3380 | split_insn (rtx_insn *insn) |
3381 | { |
3382 | /* Split insns here to get max fine-grain parallelism. */ |
3383 | rtx_insn *first = PREV_INSN (insn); |
3384 | rtx_insn *last = try_split (PATTERN (insn), insn, 1); |
3385 | rtx insn_set, last_set, note; |
3386 | |
3387 | if (last == insn) |
3388 | return NULL;
3389 | |
3390 | /* If the original instruction was a single set that was known to be |
3391 | equivalent to a constant, see if we can say the same about the last |
3392 | instruction in the split sequence. The two instructions must set |
3393 | the same destination. */ |
3394 | insn_set = single_set (insn); |
3395 | if (insn_set) |
3396 | { |
3397 | last_set = single_set (last); |
3398 | if (last_set && rtx_equal_p (SET_DEST (last_set), SET_DEST (insn_set)))
3399 | {
3400 | note = find_reg_equal_equiv_note (insn);
3401 | if (note && CONSTANT_P (XEXP (note, 0)))
3402 | set_unique_reg_note (last, REG_EQUAL, XEXP (note, 0));
3403 | else if (CONSTANT_P (SET_SRC (insn_set)))
3404 | set_unique_reg_note (last, REG_EQUAL,
3405 | copy_rtx (SET_SRC (insn_set)));
3406 | } |
3407 | } |
3408 | |
3409 | /* try_split returns the NOTE that INSN became. */ |
3410 | SET_INSN_DELETED (insn);
3411 | |
3412 | /* ??? Coddle to md files that generate subregs in post-reload |
3413 | splitters instead of computing the proper hard register. */ |
3414 | if (reload_completed && first != last) |
3415 | { |
3416 | first = NEXT_INSN (first); |
3417 | for (;;) |
3418 | { |
3419 | if (INSN_P (first))
3420 | cleanup_subreg_operands (first); |
3421 | if (first == last) |
3422 | break; |
3423 | first = NEXT_INSN (first); |
3424 | } |
3425 | } |
3426 | |
3427 | return last; |
3428 | } |
3429 | |
3430 | /* Split all insns in the function. */
3431 | |
3432 | void |
3433 | split_all_insns (void) |
3434 | { |
3435 | bool changed; |
3436 | bool need_cfg_cleanup = false; |
3437 | basic_block bb; |
3438 | |
3439 | auto_sbitmap blocks (last_basic_block_for_fn (cfun));
3440 | bitmap_clear (blocks); |
3441 | changed = false; |
3442 | |
3443 | FOR_EACH_BB_REVERSE_FN (bb, cfun)
3444 | { |
3445 | rtx_insn *insn, *next; |
3446 | bool finish = false; |
3447 | |
3448 | rtl_profile_for_bb (bb); |
3449 | for (insn = BB_HEAD (bb); !finish; insn = next)
3450 | { |
3451 | /* Can't use `next_real_insn' because that might go across |
3452 | CODE_LABELS and short-out basic blocks. */ |
3453 | next = NEXT_INSN (insn); |
3454 | finish = (insn == BB_END (bb));
3455 | |
3456 | /* If INSN has a REG_EH_REGION note and we split INSN, the |
3457 | resulting split may not have/need REG_EH_REGION notes. |
3458 | |
3459 | If that happens and INSN was the last reference to the |
3460 | given EH region, then the EH region will become unreachable. |
3461 | We cannot leave the unreachable blocks in the CFG as that |
3462 | will trigger a checking failure. |
3463 | |
3464 | So track if INSN has a REG_EH_REGION note. If so and we |
3465 | split INSN, then trigger a CFG cleanup. */ |
3466 | rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
3467 | if (INSN_P (insn))
3468 | { |
3469 | rtx set = single_set (insn); |
3470 | |
3471 | /* Don't split no-op move insns. These should silently |
3472 | disappear later in final. Splitting such insns would |
3473 | break the code that handles LIBCALL blocks. */ |
3474 | if (set && set_noop_p (set)) |
3475 | { |
3476 | /* Nops get in the way while scheduling, so delete them |
3477 | now if register allocation has already been done. It |
3478 | is too risky to try to do this before register |
3479 | allocation, and there are unlikely to be very many |
3480 | nops then anyways. */ |
3481 | if (reload_completed) |
3482 | delete_insn_and_edges (insn); |
3483 | if (note) |
3484 | need_cfg_cleanup = true; |
3485 | } |
3486 | else |
3487 | { |
3488 | if (split_insn (insn)) |
3489 | { |
3490 | bitmap_set_bit (blocks, bb->index); |
3491 | changed = true; |
3492 | if (note) |
3493 | need_cfg_cleanup = true; |
3494 | } |
3495 | } |
3496 | } |
3497 | } |
3498 | } |
3499 | |
3500 | default_rtl_profile (); |
3501 | if (changed) |
3502 | { |
3503 | find_many_sub_basic_blocks (blocks); |
3504 | |
3505 | /* Splitting could drop a REG_EH_REGION if it potentially
3506 | trapped in its original form, but does not in its split |
3507 | form. Consider a FLOAT_TRUNCATE which splits into a memory |
3508 | store/load pair and -fnon-call-exceptions. */ |
3509 | if (need_cfg_cleanup) |
3510 | cleanup_cfg (0); |
3511 | } |
3512 | |
3513 | checking_verify_flow_info (); |
3514 | } |
3515 | |
3516 | /* Same as split_all_insns, but do not expect CFG to be available. |
3517 | Used by machine dependent reorg passes. */ |
3518 | |
3519 | unsigned int |
3520 | split_all_insns_noflow (void) |
3521 | { |
3522 | rtx_insn *next, *insn; |
3523 | |
3524 | for (insn = get_insns (); insn; insn = next) |
3525 | { |
3526 | next = NEXT_INSN (insn); |
3527 | if (INSN_P (insn))
3528 | { |
3529 | /* Don't split no-op move insns. These should silently |
3530 | disappear later in final. Splitting such insns would |
3531 | break the code that handles LIBCALL blocks. */ |
3532 | rtx set = single_set (insn); |
3533 | if (set && set_noop_p (set)) |
3534 | { |
3535 | /* Nops get in the way while scheduling, so delete them |
3536 | now if register allocation has already been done. It |
3537 | is too risky to try to do this before register |
3538 | allocation, and there are unlikely to be very many |
3539 | nops then anyways. |
3540 | |
3541 | ??? Should we use delete_insn when the CFG isn't valid? */ |
3542 | if (reload_completed) |
3543 | delete_insn_and_edges (insn); |
3544 | } |
3545 | else |
3546 | split_insn (insn); |
3547 | } |
3548 | } |
3549 | return 0; |
3550 | } |
3551 | |
3552 | struct peep2_insn_data |
3553 | { |
3554 | rtx_insn *insn; |
3555 | regset live_before; |
3556 | }; |
3557 | |
3558 | static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
3559 | static int peep2_current; |
3560 | |
3561 | static bool peep2_do_rebuild_jump_labels; |
3562 | static bool peep2_do_cleanup_cfg; |
3563 | |
3564 | /* The number of instructions available to match a peep2. */ |
3565 | int peep2_current_count; |
3566 | |
3567 | /* A marker indicating the last insn of the block. The live_before regset |
3568 | for this element is correct, indicating DF_LIVE_OUT for the block. */ |
3569 | #define PEEP2_EOB invalid_insn_rtx
3570 | |
3571 | /* Wrap N to fit into the peep2_insn_data buffer. */ |
3572 | |
3573 | static int |
3574 | peep2_buf_position (int n) |
3575 | { |
3576 | if (n >= MAX_INSNS_PER_PEEP2 + 1)
3577 | n -= MAX_INSNS_PER_PEEP2 + 1;
3578 | return n; |
3579 | } |
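/* [Editorial sketch -- not part of recog.cc.  With MAX_INSNS_PER_PEEP2
   == 5 the buffer has six slots, so the wrap above acts as a cheap
   modulo for indexes that are at most one buffer-length out of range:

   int slot = peep2_buf_position (4 + 3);   // == 1, not 7
  */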
3580 | |
3581 | /* Return the Nth non-note insn after `current', or return NULL_RTX if it |
3582 | does not exist. Used by the recognizer to find the next insn to match |
3583 | in a multi-insn pattern. */ |
3584 | |
3585 | rtx_insn * |
3586 | peep2_next_insn (int n) |
3587 | { |
3588 | gcc_assert (n <= peep2_current_count);
3589 | |
3590 | n = peep2_buf_position (peep2_current + n); |
3591 | |
3592 | return peep2_insn_data[n].insn; |
3593 | } |
3594 | |
3595 | /* Return true if REGNO is dead before the Nth non-note insn |
3596 | after `current'. */ |
3597 | |
3598 | int |
3599 | peep2_regno_dead_p (int ofs, int regno) |
3600 | { |
3601 | gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);
3602 | 
3603 | ofs = peep2_buf_position (peep2_current + ofs);
3604 | 
3605 | gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
3606 | 
3607 | return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
3608 | } |
3609 | |
3610 | /* Similarly for a REG. */ |
3611 | |
3612 | int |
3613 | peep2_reg_dead_p (int ofs, rtx reg) |
3614 | { |
3615 | gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);
3616 | 
3617 | ofs = peep2_buf_position (peep2_current + ofs);
3618 | 
3619 | gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
3620 | 
3621 | unsigned int end_regno = END_REGNO (reg);
3622 | for (unsigned int regno = REGNO (reg); regno < end_regno; ++regno)
3623 | if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno))
3624 | return 0; |
3625 | return 1; |
3626 | } |
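/* [Editorial sketch -- not part of recog.cc.  These predicates are meant
   to be called from a machine description's define_peephole2 condition;
   the operand number below is a hypothetical example.

   if (peep2_reg_dead_p (2, operands[1]))
     {
       // operands[1] is dead before the second insn after the start of
       // the match, so it can be reused as scratch in the replacement.
     }  */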
3627 | |
3628 | /* Regno offset to be used in the register search. */ |
3629 | static int search_ofs; |
3630 | |
3631 | /* Try to find a hard register of mode MODE, matching the register class in |
3632 | CLASS_STR, which is available at the beginning of insn CURRENT_INSN and |
3633 | remains available until the end of LAST_INSN. LAST_INSN may be NULL_RTX, |
3634 | in which case the only condition is that the register must be available |
3635 | before CURRENT_INSN. |
3636 | Registers that already have bits set in REG_SET will not be considered. |
3637 | |
3638 | If an appropriate register is available, it will be returned and the |
3639 | corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is |
3640 | returned. */ |
3641 | |
3642 | rtx |
3643 | peep2_find_free_register (int from, int to, const char *class_str, |
3644 | machine_mode mode, HARD_REG_SET *reg_set) |
3645 | { |
3646 | enum reg_class cl; |
3647 | HARD_REG_SET live; |
3648 | df_ref def; |
3649 | int i; |
3650 | |
3651 | gcc_assert (from < MAX_INSNS_PER_PEEP2 + 1);
3652 | gcc_assert (to < MAX_INSNS_PER_PEEP2 + 1);
3653 | 
3654 | from = peep2_buf_position (peep2_current + from);
3655 | to = peep2_buf_position (peep2_current + to);
3656 | 
3657 | gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
3658 | REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before);
3659 | |
3660 | while (from != to) |
3661 | { |
3662 | gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
3663 | 
3664 | /* Don't use registers set or clobbered by the insn. */
3665 | FOR_EACH_INSN_DEF (def, peep2_insn_data[from].insn)
3666 | SET_HARD_REG_BIT (live, DF_REF_REGNO (def));
3667 | |
3668 | from = peep2_buf_position (from + 1); |
3669 | } |
3670 | |
3671 | cl = reg_class_for_constraint (lookup_constraint (class_str)); |
3672 | |
3673 | for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3674 | { |
3675 | int raw_regno, regno, success, j; |
3676 | |
3677 | /* Distribute the free registers as much as possible. */ |
3678 | raw_regno = search_ofs + i; |
3679 | if (raw_regno >= FIRST_PSEUDO_REGISTER)
3680 | raw_regno -= FIRST_PSEUDO_REGISTER;
3681 | #ifdef REG_ALLOC_ORDER
3682 | regno = reg_alloc_order[raw_regno];
3683 | #else |
3684 | regno = raw_regno; |
3685 | #endif |
3686 | |
3687 | /* Can it support the mode we need? */ |
3688 | if (!targetm.hard_regno_mode_ok (regno, mode)) |
3689 | continue; |
3690 | |
3691 | success = 1; |
3692 | for (j = 0; success && j < hard_regno_nregs (regno, mode); j++) |
3693 | { |
3694 | /* Don't allocate fixed registers. */ |
3695 | if (fixed_regs[regno + j])
3696 | { |
3697 | success = 0; |
3698 | break; |
3699 | } |
3700 | /* Don't allocate global registers. */ |
3701 | if (global_regs[regno + j]) |
3702 | { |
3703 | success = 0; |
3704 | break; |
3705 | } |
3706 | /* Make sure the register is of the right class. */ |
3707 | if (! TEST_HARD_REG_BIT (reg_class_contents[cl], regno + j))
3708 | {
3709 | success = 0;
3710 | break;
3711 | }
3712 | /* And that we don't create an extra save/restore. */
3713 | if (! crtl->abi->clobbers_full_reg_p (regno + j)
3714 | && ! df_regs_ever_live_p (regno + j))
3715 | { |
3716 | success = 0; |
3717 | break; |
3718 | } |
3719 | |
3720 | if (! targetm.hard_regno_scratch_ok (regno + j)) |
3721 | { |
3722 | success = 0; |
3723 | break; |
3724 | } |
3725 | |
3726 | /* And we don't clobber traceback for noreturn functions. */ |
3727 | if ((regno + j == FRAME_POINTER_REGNUM
3728 | || regno + j == HARD_FRAME_POINTER_REGNUM)
3729 | && (! reload_completed || frame_pointer_needed))
3730 | { |
3731 | success = 0; |
3732 | break; |
3733 | } |
3734 | |
3735 | if (TEST_HARD_REG_BIT (*reg_set, regno + j) |
3736 | || TEST_HARD_REG_BIT (live, regno + j)) |
3737 | { |
3738 | success = 0; |
3739 | break; |
3740 | } |
3741 | } |
3742 | |
3743 | if (success) |
3744 | { |
3745 | add_to_hard_reg_set (reg_set, mode, regno); |
3746 | |
3747 | /* Start the next search with the next register. */ |
3748 | if (++raw_regno >= FIRST_PSEUDO_REGISTER)
3749 | raw_regno = 0; |
3750 | search_ofs = raw_regno; |
3751 | |
3752 | return gen_rtx_REG (mode, regno); |
3753 | } |
3754 | } |
3755 | |
3756 | search_ofs = 0; |
3757 | return NULL_RTX;
3758 | } |
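/* [Editorial sketch -- not part of recog.cc.  A define_peephole2 body
   typically asks for a scratch register like this; `scratch_set' tracks
   registers already handed out while matching, and "r"/SImode are
   illustrative choices.

   HARD_REG_SET scratch_set;
   CLEAR_HARD_REG_SET (scratch_set);
   rtx tmp = peep2_find_free_register (0, 1, "r", SImode, &scratch_set);
   if (tmp == NULL_RTX)
     FAIL;   // FAIL is only meaningful in a .md expander context
  */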
3759 | |
3760 | /* Forget all currently tracked instructions, remembering only the
3761 | current LIVE regset. */
3762 | |
3763 | static void |
3764 | peep2_reinit_state (regset live) |
3765 | { |
3766 | int i; |
3767 | |
3768 | /* Indicate that all slots except the last hold invalid data. */
3769 | for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
3770 | peep2_insn_data[i].insn = NULL;
3771 | peep2_current_count = 0;
3772 | 
3773 | /* Indicate that the last slot contains live_after data. */
3774 | peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
3775 | peep2_current = MAX_INSNS_PER_PEEP2;
3776 | 
3777 | COPY_REG_SET (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);
3778 | } |
3779 | |
3780 | /* Copy the frame-related info of an insn (OLD_INSN) to the single
3781 | insn (NEW_INSN) that was obtained by splitting OLD_INSN. */
3782 | |
3783 | void |
3784 | copy_frame_info_to_split_insn (rtx_insn *old_insn, rtx_insn *new_insn) |
3785 | { |
3786 | bool any_note = false; |
3787 | rtx note; |
3788 | |
3789 | if (!RTX_FRAME_RELATED_P (old_insn))
3790 | return; |
3791 | |
3792 | RTX_FRAME_RELATED_P (new_insn) = 1;
3793 | |
3794 | /* Allow the backend to fill in a note during the split. */ |
3795 | for (note = REG_NOTES (new_insn); note; note = XEXP (note, 1))
3796 | switch (REG_NOTE_KIND (note))
3797 | { |
3798 | case REG_FRAME_RELATED_EXPR: |
3799 | case REG_CFA_DEF_CFA: |
3800 | case REG_CFA_ADJUST_CFA: |
3801 | case REG_CFA_OFFSET: |
3802 | case REG_CFA_REGISTER: |
3803 | case REG_CFA_EXPRESSION: |
3804 | case REG_CFA_RESTORE: |
3805 | case REG_CFA_SET_VDRAP: |
3806 | any_note = true; |
3807 | break; |
3808 | default: |
3809 | break; |
3810 | } |
3811 | |
3812 | /* If the backend didn't supply a note, copy one over. */ |
3813 | if (!any_note) |
3814 | for (note = REG_NOTES (old_insn); note; note = XEXP (note, 1))
3815 | switch (REG_NOTE_KIND (note))
3816 | { |
3817 | case REG_FRAME_RELATED_EXPR: |
3818 | case REG_CFA_DEF_CFA: |
3819 | case REG_CFA_ADJUST_CFA: |
3820 | case REG_CFA_OFFSET: |
3821 | case REG_CFA_REGISTER: |
3822 | case REG_CFA_EXPRESSION: |
3823 | case REG_CFA_RESTORE: |
3824 | case REG_CFA_SET_VDRAP: |
3825 | add_reg_note (new_insn, REG_NOTE_KIND (note), XEXP (note, 0));
3826 | any_note = true; |
3827 | break; |
3828 | default: |
3829 | break; |
3830 | } |
3831 | |
3832 | /* If there still isn't a note, make sure the unwind info sees the |
3833 | same expression as before the split. */ |
3834 | if (!any_note) |
3835 | { |
3836 | rtx old_set, new_set; |
3837 | |
3838 | /* The old insn had better have been simple, or annotated. */ |
3839 | old_set = single_set (old_insn); |
3840 | gcc_assert (old_set != NULL);
3841 | |
3842 | new_set = single_set (new_insn); |
3843 | if (!new_set || !rtx_equal_p (new_set, old_set)) |
3844 | add_reg_note (new_insn, REG_FRAME_RELATED_EXPR, old_set); |
3845 | } |
3846 | |
3847 | /* Copy prologue/epilogue status. This is required in order to keep |
3848 | proper placement of EPILOGUE_BEG and the DW_CFA_remember_state. */ |
3849 | maybe_copy_prologue_epilogue_insn (old_insn, new_insn); |
3850 | } |
3851 | |
3852 | /* While scanning basic block BB, we found a match of length MATCH_LEN, |
3853 | starting at INSN. Perform the replacement, removing the old insns and |
3854 | replacing them with ATTEMPT. Returns the last insn emitted, or NULL |
3855 | if the replacement is rejected. */ |
3856 | |
3857 | static rtx_insn * |
3858 | peep2_attempt (basic_block bb, rtx_insn *insn, int match_len, rtx_insn *attempt) |
3859 | { |
3860 | int i; |
3861 | rtx_insn *last, *before_try, *x; |
3862 | rtx eh_note, as_note; |
3863 | rtx_insn *old_insn; |
3864 | rtx_insn *new_insn; |
3865 | bool was_call = false; |
3866 | |
3867 | /* If we are splitting an RTX_FRAME_RELATED_P insn, do not allow it to |
3868 | match more than one insn, or to be split into more than one insn. */ |
3869 | old_insn = peep2_insn_data[peep2_current].insn; |
3870 | if (RTX_FRAME_RELATED_P (old_insn))
3871 | {
3872 | if (match_len != 0)
3873 | return NULL;
3874 | |
3875 | /* Look for one "active" insn. I.e. ignore any "clobber" insns that |
3876 | may be in the stream for the purpose of register allocation. */ |
3877 | if (active_insn_p (attempt)) |
3878 | new_insn = attempt; |
3879 | else |
3880 | new_insn = next_active_insn (attempt); |
3881 | if (next_active_insn (new_insn)) |
3882 | return NULL;
3883 | |
3884 | /* We have a 1-1 replacement. Copy over any frame-related info. */ |
3885 | copy_frame_info_to_split_insn (old_insn, new_insn); |
3886 | } |
3887 | |
3888 | /* If we are splitting a CALL_INSN, look for the CALL_INSN |
3889 | in SEQ and copy our CALL_INSN_FUNCTION_USAGE and other |
3890 | cfg-related call notes. */ |
3891 | for (i = 0; i <= match_len; ++i) |
3892 | { |
3893 | int j; |
3894 | rtx note; |
3895 | |
3896 | j = peep2_buf_position (peep2_current + i); |
3897 | old_insn = peep2_insn_data[j].insn; |
3898 | if (!CALL_P (old_insn))
3899 | continue;
3900 | was_call = true;
3901 | 
3902 | new_insn = attempt;
3903 | while (new_insn != NULL_RTX)
3904 | {
3905 | if (CALL_P (new_insn))
3906 | break;
3907 | new_insn = NEXT_INSN (new_insn);
3908 | }
3909 | 
3910 | gcc_assert (new_insn != NULL_RTX);
3911 | 
3912 | CALL_INSN_FUNCTION_USAGE (new_insn)
3913 | = CALL_INSN_FUNCTION_USAGE (old_insn);
3914 | SIBLING_CALL_P (new_insn) = SIBLING_CALL_P (old_insn);
3915 | |
3916 | for (note = REG_NOTES (old_insn);
3917 | note;
3918 | note = XEXP (note, 1))
3919 | switch (REG_NOTE_KIND (note))
3920 | {
3921 | case REG_NORETURN:
3922 | case REG_SETJMP:
3923 | case REG_TM:
3924 | case REG_CALL_NOCF_CHECK:
3925 | add_reg_note (new_insn, REG_NOTE_KIND (note),
3926 | XEXP (note, 0));
3927 | break; |
3928 | default: |
3929 | /* Discard all other reg notes. */ |
3930 | break; |
3931 | } |
3932 | |
3933 | /* Croak if there is another call in the sequence. */ |
3934 | while (++i <= match_len) |
3935 | { |
3936 | j = peep2_buf_position (peep2_current + i); |
3937 | old_insn = peep2_insn_data[j].insn; |
3938 | gcc_assert (!CALL_P (old_insn)); |
3939 | } |
3940 | break; |
3941 | } |
3942 | |
3943 | /* If we matched any instruction that had a REG_ARGS_SIZE, then |
3944 | move those notes over to the new sequence. */ |
3945 | as_note = NULL; |
3946 | for (i = match_len; i >= 0; --i) |
3947 | { |
3948 | int j = peep2_buf_position (peep2_current + i); |
3949 | old_insn = peep2_insn_data[j].insn; |
3950 | |
3951 | as_note = find_reg_note (old_insn, REG_ARGS_SIZE, NULL); |
3952 | if (as_note) |
3953 | break; |
3954 | } |
3955 | |
3956 | i = peep2_buf_position (peep2_current + match_len); |
3957 | eh_note = find_reg_note (peep2_insn_data[i].insn, REG_EH_REGION, NULL_RTX); |
3958 | |
3959 | /* Replace the old sequence with the new. */ |
3960 | rtx_insn *peepinsn = peep2_insn_data[i].insn; |
3961 | last = emit_insn_after_setloc (attempt, |
3962 | peep2_insn_data[i].insn, |
3963 | INSN_LOCATION (peepinsn)); |
3964 | if (JUMP_P (peepinsn) && JUMP_P (last)) |
3965 | CROSSING_JUMP_P (last) = CROSSING_JUMP_P (peepinsn); |
3966 | before_try = PREV_INSN (insn); |
3967 | delete_insn_chain (insn, peep2_insn_data[i].insn, false); |
3968 | |
3969 | /* Re-insert the EH_REGION notes. */ |
3970 | if (eh_note || (was_call && nonlocal_goto_handler_labels)) |
3971 | { |
3972 | edge eh_edge; |
3973 | edge_iterator ei; |
3974 | |
3975 | FOR_EACH_EDGE (eh_edge, ei, bb->succs) |
3976 | if (eh_edge->flags & (EDGE_EH | EDGE_ABNORMAL_CALL)) |
3977 | break; |
3978 | |
3979 | if (eh_note) |
3980 | copy_reg_eh_region_note_backward (eh_note, last, before_try); |
3981 | |
3982 | if (eh_edge) |
3983 | for (x = last; x != before_try; x = PREV_INSN (x)) |
3984 | if (x != BB_END (bb) |
3985 | && (can_throw_internal (x) |
3986 | || can_nonlocal_goto (x))) |
3987 | { |
3988 | edge nfte, nehe; |
3989 | int flags; |
3990 | |
3991 | nfte = split_block (bb, x); |
3992 | flags = (eh_edge->flags |
3993 | & (EDGE_EH | EDGE_ABNORMAL)); |
3994 | if (CALL_P (x)) |
3995 | flags |= EDGE_ABNORMAL_CALL; |
3996 | nehe = make_edge (nfte->src, eh_edge->dest, |
3997 | flags); |
3998 | |
3999 | nehe->probability = eh_edge->probability; |
4000 | nfte->probability = nehe->probability.invert (); |
4001 | |
4002 | peep2_do_cleanup_cfg |= purge_dead_edges (nfte->dest); |
4003 | bb = nfte->src; |
4004 | eh_edge = nehe; |
4005 | } |
4006 | |
4007 | /* A possibly trapping insn may have been turned into a |
4008 | non-trapping one.  Zap any now-dummy outgoing edges.  */ |
4009 | peep2_do_cleanup_cfg |= purge_dead_edges (bb); |
4010 | } |
4011 | |
4012 | /* Re-insert the ARGS_SIZE notes. */ |
4013 | if (as_note) |
4014 | fixup_args_size_notes (before_try, last, get_args_size (as_note)); |
4015 | |
4016 | /* Scan the new insns for embedded side effects and add appropriate |
4017 | REG_INC notes. */ |
4018 | if (AUTO_INC_DEC) |
4019 | for (x = last; x != before_try; x = PREV_INSN (x)) |
4020 | if (NONDEBUG_INSN_P (x)) |
4021 | add_auto_inc_notes (x, PATTERN (x)); |
4022 | |
4023 | /* If we generated a jump instruction, it won't have |
4024 | JUMP_LABEL set. Recompute after we're done. */ |
4025 | for (x = last; x != before_try; x = PREV_INSN (x)) |
4026 | if (JUMP_P (x)) |
4027 | { |
4028 | peep2_do_rebuild_jump_labels = true; |
4029 | break; |
4030 | } |
4031 | |
4032 | return last; |
4033 | } |
4034 | |
4035 | /* After performing a replacement in basic block BB, fix up the life |
4036 | information in our buffer. LAST is the last of the insns that we |
4037 | emitted as a replacement. PREV is the insn before the start of |
4038 | the replacement. MATCH_LEN is the number of instructions that were |
4039 | matched, and which now need to be replaced in the buffer. */ |
4040 | |
4041 | static void |
4042 | peep2_update_life (basic_block bb, int match_len, rtx_insn *last, |
4043 | rtx_insn *prev) |
4044 | { |
4045 | int i = peep2_buf_position (peep2_current + match_len + 1); |
4046 | rtx_insn *x; |
4047 | regset_head live; |
4048 | |
4049 | INIT_REG_SET (&live); |
4050 | COPY_REG_SET (&live, peep2_insn_data[i].live_before); |
4051 | |
4052 | gcc_assert (peep2_current_count >= match_len + 1); |
4053 | peep2_current_count -= match_len + 1; |
4054 | |
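     | /* Walk backwards over the replacement insns, rescanning each for df |
     |    and re-entering it into the buffer together with its recomputed |
     |    live-before set.  */ |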
4055 | x = last; |
4056 | do |
4057 | { |
4058 | if (INSN_P (x)) |
4059 | { |
4060 | df_insn_rescan (x); |
4061 | if (peep2_current_count < MAX_INSNS_PER_PEEP2) |
4062 | { |
4063 | peep2_current_count++; |
4064 | if (--i < 0) |
4065 | i = MAX_INSNS_PER_PEEP2; |
4066 | peep2_insn_data[i].insn = x; |
4067 | df_simulate_one_insn_backwards (bb, x, &live); |
4068 | COPY_REG_SET (peep2_insn_data[i].live_before, &live); |
4069 | } |
4070 | } |
4071 | x = PREV_INSN (x); |
4072 | } |
4073 | while (x != prev); |
4074 | CLEAR_REG_SET (&live); |
4075 | |
4076 | peep2_current = i; |
4077 | } |
4078 | |
4079 | /* Add INSN, which is in BB, at the end of the peep2 insn buffer if possible. |
4080 | Return true if we added it, false otherwise. The caller will try to match |
4081 | peepholes against the buffer if we return false; otherwise it will try to |
4082 | add more instructions to the buffer. */ |
4083 | |
4084 | static bool |
4085 | peep2_fill_buffer (basic_block bb, rtx_insn *insn, regset live) |
4086 | { |
4087 | int pos; |
4088 | |
4089 | /* Once we have filled the maximum number of insns the buffer can hold, |
4090 | allow the caller to match the insns against peepholes. We wait until |
4091 | the buffer is full in case the target has similar peepholes of different |
4092 | length; we always want to match the longest if possible. */ |
4093 | if (peep2_current_count == MAX_INSNS_PER_PEEP2) |
4094 | return false; |
4095 | |
4096 | /* If an insn has RTX_FRAME_RELATED_P set, do not allow it to be matched with |
4097 | any other pattern, lest it change the semantics of the frame info. */ |
4098 | if (RTX_FRAME_RELATED_P (insn)) |
4099 | { |
4100 | /* Let the buffer drain first. */ |
4101 | if (peep2_current_count > 0) |
4102 | return false; |
4103 | /* Now the insn will be the only thing in the buffer. */ |
4104 | } |
4105 | |
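     | /* Append INSN at the end of the buffer and step the live set |
     |    forwards across it.  */ |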
4106 | pos = peep2_buf_position (peep2_current + peep2_current_count); |
4107 | peep2_insn_data[pos].insn = insn; |
4108 | COPY_REG_SET (peep2_insn_data[pos].live_before, live); |
4109 | peep2_current_count++; |
4110 | |
4111 | df_simulate_one_insn_forwards (bb, insn, live); |
4112 | return true; |
4113 | } |
4114 | |
4115 | /* Perform the peephole2 optimization pass. */ |
4116 | |
4117 | static void |
4118 | peephole2_optimize (void) |
4119 | { |
4120 | rtx_insn *insn; |
4121 | bitmap live; |
4122 | int i; |
4123 | basic_block bb; |
4124 | |
4125 | peep2_do_cleanup_cfg = false; |
4126 | peep2_do_rebuild_jump_labels = false; |
4127 | |
4128 | df_set_flags (DF_LR_RUN_DCE); |
4129 | df_note_add_problem (); |
4130 | df_analyze (); |
4131 | |
4132 | /* Initialize the regsets we're going to use. */ |
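     | /* There are MAX_INSNS_PER_PEEP2 + 1 slots: one per buffered insn plus |
     |    one for the PEEP2_EOB end-of-block sentinel.  */ |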
4133 | for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i) |
4134 | peep2_insn_data[i].live_before = BITMAP_ALLOC (&reg_obstack); |
4135 | search_ofs = 0; |
4136 | live = BITMAP_ALLOC (&reg_obstack); |
4137 | |
4138 | FOR_EACH_BB_REVERSE_FN (bb, cfun) |
4139 | { |
4140 | bool past_end = false; |
4141 | int pos; |
4142 | |
4143 | rtl_profile_for_bb (bb); |
4144 | |
4145 | /* Start up propagation. */ |
4146 | bitmap_copy (live, DF_LR_IN (bb)); |
4147 | df_simulate_initialize_forwards (bb, live); |
4148 | peep2_reinit_state (live); |
4149 | |
4150 | insn = BB_HEAD (bb); |
4151 | for (;;) |
4152 | { |
4153 | rtx_insn *attempt, *head; |
4154 | int match_len; |
4155 | |
4156 | if (!past_end && !NONDEBUG_INSN_P (insn)) |
4157 | { |
4158 | next_insn: |
4159 | insn = NEXT_INSN (insn); |
4160 | if (insn == NEXT_INSN (BB_END (bb))) |
4161 | past_end = true; |
4162 | continue; |
4163 | } |
4164 | if (!past_end && peep2_fill_buffer (bb, insn, live)) |
4165 | goto next_insn; |
4166 | |
4167 | /* If we did not fill an empty buffer, it signals the end of the |
4168 | block. */ |
4169 | if (peep2_current_count == 0) |
4170 | break; |
4171 | |
4172 | /* The buffer filled to the current maximum, so try to match. */ |
4173 | |
4174 | pos = peep2_buf_position (peep2_current + peep2_current_count); |
4175 | peep2_insn_data[pos].insn = PEEP2_EOB; |
4176 | COPY_REG_SET (peep2_insn_data[pos].live_before, live); |
4177 | |
4178 | /* Match the peephole. */ |
4179 | head = peep2_insn_data[peep2_current].insn; |
4180 | attempt = peephole2_insns (PATTERN (head), head, &match_len); |
4181 | if (attempt != NULL) |
4182 | { |
4183 | rtx_insn *last = peep2_attempt (bb, head, match_len, attempt); |
4184 | if (last) |
4185 | { |
4186 | peep2_update_life (bb, match_len, last, PREV_INSN (attempt)); |
4187 | continue; |
4188 | } |
4189 | } |
4190 | |
4191 | /* No match: advance the buffer by one insn. */ |
4192 | peep2_current = peep2_buf_position (peep2_current + 1); |
4193 | peep2_current_count--; |
4194 | } |
4195 | } |
4196 | |
4197 | default_rtl_profile (); |
4198 | for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i) |
4199 | BITMAP_FREE (peep2_insn_data[i].live_before); |
4200 | BITMAP_FREE (live); |
4201 | if (peep2_do_rebuild_jump_labels) |
4202 | rebuild_jump_labels (get_insns ()); |
4203 | if (peep2_do_cleanup_cfg) |
4204 | cleanup_cfg (CLEANUP_CFG_CHANGED); |
4205 | } |
4206 | |
4207 | /* Common predicates for use with define_bypass. */ |
4208 | |
4209 | /* Helper function for store_data_bypass_p, handle just a single SET |
4210 | IN_SET. */ |
4211 | |
4212 | static bool |
4213 | store_data_bypass_p_1 (rtx_insn *out_insn, rtx in_set) |
4214 | { |
4215 | if (!MEM_P (SET_DEST (in_set))) |
4216 | return false; |
4217 | |
4218 | rtx out_set = single_set (out_insn); |
4219 | if (out_set) |
4220 | return !reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_set)); |
4221 | |
4222 | rtx out_pat = PATTERN (out_insn); |
4223 | if (GET_CODE (out_pat) != PARALLEL) |
4224 | return false; |
4225 | |
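     | /* OUT_INSN is a PARALLEL: no SET in it may write a register that |
     |    appears in the address of the store.  */ |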
4226 | for (int i = 0; i < XVECLEN (out_pat, 0); i++) |
4227 | { |
4228 | rtx out_exp = XVECEXP (out_pat, 0, i); |
4229 | |
4230 | if (GET_CODE (out_exp) == CLOBBER || GET_CODE (out_exp) == USE) |
4231 | continue; |
4232 | |
4233 | gcc_assert (GET_CODE (out_exp) == SET); |
4234 | |
4235 | if (reg_mentioned_p (SET_DEST (out_exp), SET_DEST (in_set))) |
4236 | return false; |
4237 | } |
4238 | |
4239 | return true; |
4240 | } |
4241 | |
4242 | /* True if the dependency between OUT_INSN and IN_INSN is on the store |
4243 | data not the address operand(s) of the store. IN_INSN and OUT_INSN |
4244 | must be either a single_set or a PARALLEL with SETs inside. */ |
4245 | |
4246 | int |
4247 | store_data_bypass_p (rtx_insn *out_insn, rtx_insn *in_insn) |
4248 | { |
4249 | rtx in_set = single_set (in_insn); |
4250 | if (in_set) |
4251 | return store_data_bypass_p_1 (out_insn, in_set); |
4252 | |
4253 | rtx in_pat = PATTERN (in_insn); |
4254 | if (GET_CODE (in_pat) != PARALLEL) |
4255 | return false; |
4256 | |
4257 | for (int i = 0; i < XVECLEN (in_pat, 0); i++) |
4258 | { |
4259 | rtx in_exp = XVECEXP (in_pat, 0, i); |
4260 | |
4261 | if (GET_CODE (in_exp) == CLOBBER || GET_CODE (in_exp) == USE) |
4262 | continue; |
4263 | |
4264 | gcc_assert (GET_CODE (in_exp) == SET); |
4265 | |
4266 | if (!store_data_bypass_p_1 (out_insn, in_exp)) |
4267 | return false; |
4268 | } |
4269 | |
4270 | return true; |
4271 | } |
4272 | |
4273 | /* True if the dependency between OUT_INSN and IN_INSN is in the IF_THEN_ELSE |
4274 | condition, and not the THEN or ELSE branch.  OUT_INSN may be either a single |
4275 | or multiple set; IN_INSN should be single_set for the result to be exact, |
4276 | but for convenience of insn categorization may be any JUMP or CALL insn.  */ |
4277 | |
4278 | int |
4279 | if_test_bypass_p (rtx_insn *out_insn, rtx_insn *in_insn) |
4280 | { |
4281 | rtx out_set, in_set; |
4282 | |
4283 | in_set = single_set (in_insn); |
4284 | if (! in_set) |
4285 | { |
4286 | gcc_assert (JUMP_P (in_insn) || CALL_P (in_insn)); |
4287 | return false; |
4288 | } |
4289 | |
4290 | if (GET_CODE (SET_SRC (in_set)) != IF_THEN_ELSE) |
4291 | return false; |
4292 | in_set = SET_SRC (in_set); |
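     | /* IN_SET is now the IF_THEN_ELSE itself; operands 1 and 2 are its |
     |    THEN and ELSE arms.  */ |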
4293 | |
4294 | out_set = single_set (out_insn); |
4295 | if (out_set) |
4296 | { |
4297 | if (reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 1)) |
4298 | || reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 2))) |
4299 | return false; |
4300 | } |
4301 | else |
4302 | { |
4303 | rtx out_pat; |
4304 | int i; |
4305 | |
4306 | out_pat = PATTERN (out_insn); |
4307 | gcc_assert (GET_CODE (out_pat) == PARALLEL); |
4308 | |
4309 | for (i = 0; i < XVECLEN (out_pat, 0); i++) |
4310 | { |
4311 | rtx exp = XVECEXP (out_pat, 0, i); |
4312 | |
4313 | if (GET_CODE (exp) == CLOBBER) |
4314 | continue; |
4315 | |
4316 | gcc_assert (GET_CODE (exp) == SET); |
4317 | |
4318 | if (reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 1)) |
4319 | || reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 2))) |
4320 | return false; |
4321 | } |
4322 | } |
4323 | |
4324 | return true; |
4325 | } |
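     | /* Analogously, a define_bypass can use this predicate so that the |
     |    bypass applies only when the consumer reads the producer's result |
     |    in the tested condition (hypothetical unit names): |
     | |
     |      (define_bypass 2 "fake_setcc" "fake_cmov" "if_test_bypass_p") |
     |  */ |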
4326 | |
4327 | static unsigned int |
4328 | rest_of_handle_peephole2 (void) |
4329 | { |
4330 | if (HAVE_peephole2) |
4331 | peephole2_optimize (); |
4332 | |
4333 | return 0; |
4334 | } |
4335 | |
4336 | namespace { |
4337 | |
4338 | const pass_data pass_data_peephole2 = |
4339 | { |
4340 | RTL_PASS, /* type */ |
4341 | "peephole2", /* name */ |
4342 | OPTGROUP_NONE, /* optinfo_flags */ |
4343 | TV_PEEPHOLE2, /* tv_id */ |
4344 | 0, /* properties_required */ |
4345 | 0, /* properties_provided */ |
4346 | 0, /* properties_destroyed */ |
4347 | 0, /* todo_flags_start */ |
4348 | TODO_df_finish, /* todo_flags_finish */ |
4349 | }; |
4350 | |
4351 | class pass_peephole2 : public rtl_opt_pass |
4352 | { |
4353 | public: |
4354 | pass_peephole2 (gcc::context *ctxt) |
4355 | : rtl_opt_pass (pass_data_peephole2, ctxt) |
4356 | {} |
4357 | |
4358 | /* opt_pass methods: */ |
4359 | /* The epiphany backend creates a second instance of this pass, so we need |
4360 | a clone method. */ |
4361 | opt_pass * clone () final override { return new pass_peephole2 (m_ctxt); } |
4362 | bool gate (function *) final override |
4363 | { |
4364 | return (optimize > 0 && flag_peephole2); |
4365 | } |
4366 | unsigned int execute (function *) final override |
4367 | { |
4368 | return rest_of_handle_peephole2 (); |
4369 | } |
4370 | |
4371 | }; // class pass_peephole2 |
4372 | |
4373 | } // anon namespace |
4374 | |
4375 | rtl_opt_pass * |
4376 | make_pass_peephole2 (gcc::context *ctxt) |
4377 | { |
4378 | return new pass_peephole2 (ctxt); |
4379 | } |
4380 | |
4381 | namespace { |
4382 | |
4383 | const pass_data pass_data_split_all_insns = |
4384 | { |
4385 | RTL_PASS, /* type */ |
4386 | "split1", /* name */ |
4387 | OPTGROUP_NONE, /* optinfo_flags */ |
4388 | TV_NONE, /* tv_id */ |
4389 | 0, /* properties_required */ |
4390 | PROP_rtl_split_insns, /* properties_provided */ |
4391 | 0, /* properties_destroyed */ |
4392 | 0, /* todo_flags_start */ |
4393 | 0, /* todo_flags_finish */ |
4394 | }; |
4395 | |
4396 | class pass_split_all_insns : public rtl_opt_pass |
4397 | { |
4398 | public: |
4399 | pass_split_all_insns (gcc::context *ctxt) |
4400 | : rtl_opt_pass (pass_data_split_all_insns, ctxt) |
4401 | {} |
4402 | |
4403 | /* opt_pass methods: */ |
4404 | /* The epiphany backend creates a second instance of this pass, so |
4405 | we need a clone method. */ |
4406 | opt_pass * clone () final override |
4407 | { |
4408 | return new pass_split_all_insns (m_ctxt); |
4409 | } |
4410 | unsigned int execute (function *) final override |
4411 | { |
4412 | split_all_insns (); |
4413 | return 0; |
4414 | } |
4415 | |
4416 | }; // class pass_split_all_insns |
4417 | |
4418 | } // anon namespace |
4419 | |
4420 | rtl_opt_pass * |
4421 | make_pass_split_all_insns (gcc::context *ctxt) |
4422 | { |
4423 | return new pass_split_all_insns (ctxt); |
4424 | } |
4425 | |
4426 | namespace { |
4427 | |
4428 | const pass_data pass_data_split_after_reload = |
4429 | { |
4430 | RTL_PASS, /* type */ |
4431 | "split2", /* name */ |
4432 | OPTGROUP_NONE, /* optinfo_flags */ |
4433 | TV_NONE, /* tv_id */ |
4434 | 0, /* properties_required */ |
4435 | 0, /* properties_provided */ |
4436 | 0, /* properties_destroyed */ |
4437 | 0, /* todo_flags_start */ |
4438 | 0, /* todo_flags_finish */ |
4439 | }; |
4440 | |
4441 | class pass_split_after_reload : public rtl_opt_pass |
4442 | { |
4443 | public: |
4444 | pass_split_after_reload (gcc::context *ctxt) |
4445 | : rtl_opt_pass (pass_data_split_after_reload, ctxt) |
4446 | {} |
4447 | |
4448 | /* opt_pass methods: */ |
4449 | bool gate (function *) final override |
4450 | { |
4451 | /* If optimizing, then go ahead and split insns now. */ |
4452 | return optimize > 0; |
4453 | } |
4454 | |
4455 | unsigned int execute (function *) final override |
4456 | { |
4457 | split_all_insns (); |
4458 | return 0; |
4459 | } |
4460 | |
4461 | }; // class pass_split_after_reload |
4462 | |
4463 | } // anon namespace |
4464 | |
4465 | rtl_opt_pass * |
4466 | make_pass_split_after_reload (gcc::context *ctxt) |
4467 | { |
4468 | return new pass_split_after_reload (ctxt); |
4469 | } |
4470 | |
4471 | static bool |
4472 | enable_split_before_sched2 (void) |
4473 | { |
4474 | #ifdef INSN_SCHEDULING |
4475 | return optimize > 0 && flag_schedule_insns_after_reload; |
4476 | #else |
4477 | return false; |
4478 | #endif |
4479 | } |
4480 | |
4481 | namespace { |
4482 | |
4483 | const pass_data pass_data_split_before_sched2 = |
4484 | { |
4485 | RTL_PASS, /* type */ |
4486 | "split3", /* name */ |
4487 | OPTGROUP_NONE, /* optinfo_flags */ |
4488 | TV_NONE, /* tv_id */ |
4489 | 0, /* properties_required */ |
4490 | 0, /* properties_provided */ |
4491 | 0, /* properties_destroyed */ |
4492 | 0, /* todo_flags_start */ |
4493 | 0, /* todo_flags_finish */ |
4494 | }; |
4495 | |
4496 | class pass_split_before_sched2 : public rtl_opt_pass |
4497 | { |
4498 | public: |
4499 | pass_split_before_sched2 (gcc::context *ctxt) |
4500 | : rtl_opt_pass (pass_data_split_before_sched2, ctxt) |
4501 | {} |
4502 | |
4503 | /* opt_pass methods: */ |
4504 | bool gate (function *) final override |
4505 | { |
4506 | return enable_split_before_sched2 (); |
4507 | } |
4508 | |
4509 | unsigned int execute (function *) final override |
4510 | { |
4511 | split_all_insns (); |
4512 | return 0; |
4513 | } |
4514 | |
4515 | }; // class pass_split_before_sched2 |
4516 | |
4517 | } // anon namespace |
4518 | |
4519 | rtl_opt_pass * |
4520 | make_pass_split_before_sched2 (gcc::context *ctxt) |
4521 | { |
4522 | return new pass_split_before_sched2 (ctxt); |
4523 | } |
4524 | |
4525 | namespace { |
4526 | |
4527 | const pass_data pass_data_split_before_regstack = |
4528 | { |
4529 | RTL_PASS, /* type */ |
4530 | "split4", /* name */ |
4531 | OPTGROUP_NONE, /* optinfo_flags */ |
4532 | TV_NONE, /* tv_id */ |
4533 | 0, /* properties_required */ |
4534 | 0, /* properties_provided */ |
4535 | 0, /* properties_destroyed */ |
4536 | 0, /* todo_flags_start */ |
4537 | 0, /* todo_flags_finish */ |
4538 | }; |
4539 | |
4540 | class pass_split_before_regstack : public rtl_opt_pass |
4541 | { |
4542 | public: |
4543 | pass_split_before_regstack (gcc::context *ctxt) |
4544 | : rtl_opt_pass (pass_data_split_before_regstack, ctxt) |
4545 | {} |
4546 | |
4547 | /* opt_pass methods: */ |
4548 | bool gate (function *) final override; |
4549 | unsigned int execute (function *) final override |
4550 | { |
4551 | split_all_insns (); |
4552 | return 0; |
4553 | } |
4554 | |
4555 | }; // class pass_split_before_regstack |
4556 | |
4557 | bool |
4558 | pass_split_before_regstack::gate (function *) |
4559 | { |
4560 | #if HAVE_ATTR_length && defined (STACK_REGS) |
4561 | /* If flow2 creates new instructions which need splitting, and |
4562 | scheduling after reload is not done, those instructions might |
4563 | not be split until final, which does not allow splitting when |
4564 | HAVE_ATTR_length is defined.  Selective scheduling can result |
4565 | in further instructions that need splitting.  */ |
4566 | #ifdef INSN_SCHEDULING |
4567 | return !enable_split_before_sched2 () || flag_selective_scheduling2; |
4568 | #else |
4569 | return !enable_split_before_sched2 (); |
4570 | #endif |
4571 | #else |
4572 | return false; |
4573 | #endif |
4574 | } |
4575 | |
4576 | } // anon namespace |
4577 | |
4578 | rtl_opt_pass * |
4579 | make_pass_split_before_regstack (gcc::context *ctxt) |
4580 | { |
4581 | return new pass_split_before_regstack (ctxt); |
4582 | } |
4583 | |
4584 | namespace { |
4585 | |
4586 | const pass_data pass_data_split_for_shorten_branches = |
4587 | { |
4588 | RTL_PASS, /* type */ |
4589 | "split5", /* name */ |
4590 | OPTGROUP_NONE, /* optinfo_flags */ |
4591 | TV_NONE, /* tv_id */ |
4592 | 0, /* properties_required */ |
4593 | 0, /* properties_provided */ |
4594 | 0, /* properties_destroyed */ |
4595 | 0, /* todo_flags_start */ |
4596 | 0, /* todo_flags_finish */ |
4597 | }; |
4598 | |
4599 | class pass_split_for_shorten_branches : public rtl_opt_pass |
4600 | { |
4601 | public: |
4602 | pass_split_for_shorten_branches (gcc::context *ctxt) |
4603 | : rtl_opt_pass (pass_data_split_for_shorten_branches, ctxt) |
4604 | {} |
4605 | |
4606 | /* opt_pass methods: */ |
4607 | bool gate (function *) final override |
4608 | { |
4609 | /* The placement of the splitting that we do for shorten_branches |
4610 | depends on whether regstack is used by the target or not. */ |
4611 | #if HAVE_ATTR_length && !defined (STACK_REGS) |
4612 | return true; |
4613 | #else |
4614 | return false; |
4615 | #endif |
4616 | } |
4617 | |
4618 | unsigned int execute (function *) final override |
4619 | { |
4620 | return split_all_insns_noflow (); |
4621 | } |
4622 | |
4623 | }; // class pass_split_for_shorten_branches |
4624 | |
4625 | } // anon namespace |
4626 | |
4627 | rtl_opt_pass * |
4628 | make_pass_split_for_shorten_branches (gcc::context *ctxt) |
4629 | { |
4630 | return new pass_split_for_shorten_branches (ctxt); |
4631 | } |
4632 | |
4633 | /* (Re)initialize the target information after a change in target. */ |
4634 | |
4635 | void |
4636 | recog_init () |
4637 | { |
4638 | /* The information is zero-initialized, so we don't need to do anything |
4639 | first time round. */ |
4640 | if (!this_target_recog->x_initialized) |
4641 | { |
4642 | this_target_recog->x_initialized = true; |
4643 | return; |
4644 | } |
4645 | memset (this_target_recog->x_bool_attr_masks, 0, |
4646 | sizeof (this_target_recog->x_bool_attr_masks)); |
4647 | for (unsigned int i = 0; i < NUM_INSN_CODES; ++i) |
4648 | if (this_target_recog->x_op_alt[i]) |
4649 | { |
4650 | free (this_target_recog->x_op_alt[i]); |
4651 | this_target_recog->x_op_alt[i] = 0; |
4652 | } |
4653 | } |