Bug Summary

File:build/gcc/rtl.h
Warning:line 1509, column 3
Returning null reference

Annotated Source Code

Press '?' to see keyboard shortcuts

clang -cc1 -cc1 -triple x86_64-suse-linux -analyze -disable-free -clear-ast-before-backend -disable-llvm-verifier -discard-value-names -main-file-name combine.cc -analyzer-checker=core -analyzer-checker=apiModeling -analyzer-checker=unix -analyzer-checker=deadcode -analyzer-checker=cplusplus -analyzer-checker=security.insecureAPI.UncheckedReturn -analyzer-checker=security.insecureAPI.getpw -analyzer-checker=security.insecureAPI.gets -analyzer-checker=security.insecureAPI.mktemp -analyzer-checker=security.insecureAPI.mkstemp -analyzer-checker=security.insecureAPI.vfork -analyzer-checker=nullability.NullPassedToNonnull -analyzer-checker=nullability.NullReturnedFromNonnull -analyzer-output plist -w -setup-static-analyzer -analyzer-config-compatibility-mode=true -mrelocation-model static -mframe-pointer=none -fmath-errno -ffp-contract=on -fno-rounding-math -mconstructor-aliases -funwind-tables=2 -target-cpu x86-64 -tune-cpu generic -debugger-tuning=gdb -fcoverage-compilation-dir=/buildworker/marxinbox-gcc-clang-static-analyzer/objdir/gcc -resource-dir /usr/lib64/clang/15.0.7 -D IN_GCC -D HAVE_CONFIG_H -I . -I . -I /buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc -I /buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/. -I /buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../include -I /buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libcpp/include -I /buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libcody -I /buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libdecnumber -I /buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libdecnumber/bid -I ../libdecnumber -I /buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libbacktrace -internal-isystem /usr/bin/../lib64/gcc/x86_64-suse-linux/13/../../../../include/c++/13 -internal-isystem /usr/bin/../lib64/gcc/x86_64-suse-linux/13/../../../../include/c++/13/x86_64-suse-linux -internal-isystem /usr/bin/../lib64/gcc/x86_64-suse-linux/13/../../../../include/c++/13/backward -internal-isystem /usr/lib64/clang/15.0.7/include -internal-isystem /usr/local/include -internal-isystem /usr/bin/../lib64/gcc/x86_64-suse-linux/13/../../../../x86_64-suse-linux/include -internal-externc-isystem /include -internal-externc-isystem /usr/include -O2 -Wno-narrowing -Wwrite-strings -Wno-long-long -Wno-variadic-macros -Wno-overlength-strings -fdeprecated-macro -fdebug-compilation-dir=/buildworker/marxinbox-gcc-clang-static-analyzer/objdir/gcc -ferror-limit 19 -fno-rtti -fgnuc-version=4.2.1 -vectorize-loops -vectorize-slp -analyzer-output=plist-html -analyzer-config silence-checkers=core.NullDereference -faddrsig -D__GCC_HAVE_DWARF2_CFI_ASM=1 -o /buildworker/marxinbox-gcc-clang-static-analyzer/objdir/clang-static-analyzer/2023-03-27-141847-20772-1/report-PNXTni.plist -x c++ /buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/combine.cc

/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/combine.cc

1/* Optimize by combining instructions for GNU compiler.
2 Copyright (C) 1987-2023 Free Software Foundation, Inc.
3
4This file is part of GCC.
5
6GCC is free software; you can redistribute it and/or modify it under
7the terms of the GNU General Public License as published by the Free
8Software Foundation; either version 3, or (at your option) any later
9version.
10
11GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12WARRANTY; without even the implied warranty of MERCHANTABILITY or
13FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14for more details.
15
16You should have received a copy of the GNU General Public License
17along with GCC; see the file COPYING3. If not see
18<http://www.gnu.org/licenses/>. */
19
20/* This module is essentially the "combiner" phase of the U. of Arizona
21 Portable Optimizer, but redone to work on our list-structured
22 representation for RTL instead of their string representation.
23
24 The LOG_LINKS of each insn identify the most recent assignment
25 to each REG used in the insn. It is a list of previous insns,
26 each of which contains a SET for a REG that is used in this insn
27 and not used or set in between. LOG_LINKs never cross basic blocks.
28 They were set up by the preceding pass (lifetime analysis).
29
30 We try to combine each pair of insns joined by a logical link.
31 We also try to combine triplets of insns A, B and C when C has
32 a link back to B and B has a link back to A. Likewise for a
33 small number of quadruplets of insns A, B, C and D for which
34 there's high likelihood of success.
35
36 We check (with modified_between_p) to avoid combining in such a way
37 as to move a computation to a place where its value would be different.
38
39 Combination is done by mathematically substituting the previous
40 insn(s) values for the regs they set into the expressions in
41 the later insns that refer to these regs. If the result is a valid insn
42 for our target machine, according to the machine description,
43 we install it, delete the earlier insns, and update the data flow
44 information (LOG_LINKS and REG_NOTES) for what we did.
45
46 There are a few exceptions where the dataflow information isn't
47 completely updated (however this is only a local issue since it is
48 regenerated before the next pass that uses it):
49
50 - reg_live_length is not updated
51 - reg_n_refs is not adjusted in the rare case when a register is
52 no longer required in a computation
53 - there are extremely rare cases (see distribute_notes) when a
54 REG_DEAD note is lost
55 - a LOG_LINKS entry that refers to an insn with multiple SETs may be
56 removed because there is no way to know which register it was
57 linking
58
59 To simplify substitution, we combine only when the earlier insn(s)
60 consist of only a single assignment. To simplify updating afterward,
61 we never combine when a subroutine call appears in the middle. */
62
63#include "config.h"
64#include "system.h"
65#include "coretypes.h"
66#include "backend.h"
67#include "target.h"
68#include "rtl.h"
69#include "tree.h"
70#include "cfghooks.h"
71#include "predict.h"
72#include "df.h"
73#include "memmodel.h"
74#include "tm_p.h"
75#include "optabs.h"
76#include "regs.h"
77#include "emit-rtl.h"
78#include "recog.h"
79#include "cgraph.h"
80#include "stor-layout.h"
81#include "cfgrtl.h"
82#include "cfgcleanup.h"
83/* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
84#include "explow.h"
85#include "insn-attr.h"
86#include "rtlhooks-def.h"
87#include "expr.h"
88#include "tree-pass.h"
89#include "valtrack.h"
90#include "rtl-iter.h"
91#include "print-rtl.h"
92#include "function-abi.h"
93#include "rtlanal.h"
94
95/* Number of attempts to combine instructions in this function. */
96
97static int combine_attempts;
98
99/* Number of attempts that got as far as substitution in this function. */
100
101static int combine_merges;
102
103/* Number of instructions combined with added SETs in this function. */
104
105static int combine_extras;
106
107/* Number of instructions combined in this function. */
108
109static int combine_successes;
110
111/* Totals over entire compilation. */
112
113static int total_attempts, total_merges, total_extras, total_successes;
114
115/* combine_instructions may try to replace the right hand side of the
116 second instruction with the value of an associated REG_EQUAL note
117 before throwing it at try_combine. That is problematic when there
118 is a REG_DEAD note for a register used in the old right hand side
119 and can cause distribute_notes to do wrong things. This is the
120 second instruction if it has been so modified, null otherwise. */
121
122static rtx_insn *i2mod;
123
124/* When I2MOD is nonnull, this is a copy of the old right hand side. */
125
126static rtx i2mod_old_rhs;
127
128/* When I2MOD is nonnull, this is a copy of the new right hand side. */
129
130static rtx i2mod_new_rhs;
131
132struct reg_stat_type {
133 /* Record last point of death of (hard or pseudo) register n. */
134 rtx_insn *last_death;
135
136 /* Record last point of modification of (hard or pseudo) register n. */
137 rtx_insn *last_set;
138
139 /* The next group of fields allows the recording of the last value assigned
140 to (hard or pseudo) register n. We use this information to see if an
141 operation being processed is redundant given a prior operation performed
142 on the register. For example, an `and' with a constant is redundant if
143 all the zero bits are already known to be turned off.
144
145 We use an approach similar to that used by cse, but change it in the
146 following ways:
147
148 (1) We do not want to reinitialize at each label.
149 (2) It is useful, but not critical, to know the actual value assigned
150 to a register. Often just its form is helpful.
151
152 Therefore, we maintain the following fields:
153
154 last_set_value the last value assigned
155 last_set_label records the value of label_tick when the
156 register was assigned
157 last_set_table_tick records the value of label_tick when a
158 value using the register is assigned
159 last_set_invalid set to nonzero when it is not valid
160 to use the value of this register in some
161 register's value
162
163 To understand the usage of these tables, it is important to understand
164 the distinction between the value in last_set_value being valid and
165 the register being validly contained in some other expression in the
166 table.
167
168 (The next two parameters are out of date).
169
170 reg_stat[i].last_set_value is valid if it is nonzero, and either
171 reg_n_sets[i] is 1 or reg_stat[i].last_set_label == label_tick.
172
173 Register I may validly appear in any expression returned for the value
174 of another register if reg_n_sets[i] is 1. It may also appear in the
175 value for register J if reg_stat[j].last_set_invalid is zero, or
176 reg_stat[i].last_set_label < reg_stat[j].last_set_label.
177
178 If an expression is found in the table containing a register which may
179 not validly appear in an expression, the register is replaced by
180 something that won't match, (clobber (const_int 0)). */
181
182 /* Record last value assigned to (hard or pseudo) register n. */
183
184 rtx last_set_value;
185
186 /* Record the value of label_tick when an expression involving register n
187 is placed in last_set_value. */
188
189 int last_set_table_tick;
190
191 /* Record the value of label_tick when the value for register n is placed in
192 last_set_value. */
193
194 int last_set_label;
195
196 /* These fields are maintained in parallel with last_set_value and are
197 used to store the mode in which the register was last set, the bits
198 that were known to be zero when it was last set, and the number of
199 sign bits copies it was known to have when it was last set. */
200
201 unsigned HOST_WIDE_INTlong last_set_nonzero_bits;
202 char last_set_sign_bit_copies;
203 ENUM_BITFIELD(machine_mode)enum machine_mode last_set_mode : 8;
204
205 /* Set nonzero if references to register n in expressions should not be
206 used. last_set_invalid is set nonzero when this register is being
207 assigned to and last_set_table_tick == label_tick. */
208
209 char last_set_invalid;
210
211 /* Some registers that are set more than once and used in more than one
212 basic block are nevertheless always set in similar ways. For example,
213 a QImode register may be loaded from memory in two places on a machine
214 where byte loads zero extend.
215
216 We record in the following fields if a register has some leading bits
217 that are always equal to the sign bit, and what we know about the
218 nonzero bits of a register, specifically which bits are known to be
219 zero.
220
221 If an entry is zero, it means that we don't know anything special. */
222
223 unsigned char sign_bit_copies;
224
225 unsigned HOST_WIDE_INTlong nonzero_bits;
226
227 /* Record the value of the label_tick when the last truncation
228 happened. The field truncated_to_mode is only valid if
229 truncation_label == label_tick. */
230
231 int truncation_label;
232
233 /* Record the last truncation seen for this register. If truncation
234 is not a nop to this mode we might be able to save an explicit
235 truncation if we know that value already contains a truncated
236 value. */
237
238 ENUM_BITFIELD(machine_mode)enum machine_mode truncated_to_mode : 8;
239};
240
241
242static vec<reg_stat_type> reg_stat;
243
244/* One plus the highest pseudo for which we track REG_N_SETS.
245 regstat_init_n_sets_and_refs allocates the array for REG_N_SETS just once,
246 but during combine_split_insns new pseudos can be created. As we don't have
247 updated DF information in that case, it is hard to initialize the array
248 after growing. The combiner only cares about REG_N_SETS (regno) == 1,
249 so instead of growing the arrays, just assume all newly created pseudos
250 during combine might be set multiple times. */
251
252static unsigned int reg_n_sets_max;
253
254/* Record the luid of the last insn that invalidated memory
255 (anything that writes memory, and subroutine calls, but not pushes). */
256
257static int mem_last_set;
258
259/* Record the luid of the last CALL_INSN
260 so we can tell whether a potential combination crosses any calls. */
261
262static int last_call_luid;
263
264/* When `subst' is called, this is the insn that is being modified
265 (by combining in a previous insn). The PATTERN of this insn
266 is still the old pattern partially modified and it should not be
267 looked at, but this may be used to examine the successors of the insn
268 to judge whether a simplification is valid. */
269
270static rtx_insn *subst_insn;
271
272/* This is the lowest LUID that `subst' is currently dealing with.
273 get_last_value will not return a value if the register was set at or
274 after this LUID. If not for this mechanism, we could get confused if
275 I2 or I1 in try_combine were an insn that used the old value of a register
276 to obtain a new value. In that case, we might erroneously get the
277 new value of the register when we wanted the old one. */
278
279static int subst_low_luid;
280
281/* This contains any hard registers that are used in newpat; reg_dead_at_p
282 must consider all these registers to be always live. */
283
284static HARD_REG_SET newpat_used_regs;
285
286/* This is an insn to which a LOG_LINKS entry has been added. If this
287 insn is the earlier than I2 or I3, combine should rescan starting at
288 that location. */
289
290static rtx_insn *added_links_insn;
291
292/* And similarly, for notes. */
293
294static rtx_insn *added_notes_insn;
295
296/* Basic block in which we are performing combines. */
297static basic_block this_basic_block;
298static bool optimize_this_for_speed_p;
299
300
301/* Length of the currently allocated uid_insn_cost array. */
302
303static int max_uid_known;
304
305/* The following array records the insn_cost for every insn
306 in the instruction stream. */
307
308static int *uid_insn_cost;
309
310/* The following array records the LOG_LINKS for every insn in the
311 instruction stream as struct insn_link pointers. */
312
313struct insn_link {
314 rtx_insn *insn;
315 unsigned int regno;
316 struct insn_link *next;
317};
318
319static struct insn_link **uid_log_links;
320
321static inline int
322insn_uid_check (const_rtx insn)
323{
324 int uid = INSN_UID (insn);
325 gcc_checking_assert (uid <= max_uid_known)((void)(!(uid <= max_uid_known) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/combine.cc"
, 325, __FUNCTION__), 0 : 0))
;
326 return uid;
327}
328
329#define INSN_COST(INSN)(uid_insn_cost[insn_uid_check (INSN)]) (uid_insn_cost[insn_uid_check (INSN)])
330#define LOG_LINKS(INSN)(uid_log_links[insn_uid_check (INSN)]) (uid_log_links[insn_uid_check (INSN)])
331
332#define FOR_EACH_LOG_LINK(L, INSN)for ((L) = (uid_log_links[insn_uid_check (INSN)]); (L); (L) =
(L)->next)
\
333 for ((L) = LOG_LINKS (INSN)(uid_log_links[insn_uid_check (INSN)]); (L); (L) = (L)->next)
334
335/* Links for LOG_LINKS are allocated from this obstack. */
336
337static struct obstack insn_link_obstack;
338
339/* Allocate a link. */
340
341static inline struct insn_link *
342alloc_insn_link (rtx_insn *insn, unsigned int regno, struct insn_link *next)
343{
344 struct insn_link *l
345 = (struct insn_link *) obstack_alloc (&insn_link_obstack,__extension__ ({ struct obstack *__h = (&insn_link_obstack
); __extension__ ({ struct obstack *__o = (__h); size_t __len
= ((sizeof (struct insn_link))); if (__extension__ ({ struct
obstack const *__o1 = (__o); (size_t) (__o1->chunk_limit -
__o1->next_free); }) < __len) _obstack_newchunk (__o, __len
); ((void) ((__o)->next_free += (__len))); }); __extension__
({ struct obstack *__o1 = (__h); void *__value = (void *) __o1
->object_base; if (__o1->next_free == __value) __o1->
maybe_empty_object = 1; __o1->next_free = (sizeof (ptrdiff_t
) < sizeof (void *) ? ((__o1->object_base) + (((__o1->
next_free) - (__o1->object_base) + (__o1->alignment_mask
)) & ~(__o1->alignment_mask))) : (char *) (((ptrdiff_t
) (__o1->next_free) + (__o1->alignment_mask)) & ~(__o1
->alignment_mask))); if ((size_t) (__o1->next_free - (char
*) __o1->chunk) > (size_t) (__o1->chunk_limit - (char
*) __o1->chunk)) __o1->next_free = __o1->chunk_limit
; __o1->object_base = __o1->next_free; __value; }); })
346 sizeof (struct insn_link))__extension__ ({ struct obstack *__h = (&insn_link_obstack
); __extension__ ({ struct obstack *__o = (__h); size_t __len
= ((sizeof (struct insn_link))); if (__extension__ ({ struct
obstack const *__o1 = (__o); (size_t) (__o1->chunk_limit -
__o1->next_free); }) < __len) _obstack_newchunk (__o, __len
); ((void) ((__o)->next_free += (__len))); }); __extension__
({ struct obstack *__o1 = (__h); void *__value = (void *) __o1
->object_base; if (__o1->next_free == __value) __o1->
maybe_empty_object = 1; __o1->next_free = (sizeof (ptrdiff_t
) < sizeof (void *) ? ((__o1->object_base) + (((__o1->
next_free) - (__o1->object_base) + (__o1->alignment_mask
)) & ~(__o1->alignment_mask))) : (char *) (((ptrdiff_t
) (__o1->next_free) + (__o1->alignment_mask)) & ~(__o1
->alignment_mask))); if ((size_t) (__o1->next_free - (char
*) __o1->chunk) > (size_t) (__o1->chunk_limit - (char
*) __o1->chunk)) __o1->next_free = __o1->chunk_limit
; __o1->object_base = __o1->next_free; __value; }); })
;
347 l->insn = insn;
348 l->regno = regno;
349 l->next = next;
350 return l;
351}
352
353/* Incremented for each basic block. */
354
355static int label_tick;
356
357/* Reset to label_tick for each extended basic block in scanning order. */
358
359static int label_tick_ebb_start;
360
361/* Mode used to compute significance in reg_stat[].nonzero_bits. It is the
362 largest integer mode that can fit in HOST_BITS_PER_WIDE_INT. */
363
364static scalar_int_mode nonzero_bits_mode;
365
366/* Nonzero when reg_stat[].nonzero_bits and reg_stat[].sign_bit_copies can
367 be safely used. It is zero while computing them and after combine has
368 completed. This former test prevents propagating values based on
369 previously set values, which can be incorrect if a variable is modified
370 in a loop. */
371
372static int nonzero_sign_valid;
373
374
375/* Record one modification to rtl structure
376 to be undone by storing old_contents into *where. */
377
378enum undo_kind { UNDO_RTX, UNDO_INT, UNDO_MODE, UNDO_LINKS };
379
380struct undo
381{
382 struct undo *next;
383 enum undo_kind kind;
384 union { rtx r; int i; machine_mode m; struct insn_link *l; } old_contents;
385 union { rtx *r; int *i; int regno; struct insn_link **l; } where;
386};
387
388/* Record a bunch of changes to be undone, up to MAX_UNDO of them.
389 num_undo says how many are currently recorded.
390
391 other_insn is nonzero if we have modified some other insn in the process
392 of working on subst_insn. It must be verified too. */
393
394struct undobuf
395{
396 struct undo *undos;
397 struct undo *frees;
398 rtx_insn *other_insn;
399};
400
401static struct undobuf undobuf;
402
403/* Number of times the pseudo being substituted for
404 was found and replaced. */
405
406static int n_occurrences;
407
408static rtx reg_nonzero_bits_for_combine (const_rtx, scalar_int_mode,
409 scalar_int_mode,
410 unsigned HOST_WIDE_INTlong *);
411static rtx reg_num_sign_bit_copies_for_combine (const_rtx, scalar_int_mode,
412 scalar_int_mode,
413 unsigned int *);
414static void do_SUBST (rtx *, rtx);
415static void do_SUBST_INT (int *, int);
416static void init_reg_last (void);
417static void setup_incoming_promotions (rtx_insn *);
418static void set_nonzero_bits_and_sign_copies (rtx, const_rtx, void *);
419static int cant_combine_insn_p (rtx_insn *);
420static int can_combine_p (rtx_insn *, rtx_insn *, rtx_insn *, rtx_insn *,
421 rtx_insn *, rtx_insn *, rtx *, rtx *);
422static int combinable_i3pat (rtx_insn *, rtx *, rtx, rtx, rtx, int, int, rtx *);
423static int contains_muldiv (rtx);
424static rtx_insn *try_combine (rtx_insn *, rtx_insn *, rtx_insn *, rtx_insn *,
425 int *, rtx_insn *);
426static void undo_all (void);
427static void undo_commit (void);
428static rtx *find_split_point (rtx *, rtx_insn *, bool);
429static rtx subst (rtx, rtx, rtx, int, int, int);
430static rtx combine_simplify_rtx (rtx, machine_mode, int, int);
431static rtx simplify_if_then_else (rtx);
432static rtx simplify_set (rtx);
433static rtx simplify_logical (rtx);
434static rtx expand_compound_operation (rtx);
435static const_rtx expand_field_assignment (const_rtx);
436static rtx make_extraction (machine_mode, rtx, HOST_WIDE_INTlong,
437 rtx, unsigned HOST_WIDE_INTlong, int, int, int);
438static int get_pos_from_mask (unsigned HOST_WIDE_INTlong,
439 unsigned HOST_WIDE_INTlong *);
440static rtx canon_reg_for_combine (rtx, rtx);
441static rtx force_int_to_mode (rtx, scalar_int_mode, scalar_int_mode,
442 scalar_int_mode, unsigned HOST_WIDE_INTlong, int);
443static rtx force_to_mode (rtx, machine_mode,
444 unsigned HOST_WIDE_INTlong, int);
445static rtx if_then_else_cond (rtx, rtx *, rtx *);
446static rtx known_cond (rtx, enum rtx_code, rtx, rtx);
447static int rtx_equal_for_field_assignment_p (rtx, rtx, bool = false);
448static rtx make_field_assignment (rtx);
449static rtx apply_distributive_law (rtx);
450static rtx distribute_and_simplify_rtx (rtx, int);
451static rtx simplify_and_const_int_1 (scalar_int_mode, rtx,
452 unsigned HOST_WIDE_INTlong);
453static rtx simplify_and_const_int (rtx, scalar_int_mode, rtx,
454 unsigned HOST_WIDE_INTlong);
455static int merge_outer_ops (enum rtx_code *, HOST_WIDE_INTlong *, enum rtx_code,
456 HOST_WIDE_INTlong, machine_mode, int *);
457static rtx simplify_shift_const_1 (enum rtx_code, machine_mode, rtx, int);
458static rtx simplify_shift_const (rtx, enum rtx_code, machine_mode, rtx,
459 int);
460static int recog_for_combine (rtx *, rtx_insn *, rtx *);
461static rtx gen_lowpart_for_combine (machine_mode, rtx);
462static enum rtx_code simplify_compare_const (enum rtx_code, machine_mode,
463 rtx, rtx *);
464static enum rtx_code simplify_comparison (enum rtx_code, rtx *, rtx *);
465static void update_table_tick (rtx);
466static void record_value_for_reg (rtx, rtx_insn *, rtx);
467static void check_promoted_subreg (rtx_insn *, rtx);
468static void record_dead_and_set_regs_1 (rtx, const_rtx, void *);
469static void record_dead_and_set_regs (rtx_insn *);
470static int get_last_value_validate (rtx *, rtx_insn *, int, int);
471static rtx get_last_value (const_rtx);
472static void reg_dead_at_p_1 (rtx, const_rtx, void *);
473static int reg_dead_at_p (rtx, rtx_insn *);
474static void move_deaths (rtx, rtx, int, rtx_insn *, rtx *);
475static int reg_bitfield_target_p (rtx, rtx);
476static void distribute_notes (rtx, rtx_insn *, rtx_insn *, rtx_insn *, rtx, rtx, rtx);
477static void distribute_links (struct insn_link *);
478static void mark_used_regs_combine (rtx);
479static void record_promoted_value (rtx_insn *, rtx);
480static bool unmentioned_reg_p (rtx, rtx);
481static void record_truncated_values (rtx *, void *);
482static bool reg_truncated_to_mode (machine_mode, const_rtx);
483static rtx gen_lowpart_or_truncate (machine_mode, rtx);
484
485
486/* It is not safe to use ordinary gen_lowpart in combine.
487 See comments in gen_lowpart_for_combine. */
488#undef RTL_HOOKS_GEN_LOWPARTgen_lowpart_for_combine
489#define RTL_HOOKS_GEN_LOWPARTgen_lowpart_for_combine gen_lowpart_for_combine
490
491/* Our implementation of gen_lowpart never emits a new pseudo. */
492#undef RTL_HOOKS_GEN_LOWPART_NO_EMITgen_lowpart_for_combine
493#define RTL_HOOKS_GEN_LOWPART_NO_EMITgen_lowpart_for_combine gen_lowpart_for_combine
494
495#undef RTL_HOOKS_REG_NONZERO_REG_BITSreg_nonzero_bits_for_combine
496#define RTL_HOOKS_REG_NONZERO_REG_BITSreg_nonzero_bits_for_combine reg_nonzero_bits_for_combine
497
498#undef RTL_HOOKS_REG_NUM_SIGN_BIT_COPIESreg_num_sign_bit_copies_for_combine
499#define RTL_HOOKS_REG_NUM_SIGN_BIT_COPIESreg_num_sign_bit_copies_for_combine reg_num_sign_bit_copies_for_combine
500
501#undef RTL_HOOKS_REG_TRUNCATED_TO_MODEreg_truncated_to_mode
502#define RTL_HOOKS_REG_TRUNCATED_TO_MODEreg_truncated_to_mode reg_truncated_to_mode
503
504static const struct rtl_hooks combine_rtl_hooks = RTL_HOOKS_INITIALIZER{ gen_lowpart_for_combine, gen_lowpart_for_combine, reg_nonzero_bits_for_combine
, reg_num_sign_bit_copies_for_combine, reg_truncated_to_mode }
;
505
506
507/* Convenience wrapper for the canonicalize_comparison target hook.
508 Target hooks cannot use enum rtx_code. */
509static inline void
510target_canonicalize_comparison (enum rtx_code *code, rtx *op0, rtx *op1,
511 bool op0_preserve_value)
512{
513 int code_int = (int)*code;
514 targetm.canonicalize_comparison (&code_int, op0, op1, op0_preserve_value);
515 *code = (enum rtx_code)code_int;
516}
517
518/* Try to split PATTERN found in INSN. This returns NULL_RTX if
519 PATTERN cannot be split. Otherwise, it returns an insn sequence.
520 This is a wrapper around split_insns which ensures that the
521 reg_stat vector is made larger if the splitter creates a new
522 register. */
523
524static rtx_insn *
525combine_split_insns (rtx pattern, rtx_insn *insn)
526{
527 rtx_insn *ret;
528 unsigned int nregs;
529
530 ret = split_insns (pattern, insn);
531 nregs = max_reg_num ();
532 if (nregs > reg_stat.length ())
533 reg_stat.safe_grow_cleared (nregs, true);
534 return ret;
535}
536
537/* This is used by find_single_use to locate an rtx in LOC that
538 contains exactly one use of DEST, which is typically a REG.
539 It returns a pointer to the innermost rtx expression
540 containing DEST. Appearances of DEST that are being used to
541 totally replace it are not counted. */
542
543static rtx *
544find_single_use_1 (rtx dest, rtx *loc)
545{
546 rtx x = *loc;
547 enum rtx_code code = GET_CODE (x)((enum rtx_code) (x)->code);
548 rtx *result = NULLnullptr;
549 rtx *this_result;
550 int i;
551 const char *fmt;
552
553 switch (code)
554 {
555 case CONST:
556 case LABEL_REF:
557 case SYMBOL_REF:
558 CASE_CONST_ANYcase CONST_INT: case CONST_WIDE_INT: case CONST_POLY_INT: case
CONST_DOUBLE: case CONST_FIXED: case CONST_VECTOR
:
559 case CLOBBER:
560 return 0;
561
562 case SET:
563 /* If the destination is anything other than PC, a REG or a SUBREG
564 of a REG that occupies all of the REG, the insn uses DEST if
565 it is mentioned in the destination or the source. Otherwise, we
566 need just check the source. */
567 if (GET_CODE (SET_DEST (x))((enum rtx_code) ((((x)->u.fld[0]).rt_rtx))->code) != PC
568 && !REG_P (SET_DEST (x))(((enum rtx_code) ((((x)->u.fld[0]).rt_rtx))->code) == REG
)
569 && ! (GET_CODE (SET_DEST (x))((enum rtx_code) ((((x)->u.fld[0]).rt_rtx))->code) == SUBREG
570 && REG_P (SUBREG_REG (SET_DEST (x)))(((enum rtx_code) (((((((x)->u.fld[0]).rt_rtx))->u.fld[
0]).rt_rtx))->code) == REG)
571 && !read_modify_subreg_p (SET_DEST (x)(((x)->u.fld[0]).rt_rtx))))
572 break;
573
574 return find_single_use_1 (dest, &SET_SRC (x)(((x)->u.fld[1]).rt_rtx));
575
576 case MEM:
577 case SUBREG:
578 return find_single_use_1 (dest, &XEXP (x, 0)(((x)->u.fld[0]).rt_rtx));
579
580 default:
581 break;
582 }
583
584 /* If it wasn't one of the common cases above, check each expression and
585 vector of this code. Look for a unique usage of DEST. */
586
587 fmt = GET_RTX_FORMAT (code)(rtx_format[(int) (code)]);
588 for (i = GET_RTX_LENGTH (code)(rtx_length[(int) (code)]) - 1; i >= 0; i--)
589 {
590 if (fmt[i] == 'e')
591 {
592 if (dest == XEXP (x, i)(((x)->u.fld[i]).rt_rtx)
593 || (REG_P (dest)(((enum rtx_code) (dest)->code) == REG) && REG_P (XEXP (x, i))(((enum rtx_code) ((((x)->u.fld[i]).rt_rtx))->code) == REG
)
594 && REGNO (dest)(rhs_regno(dest)) == REGNO (XEXP (x, i))(rhs_regno((((x)->u.fld[i]).rt_rtx)))))
595 this_result = loc;
596 else
597 this_result = find_single_use_1 (dest, &XEXP (x, i)(((x)->u.fld[i]).rt_rtx));
598
599 if (result == NULLnullptr)
600 result = this_result;
601 else if (this_result)
602 /* Duplicate usage. */
603 return NULLnullptr;
604 }
605 else if (fmt[i] == 'E')
606 {
607 int j;
608
609 for (j = XVECLEN (x, i)(((((x)->u.fld[i]).rt_rtvec))->num_elem) - 1; j >= 0; j--)
610 {
611 if (XVECEXP (x, i, j)(((((x)->u.fld[i]).rt_rtvec))->elem[j]) == dest
612 || (REG_P (dest)(((enum rtx_code) (dest)->code) == REG)
613 && REG_P (XVECEXP (x, i, j))(((enum rtx_code) ((((((x)->u.fld[i]).rt_rtvec))->elem[
j]))->code) == REG)
614 && REGNO (XVECEXP (x, i, j))(rhs_regno((((((x)->u.fld[i]).rt_rtvec))->elem[j]))) == REGNO (dest)(rhs_regno(dest))))
615 this_result = loc;
616 else
617 this_result = find_single_use_1 (dest, &XVECEXP (x, i, j)(((((x)->u.fld[i]).rt_rtvec))->elem[j]));
618
619 if (result == NULLnullptr)
620 result = this_result;
621 else if (this_result)
622 return NULLnullptr;
623 }
624 }
625 }
626
627 return result;
628}
629
630
631/* See if DEST, produced in INSN, is used only a single time in the
632 sequel. If so, return a pointer to the innermost rtx expression in which
633 it is used.
634
635 If PLOC is nonzero, *PLOC is set to the insn containing the single use.
636
637 Otherwise, we find the single use by finding an insn that has a
638 LOG_LINKS pointing at INSN and has a REG_DEAD note for DEST. If DEST is
639 only referenced once in that insn, we know that it must be the first
640 and last insn referencing DEST. */
641
642static rtx *
643find_single_use (rtx dest, rtx_insn *insn, rtx_insn **ploc)
644{
645 basic_block bb;
646 rtx_insn *next;
647 rtx *result;
648 struct insn_link *link;
649
650 if (!REG_P (dest)(((enum rtx_code) (dest)->code) == REG))
651 return 0;
652
653 bb = BLOCK_FOR_INSN (insn);
654 for (next = NEXT_INSN (insn);
655 next && BLOCK_FOR_INSN (next) == bb;
656 next = NEXT_INSN (next))
657 if (NONDEBUG_INSN_P (next)((((enum rtx_code) (next)->code) == INSN) || (((enum rtx_code
) (next)->code) == JUMP_INSN) || (((enum rtx_code) (next)->
code) == CALL_INSN))
&& dead_or_set_p (next, dest))
658 {
659 FOR_EACH_LOG_LINK (link, next)for ((link) = (uid_log_links[insn_uid_check (next)]); (link);
(link) = (link)->next)
660 if (link->insn == insn && link->regno == REGNO (dest)(rhs_regno(dest)))
661 break;
662
663 if (link)
664 {
665 result = find_single_use_1 (dest, &PATTERN (next));
666 if (ploc)
667 *ploc = next;
668 return result;
669 }
670 }
671
672 return 0;
673}
674
675/* Substitute NEWVAL, an rtx expression, into INTO, a place in some
676 insn. The substitution can be undone by undo_all. If INTO is already
677 set to NEWVAL, do not record this change. Because computing NEWVAL might
678 also call SUBST, we have to compute it before we put anything into
679 the undo table. */
680
681static void
682do_SUBST (rtx *into, rtx newval)
683{
684 struct undo *buf;
685 rtx oldval = *into;
686
687 if (oldval == newval)
688 return;
689
690 /* We'd like to catch as many invalid transformations here as
691 possible. Unfortunately, there are way too many mode changes
692 that are perfectly valid, so we'd waste too much effort for
693 little gain doing the checks here. Focus on catching invalid
694 transformations involving integer constants. */
695 if (GET_MODE_CLASS (GET_MODE (oldval))((enum mode_class) mode_class[((machine_mode) (oldval)->mode
)])
== MODE_INT
696 && CONST_INT_P (newval)(((enum rtx_code) (newval)->code) == CONST_INT))
697 {
698 /* Sanity check that we're replacing oldval with a CONST_INT
699 that is a valid sign-extension for the original mode. */
700 gcc_assert (INTVAL (newval)((void)(!(((newval)->u.hwint[0]) == trunc_int_for_mode (((
newval)->u.hwint[0]), ((machine_mode) (oldval)->mode)))
? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/combine.cc"
, 701, __FUNCTION__), 0 : 0))
701 == trunc_int_for_mode (INTVAL (newval), GET_MODE (oldval)))((void)(!(((newval)->u.hwint[0]) == trunc_int_for_mode (((
newval)->u.hwint[0]), ((machine_mode) (oldval)->mode)))
? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/combine.cc"
, 701, __FUNCTION__), 0 : 0))
;
702
703 /* Replacing the operand of a SUBREG or a ZERO_EXTEND with a
704 CONST_INT is not valid, because after the replacement, the
705 original mode would be gone. Unfortunately, we can't tell
706 when do_SUBST is called to replace the operand thereof, so we
707 perform this test on oldval instead, checking whether an
708 invalid replacement took place before we got here. */
709 gcc_assert (!(GET_CODE (oldval) == SUBREG((void)(!(!(((enum rtx_code) (oldval)->code) == SUBREG &&
(((enum rtx_code) ((((oldval)->u.fld[0]).rt_rtx))->code
) == CONST_INT))) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/combine.cc"
, 710, __FUNCTION__), 0 : 0))
710 && CONST_INT_P (SUBREG_REG (oldval))))((void)(!(!(((enum rtx_code) (oldval)->code) == SUBREG &&
(((enum rtx_code) ((((oldval)->u.fld[0]).rt_rtx))->code
) == CONST_INT))) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/combine.cc"
, 710, __FUNCTION__), 0 : 0))
;
711 gcc_assert (!(GET_CODE (oldval) == ZERO_EXTEND((void)(!(!(((enum rtx_code) (oldval)->code) == ZERO_EXTEND
&& (((enum rtx_code) ((((oldval)->u.fld[0]).rt_rtx
))->code) == CONST_INT))) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/combine.cc"
, 712, __FUNCTION__), 0 : 0))
712 && CONST_INT_P (XEXP (oldval, 0))))((void)(!(!(((enum rtx_code) (oldval)->code) == ZERO_EXTEND
&& (((enum rtx_code) ((((oldval)->u.fld[0]).rt_rtx
))->code) == CONST_INT))) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/combine.cc"
, 712, __FUNCTION__), 0 : 0))
;
713 }
714
715 if (undobuf.frees)
716 buf = undobuf.frees, undobuf.frees = buf->next;
717 else
718 buf = XNEW (struct undo)((struct undo *) xmalloc (sizeof (struct undo)));
719
720 buf->kind = UNDO_RTX;
721 buf->where.r = into;
722 buf->old_contents.r = oldval;
723 *into = newval;
724
725 buf->next = undobuf.undos, undobuf.undos = buf;
726}
727
728#define SUBST(INTO, NEWVAL)do_SUBST (&(INTO), (NEWVAL)) do_SUBST (&(INTO), (NEWVAL))
729
730/* Similar to SUBST, but NEWVAL is an int expression. Note that substitution
731 for the value of a HOST_WIDE_INT value (including CONST_INT) is
732 not safe. */
733
734static void
735do_SUBST_INT (int *into, int newval)
736{
737 struct undo *buf;
738 int oldval = *into;
739
740 if (oldval == newval)
741 return;
742
743 if (undobuf.frees)
744 buf = undobuf.frees, undobuf.frees = buf->next;
745 else
746 buf = XNEW (struct undo)((struct undo *) xmalloc (sizeof (struct undo)));
747
748 buf->kind = UNDO_INT;
749 buf->where.i = into;
750 buf->old_contents.i = oldval;
751 *into = newval;
752
753 buf->next = undobuf.undos, undobuf.undos = buf;
754}
755
756#define SUBST_INT(INTO, NEWVAL)do_SUBST_INT (&(INTO), (NEWVAL)) do_SUBST_INT (&(INTO), (NEWVAL))
757
758/* Similar to SUBST, but just substitute the mode. This is used when
759 changing the mode of a pseudo-register, so that any other
760 references to the entry in the regno_reg_rtx array will change as
761 well. */
762
763static void
764subst_mode (int regno, machine_mode newval)
765{
766 struct undo *buf;
767 rtx reg = regno_reg_rtx[regno];
768 machine_mode oldval = GET_MODE (reg)((machine_mode) (reg)->mode);
769
770 if (oldval == newval)
771 return;
772
773 if (undobuf.frees)
774 buf = undobuf.frees, undobuf.frees = buf->next;
775 else
776 buf = XNEW (struct undo)((struct undo *) xmalloc (sizeof (struct undo)));
777
778 buf->kind = UNDO_MODE;
779 buf->where.regno = regno;
780 buf->old_contents.m = oldval;
781 adjust_reg_mode (reg, newval);
782
783 buf->next = undobuf.undos, undobuf.undos = buf;
784}
785
786/* Similar to SUBST, but NEWVAL is a LOG_LINKS expression. */
787
788static void
789do_SUBST_LINK (struct insn_link **into, struct insn_link *newval)
790{
791 struct undo *buf;
792 struct insn_link * oldval = *into;
793
794 if (oldval == newval)
795 return;
796
797 if (undobuf.frees)
798 buf = undobuf.frees, undobuf.frees = buf->next;
799 else
800 buf = XNEW (struct undo)((struct undo *) xmalloc (sizeof (struct undo)));
801
802 buf->kind = UNDO_LINKS;
803 buf->where.l = into;
804 buf->old_contents.l = oldval;
805 *into = newval;
806
807 buf->next = undobuf.undos, undobuf.undos = buf;
808}
809
810#define SUBST_LINK(oldval, newval)do_SUBST_LINK (&oldval, newval) do_SUBST_LINK (&oldval, newval)
811
812/* Subroutine of try_combine. Determine whether the replacement patterns
813 NEWPAT, NEWI2PAT and NEWOTHERPAT are cheaper according to insn_cost
814 than the original sequence I0, I1, I2, I3 and undobuf.other_insn. Note
815 that I0, I1 and/or NEWI2PAT may be NULL_RTX. Similarly, NEWOTHERPAT and
816 undobuf.other_insn may also both be NULL_RTX. Return false if the cost
817 of all the instructions can be estimated and the replacements are more
818 expensive than the original sequence. */
819
820static bool
821combine_validate_cost (rtx_insn *i0, rtx_insn *i1, rtx_insn *i2, rtx_insn *i3,
822 rtx newpat, rtx newi2pat, rtx newotherpat)
823{
824 int i0_cost, i1_cost, i2_cost, i3_cost;
825 int new_i2_cost, new_i3_cost;
826 int old_cost, new_cost;
827
828 /* Lookup the original insn_costs. */
829 i2_cost = INSN_COST (i2)(uid_insn_cost[insn_uid_check (i2)]);
830 i3_cost = INSN_COST (i3)(uid_insn_cost[insn_uid_check (i3)]);
831
832 if (i1)
833 {
834 i1_cost = INSN_COST (i1)(uid_insn_cost[insn_uid_check (i1)]);
835 if (i0)
836 {
837 i0_cost = INSN_COST (i0)(uid_insn_cost[insn_uid_check (i0)]);
838 old_cost = (i0_cost > 0 && i1_cost > 0 && i2_cost > 0 && i3_cost > 0
839 ? i0_cost + i1_cost + i2_cost + i3_cost : 0);
840 }
841 else
842 {
843 old_cost = (i1_cost > 0 && i2_cost > 0 && i3_cost > 0
844 ? i1_cost + i2_cost + i3_cost : 0);
845 i0_cost = 0;
846 }
847 }
848 else
849 {
850 old_cost = (i2_cost > 0 && i3_cost > 0) ? i2_cost + i3_cost : 0;
851 i1_cost = i0_cost = 0;
852 }
853
854 /* If we have split a PARALLEL I2 to I1,I2, we have counted its cost twice;
855 correct that. */
856 if (old_cost && i1 && INSN_UID (i1) == INSN_UID (i2))
857 old_cost -= i1_cost;
858
859
860 /* Calculate the replacement insn_costs. */
861 rtx tmp = PATTERN (i3);
862 PATTERN (i3) = newpat;
863 int tmpi = INSN_CODE (i3)(((i3)->u.fld[5]).rt_int);
864 INSN_CODE (i3)(((i3)->u.fld[5]).rt_int) = -1;
865 new_i3_cost = insn_cost (i3, optimize_this_for_speed_p);
866 PATTERN (i3) = tmp;
867 INSN_CODE (i3)(((i3)->u.fld[5]).rt_int) = tmpi;
868 if (newi2pat)
869 {
870 tmp = PATTERN (i2);
871 PATTERN (i2) = newi2pat;
872 tmpi = INSN_CODE (i2)(((i2)->u.fld[5]).rt_int);
873 INSN_CODE (i2)(((i2)->u.fld[5]).rt_int) = -1;
874 new_i2_cost = insn_cost (i2, optimize_this_for_speed_p);
875 PATTERN (i2) = tmp;
876 INSN_CODE (i2)(((i2)->u.fld[5]).rt_int) = tmpi;
877 new_cost = (new_i2_cost > 0 && new_i3_cost > 0)
878 ? new_i2_cost + new_i3_cost : 0;
879 }
880 else
881 {
882 new_cost = new_i3_cost;
883 new_i2_cost = 0;
884 }
885
886 if (undobuf.other_insn)
887 {
888 int old_other_cost, new_other_cost;
889
890 old_other_cost = INSN_COST (undobuf.other_insn)(uid_insn_cost[insn_uid_check (undobuf.other_insn)]);
891 tmp = PATTERN (undobuf.other_insn);
892 PATTERN (undobuf.other_insn) = newotherpat;
893 tmpi = INSN_CODE (undobuf.other_insn)(((undobuf.other_insn)->u.fld[5]).rt_int);
894 INSN_CODE (undobuf.other_insn)(((undobuf.other_insn)->u.fld[5]).rt_int) = -1;
895 new_other_cost = insn_cost (undobuf.other_insn,
896 optimize_this_for_speed_p);
897 PATTERN (undobuf.other_insn) = tmp;
898 INSN_CODE (undobuf.other_insn)(((undobuf.other_insn)->u.fld[5]).rt_int) = tmpi;
899 if (old_other_cost > 0 && new_other_cost > 0)
900 {
901 old_cost += old_other_cost;
902 new_cost += new_other_cost;
903 }
904 else
905 old_cost = 0;
906 }
907
908 /* Disallow this combination if both new_cost and old_cost are greater than
909 zero, and new_cost is greater than old cost. */
910 int reject = old_cost > 0 && new_cost > old_cost;
911
912 if (dump_file)
913 {
914 fprintf (dump_file, "%s combination of insns ",
915 reject ? "rejecting" : "allowing");
916 if (i0)
917 fprintf (dump_file, "%d, ", INSN_UID (i0));
918 if (i1 && INSN_UID (i1) != INSN_UID (i2))
919 fprintf (dump_file, "%d, ", INSN_UID (i1));
920 fprintf (dump_file, "%d and %d\n", INSN_UID (i2), INSN_UID (i3));
921
922 fprintf (dump_file, "original costs ");
923 if (i0)
924 fprintf (dump_file, "%d + ", i0_cost);
925 if (i1 && INSN_UID (i1) != INSN_UID (i2))
926 fprintf (dump_file, "%d + ", i1_cost);
927 fprintf (dump_file, "%d + %d = %d\n", i2_cost, i3_cost, old_cost);
928
929 if (newi2pat)
930 fprintf (dump_file, "replacement costs %d + %d = %d\n",
931 new_i2_cost, new_i3_cost, new_cost);
932 else
933 fprintf (dump_file, "replacement cost %d\n", new_cost);
934 }
935
936 if (reject)
937 return false;
938
939 /* Update the uid_insn_cost array with the replacement costs. */
940 INSN_COST (i2)(uid_insn_cost[insn_uid_check (i2)]) = new_i2_cost;
941 INSN_COST (i3)(uid_insn_cost[insn_uid_check (i3)]) = new_i3_cost;
942 if (i1)
943 {
944 INSN_COST (i1)(uid_insn_cost[insn_uid_check (i1)]) = 0;
945 if (i0)
946 INSN_COST (i0)(uid_insn_cost[insn_uid_check (i0)]) = 0;
947 }
948
949 return true;
950}
951
952
953/* Delete any insns that copy a register to itself.
954 Return true if the CFG was changed. */
955
956static bool
957delete_noop_moves (void)
958{
959 rtx_insn *insn, *next;
960 basic_block bb;
961
962 bool edges_deleted = false;
963
964 FOR_EACH_BB_FN (bb, cfun)for (bb = ((cfun + 0))->cfg->x_entry_block_ptr->next_bb
; bb != ((cfun + 0))->cfg->x_exit_block_ptr; bb = bb->
next_bb)
965 {
966 for (insn = BB_HEAD (bb)(bb)->il.x.head_; insn != NEXT_INSN (BB_END (bb)(bb)->il.x.rtl->end_); insn = next)
967 {
968 next = NEXT_INSN (insn);
969 if (INSN_P (insn)(((((enum rtx_code) (insn)->code) == INSN) || (((enum rtx_code
) (insn)->code) == JUMP_INSN) || (((enum rtx_code) (insn)->
code) == CALL_INSN)) || (((enum rtx_code) (insn)->code) ==
DEBUG_INSN))
&& noop_move_p (insn))
970 {
971 if (dump_file)
972 fprintf (dump_file, "deleting noop move %d\n", INSN_UID (insn));
973
974 edges_deleted |= delete_insn_and_edges (insn);
975 }
976 }
977 }
978
979 return edges_deleted;
980}
981
982
983/* Return false if we do not want to (or cannot) combine DEF. */
984static bool
985can_combine_def_p (df_ref def)
986{
987 /* Do not consider if it is pre/post modification in MEM. */
988 if (DF_REF_FLAGS (def)((def)->base.flags) & DF_REF_PRE_POST_MODIFY)
989 return false;
990
991 unsigned int regno = DF_REF_REGNO (def)((def)->base.regno);
992
993 /* Do not combine frame pointer adjustments. */
994 if ((regno == FRAME_POINTER_REGNUM19
995 && (!reload_completed || frame_pointer_needed((&x_rtl)->frame_pointer_needed)))
996 || (!HARD_FRAME_POINTER_IS_FRAME_POINTER(6 == 19)
997 && regno == HARD_FRAME_POINTER_REGNUM6
998 && (!reload_completed || frame_pointer_needed((&x_rtl)->frame_pointer_needed)))
999 || (FRAME_POINTER_REGNUM19 != ARG_POINTER_REGNUM16
1000 && regno == ARG_POINTER_REGNUM16 && fixed_regs(this_target_hard_regs->x_fixed_regs)[regno]))
1001 return false;
1002
1003 return true;
1004}
1005
1006/* Return false if we do not want to (or cannot) combine USE. */
1007static bool
1008can_combine_use_p (df_ref use)
1009{
1010 /* Do not consider the usage of the stack pointer by function call. */
1011 if (DF_REF_FLAGS (use)((use)->base.flags) & DF_REF_CALL_STACK_USAGE)
1012 return false;
1013
1014 return true;
1015}
1016
1017/* Fill in log links field for all insns. */
1018
1019static void
1020create_log_links (void)
1021{
1022 basic_block bb;
1023 rtx_insn **next_use;
1024 rtx_insn *insn;
1025 df_ref def, use;
1026
1027 next_use = XCNEWVEC (rtx_insn *, max_reg_num ())((rtx_insn * *) xcalloc ((max_reg_num ()), sizeof (rtx_insn *
)))
;
1028
1029 /* Pass through each block from the end, recording the uses of each
1030 register and establishing log links when def is encountered.
1031 Note that we do not clear next_use array in order to save time,
1032 so we have to test whether the use is in the same basic block as def.
1033
1034 There are a few cases below when we do not consider the definition or
1035 usage -- these are taken from original flow.c did. Don't ask me why it is
1036 done this way; I don't know and if it works, I don't want to know. */
1037
1038 FOR_EACH_BB_FN (bb, cfun)for (bb = ((cfun + 0))->cfg->x_entry_block_ptr->next_bb
; bb != ((cfun + 0))->cfg->x_exit_block_ptr; bb = bb->
next_bb)
1039 {
1040 FOR_BB_INSNS_REVERSE (bb, insn)for ((insn) = (bb)->il.x.rtl->end_; (insn) && (
insn) != PREV_INSN ((bb)->il.x.head_); (insn) = PREV_INSN (
insn))
1041 {
1042 if (!NONDEBUG_INSN_P (insn)((((enum rtx_code) (insn)->code) == INSN) || (((enum rtx_code
) (insn)->code) == JUMP_INSN) || (((enum rtx_code) (insn)->
code) == CALL_INSN))
)
1043 continue;
1044
1045 /* Log links are created only once. */
1046 gcc_assert (!LOG_LINKS (insn))((void)(!(!(uid_log_links[insn_uid_check (insn)])) ? fancy_abort
("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/combine.cc"
, 1046, __FUNCTION__), 0 : 0))
;
1047
1048 FOR_EACH_INSN_DEF (def, insn)for (def = (((df->insns[(INSN_UID (insn))]))->defs); def
; def = ((def)->base.next_loc))
1049 {
1050 unsigned int regno = DF_REF_REGNO (def)((def)->base.regno);
1051 rtx_insn *use_insn;
1052
1053 if (!next_use[regno])
1054 continue;
1055
1056 if (!can_combine_def_p (def))
1057 continue;
1058
1059 use_insn = next_use[regno];
1060 next_use[regno] = NULLnullptr;
1061
1062 if (BLOCK_FOR_INSN (use_insn) != bb)
1063 continue;
1064
1065 /* flow.c claimed:
1066
1067 We don't build a LOG_LINK for hard registers contained
1068 in ASM_OPERANDs. If these registers get replaced,
1069 we might wind up changing the semantics of the insn,
1070 even if reload can make what appear to be valid
1071 assignments later. */
1072 if (regno < FIRST_PSEUDO_REGISTER76
1073 && asm_noperands (PATTERN (use_insn)) >= 0)
1074 continue;
1075
1076 /* Don't add duplicate links between instructions. */
1077 struct insn_link *links;
1078 FOR_EACH_LOG_LINK (links, use_insn)for ((links) = (uid_log_links[insn_uid_check (use_insn)]); (links
); (links) = (links)->next)
1079 if (insn == links->insn && regno == links->regno)
1080 break;
1081
1082 if (!links)
1083 LOG_LINKS (use_insn)(uid_log_links[insn_uid_check (use_insn)])
1084 = alloc_insn_link (insn, regno, LOG_LINKS (use_insn)(uid_log_links[insn_uid_check (use_insn)]));
1085 }
1086
1087 FOR_EACH_INSN_USE (use, insn)for (use = (((df->insns[(INSN_UID (insn))]))->uses); use
; use = ((use)->base.next_loc))
1088 if (can_combine_use_p (use))
1089 next_use[DF_REF_REGNO (use)((use)->base.regno)] = insn;
1090 }
1091 }
1092
1093 free (next_use);
1094}
1095
1096/* Walk the LOG_LINKS of insn B to see if we find a reference to A. Return
1097 true if we found a LOG_LINK that proves that A feeds B. This only works
1098 if there are no instructions between A and B which could have a link
1099 depending on A, since in that case we would not record a link for B. */
1100
1101static bool
1102insn_a_feeds_b (rtx_insn *a, rtx_insn *b)
1103{
1104 struct insn_link *links;
1105 FOR_EACH_LOG_LINK (links, b)for ((links) = (uid_log_links[insn_uid_check (b)]); (links); (
links) = (links)->next)
1106 if (links->insn == a)
1107 return true;
1108 return false;
1109}
1110
1111/* Main entry point for combiner. F is the first insn of the function.
1112 NREGS is the first unused pseudo-reg number.
1113
1114 Return nonzero if the CFG was changed (e.g. if the combiner has
1115 turned an indirect jump instruction into a direct jump). */
1116static int
1117combine_instructions (rtx_insn *f, unsigned int nregs)
1118{
1119 rtx_insn *insn, *next;
1120 struct insn_link *links, *nextlinks;
1121 rtx_insn *first;
1122 basic_block last_bb;
1123
1124 int new_direct_jump_p = 0;
1125
1126 for (first = f; first && !NONDEBUG_INSN_P (first)((((enum rtx_code) (first)->code) == INSN) || (((enum rtx_code
) (first)->code) == JUMP_INSN) || (((enum rtx_code) (first
)->code) == CALL_INSN))
; )
1127 first = NEXT_INSN (first);
1128 if (!first)
1129 return 0;
1130
1131 combine_attempts = 0;
1132 combine_merges = 0;
1133 combine_extras = 0;
1134 combine_successes = 0;
1135
1136 rtl_hooks = combine_rtl_hooks;
1137
1138 reg_stat.safe_grow_cleared (nregs, true);
1139
1140 init_recog_no_volatile ();
1141
1142 /* Allocate array for insn info. */
1143 max_uid_known = get_max_uid ();
1144 uid_log_links = XCNEWVEC (struct insn_link *, max_uid_known + 1)((struct insn_link * *) xcalloc ((max_uid_known + 1), sizeof (
struct insn_link *)))
;
1145 uid_insn_cost = XCNEWVEC (int, max_uid_known + 1)((int *) xcalloc ((max_uid_known + 1), sizeof (int)));
1146 gcc_obstack_init (&insn_link_obstack)_obstack_begin (((&insn_link_obstack)), (memory_block_pool
::block_size), (0), (mempool_obstack_chunk_alloc), (mempool_obstack_chunk_free
))
;
1147
1148 nonzero_bits_mode = int_mode_for_size (HOST_BITS_PER_WIDE_INT64, 0).require ();
1149
1150 /* Don't use reg_stat[].nonzero_bits when computing it. This can cause
1151 problems when, for example, we have j <<= 1 in a loop. */
1152
1153 nonzero_sign_valid = 0;
1154 label_tick = label_tick_ebb_start = 1;
1155
1156 /* Scan all SETs and see if we can deduce anything about what
1157 bits are known to be zero for some registers and how many copies
1158 of the sign bit are known to exist for those registers.
1159
1160 Also set any known values so that we can use it while searching
1161 for what bits are known to be set. */
1162
1163 setup_incoming_promotions (first);
1164 /* Allow the entry block and the first block to fall into the same EBB.
1165 Conceptually the incoming promotions are assigned to the entry block. */
1166 last_bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)(((cfun + 0))->cfg->x_entry_block_ptr);
1167
1168 create_log_links ();
1169 FOR_EACH_BB_FN (this_basic_block, cfun)for (this_basic_block = ((cfun + 0))->cfg->x_entry_block_ptr
->next_bb; this_basic_block != ((cfun + 0))->cfg->x_exit_block_ptr
; this_basic_block = this_basic_block->next_bb)
1170 {
1171 optimize_this_for_speed_p = optimize_bb_for_speed_p (this_basic_block);
1172 last_call_luid = 0;
1173 mem_last_set = -1;
1174
1175 label_tick++;
1176 if (!single_pred_p (this_basic_block)
1177 || single_pred (this_basic_block) != last_bb)
1178 label_tick_ebb_start = label_tick;
1179 last_bb = this_basic_block;
1180
1181 FOR_BB_INSNS (this_basic_block, insn)for ((insn) = (this_basic_block)->il.x.head_; (insn) &&
(insn) != NEXT_INSN ((this_basic_block)->il.x.rtl->end_
); (insn) = NEXT_INSN (insn))
1182 if (INSN_P (insn)(((((enum rtx_code) (insn)->code) == INSN) || (((enum rtx_code
) (insn)->code) == JUMP_INSN) || (((enum rtx_code) (insn)->
code) == CALL_INSN)) || (((enum rtx_code) (insn)->code) ==
DEBUG_INSN))
&& BLOCK_FOR_INSN (insn))
1183 {
1184 rtx links;
1185
1186 subst_low_luid = DF_INSN_LUID (insn)((((df->insns[(INSN_UID (insn))]))->luid));
1187 subst_insn = insn;
1188
1189 note_stores (insn, set_nonzero_bits_and_sign_copies, insn);
1190 record_dead_and_set_regs (insn);
1191
1192 if (AUTO_INC_DEC0)
1193 for (links = REG_NOTES (insn)(((insn)->u.fld[6]).rt_rtx); links; links = XEXP (links, 1)(((links)->u.fld[1]).rt_rtx))
1194 if (REG_NOTE_KIND (links)((enum reg_note) ((machine_mode) (links)->mode)) == REG_INC)
1195 set_nonzero_bits_and_sign_copies (XEXP (links, 0)(((links)->u.fld[0]).rt_rtx), NULL_RTX(rtx) 0,
1196 insn);
1197
1198 /* Record the current insn_cost of this instruction. */
1199 INSN_COST (insn)(uid_insn_cost[insn_uid_check (insn)]) = insn_cost (insn, optimize_this_for_speed_p);
1200 if (dump_file)
1201 {
1202 fprintf (dump_file, "insn_cost %d for ", INSN_COST (insn)(uid_insn_cost[insn_uid_check (insn)]));
1203 dump_insn_slim (dump_file, insn);
1204 }
1205 }
1206 }
1207
1208 nonzero_sign_valid = 1;
1209
1210 /* Now scan all the insns in forward order. */
1211 label_tick = label_tick_ebb_start = 1;
1212 init_reg_last ();
1213 setup_incoming_promotions (first);
1214 last_bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)(((cfun + 0))->cfg->x_entry_block_ptr);
1215 int max_combine = param_max_combine_insnsglobal_options.x_param_max_combine_insns;
1216
1217 FOR_EACH_BB_FN (this_basic_block, cfun)for (this_basic_block = ((cfun + 0))->cfg->x_entry_block_ptr
->next_bb; this_basic_block != ((cfun + 0))->cfg->x_exit_block_ptr
; this_basic_block = this_basic_block->next_bb)
1218 {
1219 rtx_insn *last_combined_insn = NULLnullptr;
1220
1221 /* Ignore instruction combination in basic blocks that are going to
1222 be removed as unreachable anyway. See PR82386. */
1223 if (EDGE_COUNT (this_basic_block->preds)vec_safe_length (this_basic_block->preds) == 0)
1224 continue;
1225
1226 optimize_this_for_speed_p = optimize_bb_for_speed_p (this_basic_block);
1227 last_call_luid = 0;
1228 mem_last_set = -1;
1229
1230 label_tick++;
1231 if (!single_pred_p (this_basic_block)
1232 || single_pred (this_basic_block) != last_bb)
1233 label_tick_ebb_start = label_tick;
1234 last_bb = this_basic_block;
1235
1236 rtl_profile_for_bb (this_basic_block);
1237 for (insn = BB_HEAD (this_basic_block)(this_basic_block)->il.x.head_;
1238 insn != NEXT_INSN (BB_END (this_basic_block)(this_basic_block)->il.x.rtl->end_);
1239 insn = next ? next : NEXT_INSN (insn))
1240 {
1241 next = 0;
1242 if (!NONDEBUG_INSN_P (insn)((((enum rtx_code) (insn)->code) == INSN) || (((enum rtx_code
) (insn)->code) == JUMP_INSN) || (((enum rtx_code) (insn)->
code) == CALL_INSN))
)
1243 continue;
1244
1245 while (last_combined_insn
1246 && (!NONDEBUG_INSN_P (last_combined_insn)((((enum rtx_code) (last_combined_insn)->code) == INSN) ||
(((enum rtx_code) (last_combined_insn)->code) == JUMP_INSN
) || (((enum rtx_code) (last_combined_insn)->code) == CALL_INSN
))
1247 || last_combined_insn->deleted ()))
1248 last_combined_insn = PREV_INSN (last_combined_insn);
1249 if (last_combined_insn == NULL_RTX(rtx) 0
1250 || BLOCK_FOR_INSN (last_combined_insn) != this_basic_block
1251 || DF_INSN_LUID (last_combined_insn)((((df->insns[(INSN_UID (last_combined_insn))]))->luid)
)
<= DF_INSN_LUID (insn)((((df->insns[(INSN_UID (insn))]))->luid)))
1252 last_combined_insn = insn;
1253
1254 /* See if we know about function return values before this
1255 insn based upon SUBREG flags. */
1256 check_promoted_subreg (insn, PATTERN (insn));
1257
1258 /* See if we can find hardregs and subreg of pseudos in
1259 narrower modes. This could help turning TRUNCATEs
1260 into SUBREGs. */
1261 note_uses (&PATTERN (insn), record_truncated_values, NULLnullptr);
1262
1263 /* Try this insn with each insn it links back to. */
1264
1265 FOR_EACH_LOG_LINK (links, insn)for ((links) = (uid_log_links[insn_uid_check (insn)]); (links
); (links) = (links)->next)
1266 if ((next = try_combine (insn, links->insn, NULLnullptr,
1267 NULLnullptr, &new_direct_jump_p,
1268 last_combined_insn)) != 0)
1269 {
1270 statistics_counter_event (cfun(cfun + 0), "two-insn combine", 1);
1271 goto retry;
1272 }
1273
1274 /* Try each sequence of three linked insns ending with this one. */
1275
1276 if (max_combine >= 3)
1277 FOR_EACH_LOG_LINK (links, insn)for ((links) = (uid_log_links[insn_uid_check (insn)]); (links
); (links) = (links)->next)
1278 {
1279 rtx_insn *link = links->insn;
1280
1281 /* If the linked insn has been replaced by a note, then there
1282 is no point in pursuing this chain any further. */
1283 if (NOTE_P (link)(((enum rtx_code) (link)->code) == NOTE))
1284 continue;
1285
1286 FOR_EACH_LOG_LINK (nextlinks, link)for ((nextlinks) = (uid_log_links[insn_uid_check (link)]); (nextlinks
); (nextlinks) = (nextlinks)->next)
1287 if ((next = try_combine (insn, link, nextlinks->insn,
1288 NULLnullptr, &new_direct_jump_p,
1289 last_combined_insn)) != 0)
1290 {
1291 statistics_counter_event (cfun(cfun + 0), "three-insn combine", 1);
1292 goto retry;
1293 }
1294 }
1295
1296 /* Try combining an insn with two different insns whose results it
1297 uses. */
1298 if (max_combine >= 3)
1299 FOR_EACH_LOG_LINK (links, insn)for ((links) = (uid_log_links[insn_uid_check (insn)]); (links
); (links) = (links)->next)
1300 for (nextlinks = links->next; nextlinks;
1301 nextlinks = nextlinks->next)
1302 if ((next = try_combine (insn, links->insn,
1303 nextlinks->insn, NULLnullptr,
1304 &new_direct_jump_p,
1305 last_combined_insn)) != 0)
1306
1307 {
1308 statistics_counter_event (cfun(cfun + 0), "three-insn combine", 1);
1309 goto retry;
1310 }
1311
1312 /* Try four-instruction combinations. */
1313 if (max_combine >= 4)
1314 FOR_EACH_LOG_LINK (links, insn)for ((links) = (uid_log_links[insn_uid_check (insn)]); (links
); (links) = (links)->next)
1315 {
1316 struct insn_link *next1;
1317 rtx_insn *link = links->insn;
1318
1319 /* If the linked insn has been replaced by a note, then there
1320 is no point in pursuing this chain any further. */
1321 if (NOTE_P (link)(((enum rtx_code) (link)->code) == NOTE))
1322 continue;
1323
1324 FOR_EACH_LOG_LINK (next1, link)for ((next1) = (uid_log_links[insn_uid_check (link)]); (next1
); (next1) = (next1)->next)
1325 {
1326 rtx_insn *link1 = next1->insn;
1327 if (NOTE_P (link1)(((enum rtx_code) (link1)->code) == NOTE))
1328 continue;
1329 /* I0 -> I1 -> I2 -> I3. */
1330 FOR_EACH_LOG_LINK (nextlinks, link1)for ((nextlinks) = (uid_log_links[insn_uid_check (link1)]); (
nextlinks); (nextlinks) = (nextlinks)->next)
1331 if ((next = try_combine (insn, link, link1,
1332 nextlinks->insn,
1333 &new_direct_jump_p,
1334 last_combined_insn)) != 0)
1335 {
1336 statistics_counter_event (cfun(cfun + 0), "four-insn combine", 1);
1337 goto retry;
1338 }
1339 /* I0, I1 -> I2, I2 -> I3. */
1340 for (nextlinks = next1->next; nextlinks;
1341 nextlinks = nextlinks->next)
1342 if ((next = try_combine (insn, link, link1,
1343 nextlinks->insn,
1344 &new_direct_jump_p,
1345 last_combined_insn)) != 0)
1346 {
1347 statistics_counter_event (cfun(cfun + 0), "four-insn combine", 1);
1348 goto retry;
1349 }
1350 }
1351
1352 for (next1 = links->next; next1; next1 = next1->next)
1353 {
1354 rtx_insn *link1 = next1->insn;
1355 if (NOTE_P (link1)(((enum rtx_code) (link1)->code) == NOTE))
1356 continue;
1357 /* I0 -> I2; I1, I2 -> I3. */
1358 FOR_EACH_LOG_LINK (nextlinks, link)for ((nextlinks) = (uid_log_links[insn_uid_check (link)]); (nextlinks
); (nextlinks) = (nextlinks)->next)
1359 if ((next = try_combine (insn, link, link1,
1360 nextlinks->insn,
1361 &new_direct_jump_p,
1362 last_combined_insn)) != 0)
1363 {
1364 statistics_counter_event (cfun(cfun + 0), "four-insn combine", 1);
1365 goto retry;
1366 }
1367 /* I0 -> I1; I1, I2 -> I3. */
1368 FOR_EACH_LOG_LINK (nextlinks, link1)for ((nextlinks) = (uid_log_links[insn_uid_check (link1)]); (
nextlinks); (nextlinks) = (nextlinks)->next)
1369 if ((next = try_combine (insn, link, link1,
1370 nextlinks->insn,
1371 &new_direct_jump_p,
1372 last_combined_insn)) != 0)
1373 {
1374 statistics_counter_event (cfun(cfun + 0), "four-insn combine", 1);
1375 goto retry;
1376 }
1377 }
1378 }
1379
1380 /* Try this insn with each REG_EQUAL note it links back to. */
1381 FOR_EACH_LOG_LINK (links, insn)for ((links) = (uid_log_links[insn_uid_check (insn)]); (links
); (links) = (links)->next)
1382 {
1383 rtx set, note;
1384 rtx_insn *temp = links->insn;
1385 if ((set = single_set (temp)) != 0
1386 && (note = find_reg_equal_equiv_note (temp)) != 0
1387 && (note = XEXP (note, 0)(((note)->u.fld[0]).rt_rtx), GET_CODE (note)((enum rtx_code) (note)->code)) != EXPR_LIST
1388 && ! side_effects_p (SET_SRC (set)(((set)->u.fld[1]).rt_rtx))
1389 /* Avoid using a register that may already been marked
1390 dead by an earlier instruction. */
1391 && ! unmentioned_reg_p (note, SET_SRC (set)(((set)->u.fld[1]).rt_rtx))
1392 && (GET_MODE (note)((machine_mode) (note)->mode) == VOIDmode((void) 0, E_VOIDmode)
1393 ? SCALAR_INT_MODE_P (GET_MODE (SET_DEST (set)))(((enum mode_class) mode_class[((machine_mode) ((((set)->u
.fld[0]).rt_rtx))->mode)]) == MODE_INT || ((enum mode_class
) mode_class[((machine_mode) ((((set)->u.fld[0]).rt_rtx))->
mode)]) == MODE_PARTIAL_INT)
1394 : (GET_MODE (SET_DEST (set))((machine_mode) ((((set)->u.fld[0]).rt_rtx))->mode) == GET_MODE (note)((machine_mode) (note)->mode)
1395 && (GET_CODE (SET_DEST (set))((enum rtx_code) ((((set)->u.fld[0]).rt_rtx))->code) != ZERO_EXTRACT
1396 || (GET_MODE (XEXP (SET_DEST (set), 0))((machine_mode) (((((((set)->u.fld[0]).rt_rtx))->u.fld[
0]).rt_rtx))->mode)
1397 == GET_MODE (note)((machine_mode) (note)->mode))))))
1398 {
1399 /* Temporarily replace the set's source with the
1400 contents of the REG_EQUAL note. The insn will
1401 be deleted or recognized by try_combine. */
1402 rtx orig_src = SET_SRC (set)(((set)->u.fld[1]).rt_rtx);
1403 rtx orig_dest = SET_DEST (set)(((set)->u.fld[0]).rt_rtx);
1404 if (GET_CODE (SET_DEST (set))((enum rtx_code) ((((set)->u.fld[0]).rt_rtx))->code) == ZERO_EXTRACT)
1405 SET_DEST (set)(((set)->u.fld[0]).rt_rtx) = XEXP (SET_DEST (set), 0)((((((set)->u.fld[0]).rt_rtx))->u.fld[0]).rt_rtx);
1406 SET_SRC (set)(((set)->u.fld[1]).rt_rtx) = note;
1407 i2mod = temp;
1408 i2mod_old_rhs = copy_rtx (orig_src);
1409 i2mod_new_rhs = copy_rtx (note);
1410 next = try_combine (insn, i2mod, NULLnullptr, NULLnullptr,
1411 &new_direct_jump_p,
1412 last_combined_insn);
1413 i2mod = NULLnullptr;
1414 if (next)
1415 {
1416 statistics_counter_event (cfun(cfun + 0), "insn-with-note combine", 1);
1417 goto retry;
1418 }
1419 SET_SRC (set)(((set)->u.fld[1]).rt_rtx) = orig_src;
1420 SET_DEST (set)(((set)->u.fld[0]).rt_rtx) = orig_dest;
1421 }
1422 }
1423
1424 if (!NOTE_P (insn)(((enum rtx_code) (insn)->code) == NOTE))
1425 record_dead_and_set_regs (insn);
1426
1427retry:
1428 ;
1429 }
1430 }
1431
1432 default_rtl_profile ();
1433 clear_bb_flags ();
1434 new_direct_jump_p |= purge_all_dead_edges ();
1435 new_direct_jump_p |= delete_noop_moves ();
1436
1437 /* Clean up. */
1438 obstack_free (&insn_link_obstack, NULL)__extension__ ({ struct obstack *__o = (&insn_link_obstack
); void *__obj = (void *) (nullptr); if (__obj > (void *) __o
->chunk && __obj < (void *) __o->chunk_limit
) __o->next_free = __o->object_base = (char *) __obj; else
_obstack_free (__o, __obj); })
;
1439 free (uid_log_links);
1440 free (uid_insn_cost);
1441 reg_stat.release ();
1442
1443 {
1444 struct undo *undo, *next;
1445 for (undo = undobuf.frees; undo; undo = next)
1446 {
1447 next = undo->next;
1448 free (undo);
1449 }
1450 undobuf.frees = 0;
1451 }
1452
1453 total_attempts += combine_attempts;
1454 total_merges += combine_merges;
1455 total_extras += combine_extras;
1456 total_successes += combine_successes;
1457
1458 nonzero_sign_valid = 0;
1459 rtl_hooks = general_rtl_hooks;
1460
1461 /* Make recognizer allow volatile MEMs again. */
1462 init_recog ();
1463
1464 return new_direct_jump_p;
1465}
1466
1467/* Wipe the last_xxx fields of reg_stat in preparation for another pass. */
1468
1469static void
1470init_reg_last (void)
1471{
1472 unsigned int i;
1473 reg_stat_type *p;
1474
1475 FOR_EACH_VEC_ELT (reg_stat, i, p)for (i = 0; (reg_stat).iterate ((i), &(p)); ++(i))
1476 memset (p, 0, offsetof (reg_stat_type, sign_bit_copies)__builtin_offsetof(reg_stat_type, sign_bit_copies));
1477}
1478
1479/* Set up any promoted values for incoming argument registers. */
1480
1481static void
1482setup_incoming_promotions (rtx_insn *first)
1483{
1484 tree arg;
1485 bool strictly_local = false;
1486
1487 for (arg = DECL_ARGUMENTS (current_function_decl)((tree_check ((current_function_decl), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/combine.cc"
, 1487, __FUNCTION__, (FUNCTION_DECL)))->function_decl.arguments
)
; arg;
1488 arg = DECL_CHAIN (arg)(((contains_struct_check (((contains_struct_check ((arg), (TS_DECL_MINIMAL
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/combine.cc"
, 1488, __FUNCTION__))), (TS_COMMON), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/combine.cc"
, 1488, __FUNCTION__))->common.chain))
)
1489 {
1490 rtx x, reg = DECL_INCOMING_RTL (arg)((tree_check ((arg), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/combine.cc"
, 1490, __FUNCTION__, (PARM_DECL)))->parm_decl.incoming_rtl
)
;
1491 int uns1, uns3;
1492 machine_mode mode1, mode2, mode3, mode4;
1493
1494 /* Only continue if the incoming argument is in a register. */
1495 if (!REG_P (reg)(((enum rtx_code) (reg)->code) == REG))
1496 continue;
1497
1498 /* Determine, if possible, whether all call sites of the current
1499 function lie within the current compilation unit. (This does
1500 take into account the exporting of a function via taking its
1501 address, and so forth.) */
1502 strictly_local
1503 = cgraph_node::local_info_node (current_function_decl)->local;
1504
1505 /* The mode and signedness of the argument before any promotions happen
1506 (equal to the mode of the pseudo holding it at that stage). */
1507 mode1 = TYPE_MODE (TREE_TYPE (arg))((((enum tree_code) ((tree_class_check ((((contains_struct_check
((arg), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/combine.cc"
, 1507, __FUNCTION__))->typed.type)), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/combine.cc"
, 1507, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(((contains_struct_check ((arg), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/combine.cc"
, 1507, __FUNCTION__))->typed.type)) : (((contains_struct_check
((arg), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/combine.cc"
, 1507, __FUNCTION__))->typed.type))->type_common.mode)
;
1508 uns1 = TYPE_UNSIGNED (TREE_TYPE (arg))((tree_class_check ((((contains_struct_check ((arg), (TS_TYPED
), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/combine.cc"
, 1508, __FUNCTION__))->typed.type)), (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/combine.cc"
, 1508, __FUNCTION__))->base.u.bits.unsigned_flag)
;
1509
1510 /* The mode and signedness of the argument after any source language and
1511 TARGET_PROMOTE_PROTOTYPES-driven promotions. */
1512 mode2 = TYPE_MODE (DECL_ARG_TYPE (arg))((((enum tree_code) ((tree_class_check ((((tree_check ((arg),
"/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/combine.cc"
, 1512, __FUNCTION__, (PARM_DECL)))->decl_common.initial))
, (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/combine.cc"
, 1512, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(((tree_check ((arg), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/combine.cc"
, 1512, __FUNCTION__, (PARM_DECL)))->decl_common.initial))
: (((tree_check ((arg), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/combine.cc"
, 1512, __FUNCTION__, (PARM_DECL)))->decl_common.initial))
->type_common.mode)
;
1513 uns3 = TYPE_UNSIGNED (DECL_ARG_TYPE (arg))((tree_class_check ((((tree_check ((arg), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/combine.cc"
, 1513, __FUNCTION__, (PARM_DECL)))->decl_common.initial))
, (tcc_type), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/combine.cc"
, 1513, __FUNCTION__))->base.u.bits.unsigned_flag)
;
1514
1515 /* The mode and signedness of the argument as it is actually passed,
1516 see assign_parm_setup_reg in function.cc. */
1517 mode3 = promote_function_mode (TREE_TYPE (arg)((contains_struct_check ((arg), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/combine.cc"
, 1517, __FUNCTION__))->typed.type)
, mode1, &uns3,
1518 TREE_TYPE (cfun->decl)((contains_struct_check (((cfun + 0)->decl), (TS_TYPED), "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/combine.cc"
, 1518, __FUNCTION__))->typed.type)
, 0);
1519
1520 /* The mode of the register in which the argument is being passed. */
1521 mode4 = GET_MODE (reg)((machine_mode) (reg)->mode);
1522
1523 /* Eliminate sign extensions in the callee when:
1524 (a) A mode promotion has occurred; */
1525 if (mode1 == mode3)
1526 continue;
1527 /* (b) The mode of the register is the same as the mode of
1528 the argument as it is passed; */
1529 if (mode3 != mode4)
1530 continue;
1531 /* (c) There's no language level extension; */
1532 if (mode1 == mode2)
1533 ;
1534 /* (c.1) All callers are from the current compilation unit. If that's
1535 the case we don't have to rely on an ABI, we only have to know
1536 what we're generating right now, and we know that we will do the
1537 mode1 to mode2 promotion with the given sign. */
1538 else if (!strictly_local)
1539 continue;
1540 /* (c.2) The combination of the two promotions is useful. This is
1541 true when the signs match, or if the first promotion is unsigned.
1542 In the later case, (sign_extend (zero_extend x)) is the same as
1543 (zero_extend (zero_extend x)), so make sure to force UNS3 true. */
1544 else if (uns1)
1545 uns3 = true;
1546 else if (uns3)
1547 continue;
1548
1549 /* Record that the value was promoted from mode1 to mode3,
1550 so that any sign extension at the head of the current
1551 function may be eliminated. */
1552 x = gen_rtx_CLOBBER (mode1, const0_rtx)gen_rtx_fmt_e_stat ((CLOBBER), ((mode1)), (((const_int_rtx[64
]))) )
;
1553 x = gen_rtx_fmt_e ((uns3 ? ZERO_EXTEND : SIGN_EXTEND), mode3, x)gen_rtx_fmt_e_stat (((uns3 ? ZERO_EXTEND : SIGN_EXTEND)), (mode3
), (x) )
;
1554 record_value_for_reg (reg, first, x);
1555 }
1556}
1557
1558/* If MODE has a precision lower than PREC and SRC is a non-negative constant
1559 that would appear negative in MODE, sign-extend SRC for use in nonzero_bits
1560 because some machines (maybe most) will actually do the sign-extension and
1561 this is the conservative approach.
1562
1563 ??? For 2.5, try to tighten up the MD files in this regard instead of this
1564 kludge. */
1565
1566static rtx
1567sign_extend_short_imm (rtx src, machine_mode mode, unsigned int prec)
1568{
1569 scalar_int_mode int_mode;
1570 if (CONST_INT_P (src)(((enum rtx_code) (src)->code) == CONST_INT)
1571 && is_a <scalar_int_mode> (mode, &int_mode)
1572 && GET_MODE_PRECISION (int_mode) < prec
1573 && INTVAL (src)((src)->u.hwint[0]) > 0
1574 && val_signbit_known_set_p (int_mode, INTVAL (src)((src)->u.hwint[0])))
1575 src = GEN_INT (INTVAL (src) | ~GET_MODE_MASK (int_mode))gen_rtx_CONST_INT (((void) 0, E_VOIDmode), (((src)->u.hwint
[0]) | ~mode_mask_array[int_mode]))
;
1576
1577 return src;
1578}
1579
1580/* Update RSP for pseudo-register X from INSN's REG_EQUAL note (if one exists)
1581 and SET. */
1582
1583static void
1584update_rsp_from_reg_equal (reg_stat_type *rsp, rtx_insn *insn, const_rtx set,
1585 rtx x)
1586{
1587 rtx reg_equal_note = insn ? find_reg_equal_equiv_note (insn) : NULL_RTX(rtx) 0;
1588 unsigned HOST_WIDE_INTlong bits = 0;
1589 rtx reg_equal = NULLnullptr, src = SET_SRC (set)(((set)->u.fld[1]).rt_rtx);
1590 unsigned int num = 0;
1591
1592 if (reg_equal_note)
1593 reg_equal = XEXP (reg_equal_note, 0)(((reg_equal_note)->u.fld[0]).rt_rtx);
1594
1595 if (SHORT_IMMEDIATES_SIGN_EXTEND0)
1596 {
1597 src = sign_extend_short_imm (src, GET_MODE (x)((machine_mode) (x)->mode), BITS_PER_WORD((8) * (((global_options.x_ix86_isa_flags & (1UL <<
1)) != 0) ? 8 : 4))
);
1598 if (reg_equal)
1599 reg_equal = sign_extend_short_imm (reg_equal, GET_MODE (x)((machine_mode) (x)->mode), BITS_PER_WORD((8) * (((global_options.x_ix86_isa_flags & (1UL <<
1)) != 0) ? 8 : 4))
);
1600 }
1601
1602 /* Don't call nonzero_bits if it cannot change anything. */
1603 if (rsp->nonzero_bits != HOST_WIDE_INT_M1U-1UL)
1604 {
1605 machine_mode mode = GET_MODE (x)((machine_mode) (x)->mode);
1606 if (GET_MODE_CLASS (mode)((enum mode_class) mode_class[mode]) == MODE_INT
1607 && HWI_COMPUTABLE_MODE_P (mode))
1608 mode = nonzero_bits_mode;
1609 bits = nonzero_bits (src, mode);
1610 if (reg_equal && bits)
1611 bits &= nonzero_bits (reg_equal, mode);
1612 rsp->nonzero_bits |= bits;
1613 }
1614
1615 /* Don't call num_sign_bit_copies if it cannot change anything. */
1616 if (rsp->sign_bit_copies != 1)
1617 {
1618 num = num_sign_bit_copies (SET_SRC (set)(((set)->u.fld[1]).rt_rtx), GET_MODE (x)((machine_mode) (x)->mode));
1619 if (reg_equal && maybe_ne (num, GET_MODE_PRECISION (GET_MODE (x)((machine_mode) (x)->mode))))
1620 {
1621 unsigned int numeq = num_sign_bit_copies (reg_equal, GET_MODE (x)((machine_mode) (x)->mode));
1622 if (num == 0 || numeq > num)
1623 num = numeq;
1624 }
1625 if (rsp->sign_bit_copies == 0 || num < rsp->sign_bit_copies)
1626 rsp->sign_bit_copies = num;
1627 }
1628}
1629
1630/* Called via note_stores. If X is a pseudo that is narrower than
1631 HOST_BITS_PER_WIDE_INT and is being set, record what bits are known zero.
1632
1633 If we are setting only a portion of X and we can't figure out what
1634 portion, assume all bits will be used since we don't know what will
1635 be happening.
1636
1637 Similarly, set how many bits of X are known to be copies of the sign bit
1638 at all locations in the function. This is the smallest number implied
1639 by any set of X. */
1640
1641static void
1642set_nonzero_bits_and_sign_copies (rtx x, const_rtx set, void *data)
1643{
1644 rtx_insn *insn = (rtx_insn *) data;
1645 scalar_int_mode mode;
1646
1647 if (REG_P (x)(((enum rtx_code) (x)->code) == REG)
1648 && REGNO (x)(rhs_regno(x)) >= FIRST_PSEUDO_REGISTER76
1649 /* If this register is undefined at the start of the file, we can't
1650 say what its contents were. */
1651 && ! REGNO_REG_SET_Pbitmap_bit_p ((&(df_lr_get_bb_info (((((cfun + 0))->cfg
->x_entry_block_ptr)->next_bb)->index))->in), (rhs_regno
(x)))
1652 (DF_LR_IN (ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb), REGNO (x))bitmap_bit_p ((&(df_lr_get_bb_info (((((cfun + 0))->cfg
->x_entry_block_ptr)->next_bb)->index))->in), (rhs_regno
(x)))
1653 && is_a <scalar_int_mode> (GET_MODE (x)((machine_mode) (x)->mode), &mode)
1654 && HWI_COMPUTABLE_MODE_P (mode))
1655 {
1656 reg_stat_type *rsp = &reg_stat[REGNO (x)(rhs_regno(x))];
1657
1658 if (set == 0 || GET_CODE (set)((enum rtx_code) (set)->code) == CLOBBER)
1659 {
1660 rsp->nonzero_bits = GET_MODE_MASK (mode)mode_mask_array[mode];
1661 rsp->sign_bit_copies = 1;
1662 return;
1663 }
1664
1665 /* If this register is being initialized using itself, and the
1666 register is uninitialized in this basic block, and there are
1667 no LOG_LINKS which set the register, then part of the
1668 register is uninitialized. In that case we can't assume
1669 anything about the number of nonzero bits.
1670
1671 ??? We could do better if we checked this in
1672 reg_{nonzero_bits,num_sign_bit_copies}_for_combine. Then we
1673 could avoid making assumptions about the insn which initially
1674 sets the register, while still using the information in other
1675 insns. We would have to be careful to check every insn
1676 involved in the combination. */
1677
1678 if (insn
1679 && reg_referenced_p (x, PATTERN (insn))
1680 && !REGNO_REG_SET_P (DF_LR_IN (BLOCK_FOR_INSN (insn)),bitmap_bit_p ((&(df_lr_get_bb_info ((BLOCK_FOR_INSN (insn
))->index))->in), (rhs_regno(x)))
1681 REGNO (x))bitmap_bit_p ((&(df_lr_get_bb_info ((BLOCK_FOR_INSN (insn
))->index))->in), (rhs_regno(x)))
)
1682 {
1683 struct insn_link *link;
1684
1685 FOR_EACH_LOG_LINK (link, insn)for ((link) = (uid_log_links[insn_uid_check (insn)]); (link);
(link) = (link)->next)
1686 if (dead_or_set_p (link->insn, x))
1687 break;
1688 if (!link)
1689 {
1690 rsp->nonzero_bits = GET_MODE_MASK (mode)mode_mask_array[mode];
1691 rsp->sign_bit_copies = 1;
1692 return;
1693 }
1694 }
1695
1696 /* If this is a complex assignment, see if we can convert it into a
1697 simple assignment. */
1698 set = expand_field_assignment (set);
1699
1700 /* If this is a simple assignment, or we have a paradoxical SUBREG,
1701 set what we know about X. */
1702
1703 if (SET_DEST (set)(((set)->u.fld[0]).rt_rtx) == x
1704 || (paradoxical_subreg_p (SET_DEST (set)(((set)->u.fld[0]).rt_rtx))
1705 && SUBREG_REG (SET_DEST (set))((((((set)->u.fld[0]).rt_rtx))->u.fld[0]).rt_rtx) == x))
1706 update_rsp_from_reg_equal (rsp, insn, set, x);
1707 else
1708 {
1709 rsp->nonzero_bits = GET_MODE_MASK (mode)mode_mask_array[mode];
1710 rsp->sign_bit_copies = 1;
1711 }
1712 }
1713}
1714
1715/* See if INSN can be combined into I3. PRED, PRED2, SUCC and SUCC2 are
1716 optionally insns that were previously combined into I3 or that will be
1717 combined into the merger of INSN and I3. The order is PRED, PRED2,
1718 INSN, SUCC, SUCC2, I3.
1719
1720 Return 0 if the combination is not allowed for any reason.
1721
1722 If the combination is allowed, *PDEST will be set to the single
1723 destination of INSN and *PSRC to the single source, and this function
1724 will return 1. */
1725
1726static int
1727can_combine_p (rtx_insn *insn, rtx_insn *i3, rtx_insn *pred ATTRIBUTE_UNUSED__attribute__ ((__unused__)),
1728 rtx_insn *pred2 ATTRIBUTE_UNUSED__attribute__ ((__unused__)), rtx_insn *succ, rtx_insn *succ2,
1729 rtx *pdest, rtx *psrc)
1730{
1731 int i;
1732 const_rtx set = 0;
1733 rtx src, dest;
1734 rtx_insn *p;
1735 rtx link;
1736 bool all_adjacent = true;
1737 int (*is_volatile_p) (const_rtx);
1738
1739 if (succ)
1740 {
1741 if (succ2)
1742 {
1743 if (next_active_insn (succ2) != i3)
1744 all_adjacent = false;
1745 if (next_active_insn (succ) != succ2)
1746 all_adjacent = false;
1747 }
1748 else if (next_active_insn (succ) != i3)
1749 all_adjacent = false;
1750 if (next_active_insn (insn) != succ)
1751 all_adjacent = false;
1752 }
1753 else if (next_active_insn (insn) != i3)
1754 all_adjacent = false;
1755
1756 /* Can combine only if previous insn is a SET of a REG or a SUBREG,
1757 or a PARALLEL consisting of such a SET and CLOBBERs.
1758
1759 If INSN has CLOBBER parallel parts, ignore them for our processing.
1760 By definition, these happen during the execution of the insn. When it
1761 is merged with another insn, all bets are off. If they are, in fact,
1762 needed and aren't also supplied in I3, they may be added by
1763 recog_for_combine. Otherwise, it won't match.
1764
1765 We can also ignore a SET whose SET_DEST is mentioned in a REG_UNUSED
1766 note.
1767
1768 Get the source and destination of INSN. If more than one, can't
1769 combine. */
1770
1771 if (GET_CODE (PATTERN (insn))((enum rtx_code) (PATTERN (insn))->code) == SET)
1772 set = PATTERN (insn);
1773 else if (GET_CODE (PATTERN (insn))((enum rtx_code) (PATTERN (insn))->code) == PARALLEL
1774 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0))((enum rtx_code) ((((((PATTERN (insn))->u.fld[0]).rt_rtvec
))->elem[0]))->code)
== SET)
1775 {
1776 for (i = 0; i < XVECLEN (PATTERN (insn), 0)(((((PATTERN (insn))->u.fld[0]).rt_rtvec))->num_elem); i++)
1777 {
1778 rtx elt = XVECEXP (PATTERN (insn), 0, i)(((((PATTERN (insn))->u.fld[0]).rt_rtvec))->elem[i]);
1779
1780 switch (GET_CODE (elt)((enum rtx_code) (elt)->code))
1781 {
1782 /* This is important to combine floating point insns
1783 for the SH4 port. */
1784 case USE:
1785 /* Combining an isolated USE doesn't make sense.
1786 We depend here on combinable_i3pat to reject them. */
1787 /* The code below this loop only verifies that the inputs of
1788 the SET in INSN do not change. We call reg_set_between_p
1789 to verify that the REG in the USE does not change between
1790 I3 and INSN.
1791 If the USE in INSN was for a pseudo register, the matching
1792 insn pattern will likely match any register; combining this
1793 with any other USE would only be safe if we knew that the
1794 used registers have identical values, or if there was
1795 something to tell them apart, e.g. different modes. For
1796 now, we forgo such complicated tests and simply disallow
1797 combining of USES of pseudo registers with any other USE. */
1798 if (REG_P (XEXP (elt, 0))(((enum rtx_code) ((((elt)->u.fld[0]).rt_rtx))->code) ==
REG)
1799 && GET_CODE (PATTERN (i3))((enum rtx_code) (PATTERN (i3))->code) == PARALLEL)
1800 {
1801 rtx i3pat = PATTERN (i3);
1802 int i = XVECLEN (i3pat, 0)(((((i3pat)->u.fld[0]).rt_rtvec))->num_elem) - 1;
1803 unsigned int regno = REGNO (XEXP (elt, 0))(rhs_regno((((elt)->u.fld[0]).rt_rtx)));
1804
1805 do
1806 {
1807 rtx i3elt = XVECEXP (i3pat, 0, i)(((((i3pat)->u.fld[0]).rt_rtvec))->elem[i]);
1808
1809 if (GET_CODE (i3elt)((enum rtx_code) (i3elt)->code) == USE
1810 && REG_P (XEXP (i3elt, 0))(((enum rtx_code) ((((i3elt)->u.fld[0]).rt_rtx))->code)
== REG)
1811 && (REGNO (XEXP (i3elt, 0))(rhs_regno((((i3elt)->u.fld[0]).rt_rtx))) == regno
1812 ? reg_set_between_p (XEXP (elt, 0)(((elt)->u.fld[0]).rt_rtx),
1813 PREV_INSN (insn), i3)
1814 : regno >= FIRST_PSEUDO_REGISTER76))
1815 return 0;
1816 }
1817 while (--i >= 0);
1818 }
1819 break;
1820
1821 /* We can ignore CLOBBERs. */
1822 case CLOBBER:
1823 break;
1824
1825 case SET:
1826 /* Ignore SETs whose result isn't used but not those that
1827 have side-effects. */
1828 if (find_reg_note (insn, REG_UNUSED, SET_DEST (elt)(((elt)->u.fld[0]).rt_rtx))
1829 && insn_nothrow_p (insn)
1830 && !side_effects_p (elt))
1831 break;
1832
1833 /* If we have already found a SET, this is a second one and
1834 so we cannot combine with this insn. */
1835 if (set)
1836 return 0;
1837
1838 set = elt;
1839 break;
1840
1841 default:
1842 /* Anything else means we can't combine. */
1843 return 0;
1844 }
1845 }
1846
1847 if (set == 0
1848 /* If SET_SRC is an ASM_OPERANDS we can't throw away these CLOBBERs,
1849 so don't do anything with it. */
1850 || GET_CODE (SET_SRC (set))((enum rtx_code) ((((set)->u.fld[1]).rt_rtx))->code) == ASM_OPERANDS)
1851 return 0;
1852 }
1853 else
1854 return 0;
1855
1856 if (set == 0)
1857 return 0;
1858
1859 /* The simplification in expand_field_assignment may call back to
1860 get_last_value, so set safe guard here. */
1861 subst_low_luid = DF_INSN_LUID (insn)((((df->insns[(INSN_UID (insn))]))->luid));
1862
1863 set = expand_field_assignment (set);
1864 src = SET_SRC (set)(((set)->u.fld[1]).rt_rtx), dest = SET_DEST (set)(((set)->u.fld[0]).rt_rtx);
1865
1866 /* Do not eliminate user-specified register if it is in an
1867 asm input because we may break the register asm usage defined
1868 in GCC manual if allow to do so.
1869 Be aware that this may cover more cases than we expect but this
1870 should be harmless. */
1871 if (REG_P (dest)(((enum rtx_code) (dest)->code) == REG) && REG_USERVAR_P (dest)(__extension__ ({ __typeof ((dest)) const _rtx = ((dest)); if
(((enum rtx_code) (_rtx)->code) != REG) rtl_check_failed_flag
("REG_USERVAR_P", _rtx, "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/combine.cc"
, 1871, __FUNCTION__); _rtx; })->volatil)
&& HARD_REGISTER_P (dest)((((rhs_regno(dest))) < 76))
1872 && extract_asm_operands (PATTERN (i3)))
1873 return 0;
1874
1875 /* Don't eliminate a store in the stack pointer. */
1876 if (dest == stack_pointer_rtx((this_target_rtl->x_global_rtl)[GR_STACK_POINTER])
1877 /* Don't combine with an insn that sets a register to itself if it has
1878 a REG_EQUAL note. This may be part of a LIBCALL sequence. */
1879 || (rtx_equal_p (src, dest) && find_reg_note (insn, REG_EQUAL, NULL_RTX(rtx) 0))
1880 /* Can't merge an ASM_OPERANDS. */
1881 || GET_CODE (src)((enum rtx_code) (src)->code) == ASM_OPERANDS
1882 /* Can't merge a function call. */
1883 || GET_CODE (src)((enum rtx_code) (src)->code) == CALL
1884 /* Don't eliminate a function call argument. */
1885 || (CALL_P (i3)(((enum rtx_code) (i3)->code) == CALL_INSN)
1886 && (find_reg_fusage (i3, USE, dest)
1887 || (REG_P (dest)(((enum rtx_code) (dest)->code) == REG)
1888 && REGNO (dest)(rhs_regno(dest)) < FIRST_PSEUDO_REGISTER76
1889 && global_regs[REGNO (dest)(rhs_regno(dest))])))
1890 /* Don't substitute into an incremented register. */
1891 || FIND_REG_INC_NOTE (i3, dest)0
1892 || (succ && FIND_REG_INC_NOTE (succ, dest)0)
1893 || (succ2 && FIND_REG_INC_NOTE (succ2, dest)0)
1894 /* Don't substitute into a non-local goto, this confuses CFG. */
1895 || (JUMP_P (i3)(((enum rtx_code) (i3)->code) == JUMP_INSN) && find_reg_note (i3, REG_NON_LOCAL_GOTO, NULL_RTX(rtx) 0))
1896 /* Make sure that DEST is not used after INSN but before SUCC, or
1897 after SUCC and before SUCC2, or after SUCC2 but before I3. */
1898 || (!all_adjacent
1899 && ((succ2
1900 && (reg_used_between_p (dest, succ2, i3)
1901 || reg_used_between_p (dest, succ, succ2)))
1902 || (!succ2 && succ && reg_used_between_p (dest, succ, i3))
1903 || (!succ2 && !succ && reg_used_between_p (dest, insn, i3))
1904 || (succ
1905 /* SUCC and SUCC2 can be split halves from a PARALLEL; in
1906 that case SUCC is not in the insn stream, so use SUCC2
1907 instead for this test. */
1908 && reg_used_between_p (dest, insn,
1909 succ2
1910 && INSN_UID (succ) == INSN_UID (succ2)
1911 ? succ2 : succ))))
1912 /* Make sure that the value that is to be substituted for the register
1913 does not use any registers whose values alter in between. However,
1914 If the insns are adjacent, a use can't cross a set even though we
1915 think it might (this can happen for a sequence of insns each setting
1916 the same destination; last_set of that register might point to
1917 a NOTE). If INSN has a REG_EQUIV note, the register is always
1918 equivalent to the memory so the substitution is valid even if there
1919 are intervening stores. Also, don't move a volatile asm or
1920 UNSPEC_VOLATILE across any other insns. */
1921 || (! all_adjacent
1922 && (((!MEM_P (src)(((enum rtx_code) (src)->code) == MEM)
1923 || ! find_reg_note (insn, REG_EQUIV, src))
1924 && modified_between_p (src, insn, i3))
1925 || (GET_CODE (src)((enum rtx_code) (src)->code) == ASM_OPERANDS && MEM_VOLATILE_P (src)(__extension__ ({ __typeof ((src)) const _rtx = ((src)); if (
((enum rtx_code) (_rtx)->code) != MEM && ((enum rtx_code
) (_rtx)->code) != ASM_OPERANDS && ((enum rtx_code
) (_rtx)->code) != ASM_INPUT) rtl_check_failed_flag ("MEM_VOLATILE_P"
, _rtx, "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/combine.cc"
, 1925, __FUNCTION__); _rtx; })->volatil)
)
1926 || GET_CODE (src)((enum rtx_code) (src)->code) == UNSPEC_VOLATILE))
1927 /* Don't combine across a CALL_INSN, because that would possibly
1928 change whether the life span of some REGs crosses calls or not,
1929 and it is a pain to update that information.
1930 Exception: if source is a constant, moving it later can't hurt.
1931 Accept that as a special case. */
1932 || (DF_INSN_LUID (insn)((((df->insns[(INSN_UID (insn))]))->luid)) < last_call_luid && ! CONSTANT_P (src)((rtx_class[(int) (((enum rtx_code) (src)->code))]) == RTX_CONST_OBJ
)
))
1933 return 0;
1934
1935 /* DEST must be a REG. */
1936 if (REG_P (dest)(((enum rtx_code) (dest)->code) == REG))
1937 {
1938 /* If register alignment is being enforced for multi-word items in all
1939 cases except for parameters, it is possible to have a register copy
1940 insn referencing a hard register that is not allowed to contain the
1941 mode being copied and which would not be valid as an operand of most
1942 insns. Eliminate this problem by not combining with such an insn.
1943
1944 Also, on some machines we don't want to extend the life of a hard
1945 register. */
1946
1947 if (REG_P (src)(((enum rtx_code) (src)->code) == REG)
1948 && ((REGNO (dest)(rhs_regno(dest)) < FIRST_PSEUDO_REGISTER76
1949 && !targetm.hard_regno_mode_ok (REGNO (dest)(rhs_regno(dest)), GET_MODE (dest)((machine_mode) (dest)->mode)))
1950 /* Don't extend the life of a hard register unless it is
1951 user variable (if we have few registers) or it can't
1952 fit into the desired register (meaning something special
1953 is going on).
1954 Also avoid substituting a return register into I3, because
1955 reload can't handle a conflict with constraints of other
1956 inputs. */
1957 || (REGNO (src)(rhs_regno(src)) < FIRST_PSEUDO_REGISTER76
1958 && !targetm.hard_regno_mode_ok (REGNO (src)(rhs_regno(src)),
1959 GET_MODE (src)((machine_mode) (src)->mode)))))
1960 return 0;
1961 }
1962 else
1963 return 0;
1964
1965
1966 if (GET_CODE (PATTERN (i3))((enum rtx_code) (PATTERN (i3))->code) == PARALLEL)
1967 for (i = XVECLEN (PATTERN (i3), 0)(((((PATTERN (i3))->u.fld[0]).rt_rtvec))->num_elem) - 1; i >= 0; i--)
1968 if (GET_CODE (XVECEXP (PATTERN (i3), 0, i))((enum rtx_code) ((((((PATTERN (i3))->u.fld[0]).rt_rtvec))
->elem[i]))->code)
== CLOBBER)
1969 {
1970 rtx reg = XEXP (XVECEXP (PATTERN (i3), 0, i), 0)((((((((PATTERN (i3))->u.fld[0]).rt_rtvec))->elem[i]))->
u.fld[0]).rt_rtx)
;
1971
1972 /* If the clobber represents an earlyclobber operand, we must not
1973 substitute an expression containing the clobbered register.
1974 As we do not analyze the constraint strings here, we have to
1975 make the conservative assumption. However, if the register is
1976 a fixed hard reg, the clobber cannot represent any operand;
1977 we leave it up to the machine description to either accept or
1978 reject use-and-clobber patterns. */
1979 if (!REG_P (reg)(((enum rtx_code) (reg)->code) == REG)
1980 || REGNO (reg)(rhs_regno(reg)) >= FIRST_PSEUDO_REGISTER76
1981 || !fixed_regs(this_target_hard_regs->x_fixed_regs)[REGNO (reg)(rhs_regno(reg))])
1982 if (reg_overlap_mentioned_p (reg, src))
1983 return 0;
1984 }
1985
1986 /* If INSN contains anything volatile, or is an `asm' (whether volatile
1987 or not), reject, unless nothing volatile comes between it and I3 */
1988
1989 if (GET_CODE (src)((enum rtx_code) (src)->code) == ASM_OPERANDS || volatile_refs_p (src))
1990 {
1991 /* Make sure neither succ nor succ2 contains a volatile reference. */
1992 if (succ2 != 0 && volatile_refs_p (PATTERN (succ2)))
1993 return 0;
1994 if (succ != 0 && volatile_refs_p (PATTERN (succ)))
1995 return 0;
1996 /* We'll check insns between INSN and I3 below. */
1997 }
1998
1999 /* If INSN is an asm, and DEST is a hard register, reject, since it has
2000 to be an explicit register variable, and was chosen for a reason. */
2001
2002 if (GET_CODE (src)((enum rtx_code) (src)->code) == ASM_OPERANDS
2003 && REG_P (dest)(((enum rtx_code) (dest)->code) == REG) && REGNO (dest)(rhs_regno(dest)) < FIRST_PSEUDO_REGISTER76)
2004 return 0;
2005
2006 /* If INSN contains volatile references (specifically volatile MEMs),
2007 we cannot combine across any other volatile references.
2008 Even if INSN doesn't contain volatile references, any intervening
2009 volatile insn might affect machine state. */
2010
2011 is_volatile_p = volatile_refs_p (PATTERN (insn))
2012 ? volatile_refs_p
2013 : volatile_insn_p;
2014
2015 for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p))
2016 if (INSN_P (p)(((((enum rtx_code) (p)->code) == INSN) || (((enum rtx_code
) (p)->code) == JUMP_INSN) || (((enum rtx_code) (p)->code
) == CALL_INSN)) || (((enum rtx_code) (p)->code) == DEBUG_INSN
))
&& p != succ && p != succ2 && is_volatile_p (PATTERN (p)))
2017 return 0;
2018
2019 /* If INSN contains an autoincrement or autodecrement, make sure that
2020 register is not used between there and I3, and not already used in
2021 I3 either. Neither must it be used in PRED or SUCC, if they exist.
2022 Also insist that I3 not be a jump if using LRA; if it were one
2023 and the incremented register were spilled, we would lose.
2024 Reload handles this correctly. */
2025
2026 if (AUTO_INC_DEC0)
2027 for (link = REG_NOTES (insn)(((insn)->u.fld[6]).rt_rtx); link; link = XEXP (link, 1)(((link)->u.fld[1]).rt_rtx))
2028 if (REG_NOTE_KIND (link)((enum reg_note) ((machine_mode) (link)->mode)) == REG_INC
2029 && ((JUMP_P (i3)(((enum rtx_code) (i3)->code) == JUMP_INSN) && targetm.lra_p ())
2030 || reg_used_between_p (XEXP (link, 0)(((link)->u.fld[0]).rt_rtx), insn, i3)
2031 || (pred != NULL_RTX(rtx) 0
2032 && reg_overlap_mentioned_p (XEXP (link, 0)(((link)->u.fld[0]).rt_rtx), PATTERN (pred)))
2033 || (pred2 != NULL_RTX(rtx) 0
2034 && reg_overlap_mentioned_p (XEXP (link, 0)(((link)->u.fld[0]).rt_rtx), PATTERN (pred2)))
2035 || (succ != NULL_RTX(rtx) 0
2036 && reg_overlap_mentioned_p (XEXP (link, 0)(((link)->u.fld[0]).rt_rtx), PATTERN (succ)))
2037 || (succ2 != NULL_RTX(rtx) 0
2038 && reg_overlap_mentioned_p (XEXP (link, 0)(((link)->u.fld[0]).rt_rtx), PATTERN (succ2)))
2039 || reg_overlap_mentioned_p (XEXP (link, 0)(((link)->u.fld[0]).rt_rtx), PATTERN (i3))))
2040 return 0;
2041
2042 /* If we get here, we have passed all the tests and the combination is
2043 to be allowed. */
2044
2045 *pdest = dest;
2046 *psrc = src;
2047
2048 return 1;
2049}
2050
2051/* LOC is the location within I3 that contains its pattern or the component
2052 of a PARALLEL of the pattern. We validate that it is valid for combining.
2053
2054 One problem is if I3 modifies its output, as opposed to replacing it
2055 entirely, we can't allow the output to contain I2DEST, I1DEST or I0DEST as
2056 doing so would produce an insn that is not equivalent to the original insns.
2057
2058 Consider:
2059
2060 (set (reg:DI 101) (reg:DI 100))
2061 (set (subreg:SI (reg:DI 101) 0) <foo>)
2062
2063 This is NOT equivalent to:
2064
2065 (parallel [(set (subreg:SI (reg:DI 100) 0) <foo>)
2066 (set (reg:DI 101) (reg:DI 100))])
2067
2068 Not only does this modify 100 (in which case it might still be valid
2069 if 100 were dead in I2), it sets 101 to the ORIGINAL value of 100.
2070
2071 We can also run into a problem if I2 sets a register that I1
2072 uses and I1 gets directly substituted into I3 (not via I2). In that
2073 case, we would be getting the wrong value of I2DEST into I3, so we
2074 must reject the combination. This case occurs when I2 and I1 both
2075 feed into I3, rather than when I1 feeds into I2, which feeds into I3.
2076 If I1_NOT_IN_SRC is nonzero, it means that finding I1 in the source
2077 of a SET must prevent combination from occurring. The same situation
2078 can occur for I0, in which case I0_NOT_IN_SRC is set.
2079
2080 Before doing the above check, we first try to expand a field assignment
2081 into a set of logical operations.
2082
2083 If PI3_DEST_KILLED is nonzero, it is a pointer to a location in which
2084 we place a register that is both set and used within I3. If more than one
2085 such register is detected, we fail.
2086
2087 Return 1 if the combination is valid, zero otherwise. */
2088
2089static int
2090combinable_i3pat (rtx_insn *i3, rtx *loc, rtx i2dest, rtx i1dest, rtx i0dest,
2091 int i1_not_in_src, int i0_not_in_src, rtx *pi3dest_killed)
2092{
2093 rtx x = *loc;
2094
2095 if (GET_CODE (x)((enum rtx_code) (x)->code) == SET)
2096 {
2097 rtx set = x ;
2098 rtx dest = SET_DEST (set)(((set)->u.fld[0]).rt_rtx);
2099 rtx src = SET_SRC (set)(((set)->u.fld[1]).rt_rtx);
2100 rtx inner_dest = dest;
2101 rtx subdest;
2102
2103 while (GET_CODE (inner_dest)((enum rtx_code) (inner_dest)->code) == STRICT_LOW_PART
2104 || GET_CODE (inner_dest)((enum rtx_code) (inner_dest)->code) == SUBREG
2105 || GET_CODE (inner_dest)((enum rtx_code) (inner_dest)->code) == ZERO_EXTRACT)
2106 inner_dest = XEXP (inner_dest, 0)(((inner_dest)->u.fld[0]).rt_rtx);
2107
2108 /* Check for the case where I3 modifies its output, as discussed
2109 above. We don't want to prevent pseudos from being combined
2110 into the address of a MEM, so only prevent the combination if
2111 i1 or i2 set the same MEM. */
2112 if ((inner_dest != dest &&
2113 (!MEM_P (inner_dest)(((enum rtx_code) (inner_dest)->code) == MEM)
2114 || rtx_equal_p (i2dest, inner_dest)
2115 || (i1dest && rtx_equal_p (i1dest, inner_dest))
2116 || (i0dest && rtx_equal_p (i0dest, inner_dest)))
2117 && (reg_overlap_mentioned_p (i2dest, inner_dest)
2118 || (i1dest && reg_overlap_mentioned_p (i1dest, inner_dest))
2119 || (i0dest && reg_overlap_mentioned_p (i0dest, inner_dest))))
2120
2121 /* This is the same test done in can_combine_p except we can't test
2122 all_adjacent; we don't have to, since this instruction will stay
2123 in place, thus we are not considering increasing the lifetime of
2124 INNER_DEST.
2125
2126 Also, if this insn sets a function argument, combining it with
2127 something that might need a spill could clobber a previous
2128 function argument; the all_adjacent test in can_combine_p also
2129 checks this; here, we do a more specific test for this case. */
2130
2131 || (REG_P (inner_dest)(((enum rtx_code) (inner_dest)->code) == REG)
2132 && REGNO (inner_dest)(rhs_regno(inner_dest)) < FIRST_PSEUDO_REGISTER76
2133 && !targetm.hard_regno_mode_ok (REGNO (inner_dest)(rhs_regno(inner_dest)),
2134 GET_MODE (inner_dest)((machine_mode) (inner_dest)->mode)))
2135 || (i1_not_in_src && reg_overlap_mentioned_p (i1dest, src))
2136 || (i0_not_in_src && reg_overlap_mentioned_p (i0dest, src)))
2137 return 0;
2138
2139 /* If DEST is used in I3, it is being killed in this insn, so
2140 record that for later. We have to consider paradoxical
2141 subregs here, since they kill the whole register, but we
2142 ignore partial subregs, STRICT_LOW_PART, etc.
2143 Never add REG_DEAD notes for the FRAME_POINTER_REGNUM or the
2144 STACK_POINTER_REGNUM, since these are always considered to be
2145 live. Similarly for ARG_POINTER_REGNUM if it is fixed. */
2146 subdest = dest;
2147 if (GET_CODE (subdest)((enum rtx_code) (subdest)->code) == SUBREG && !partial_subreg_p (subdest))
2148 subdest = SUBREG_REG (subdest)(((subdest)->u.fld[0]).rt_rtx);
2149 if (pi3dest_killed
2150 && REG_P (subdest)(((enum rtx_code) (subdest)->code) == REG)
2151 && reg_referenced_p (subdest, PATTERN (i3))
2152 && REGNO (subdest)(rhs_regno(subdest)) != FRAME_POINTER_REGNUM19
2153 && (HARD_FRAME_POINTER_IS_FRAME_POINTER(6 == 19)
2154 || REGNO (subdest)(rhs_regno(subdest)) != HARD_FRAME_POINTER_REGNUM6)
2155 && (FRAME_POINTER_REGNUM19 == ARG_POINTER_REGNUM16
2156 || (REGNO (subdest)(rhs_regno(subdest)) != ARG_POINTER_REGNUM16
2157 || ! fixed_regs(this_target_hard_regs->x_fixed_regs) [REGNO (subdest)(rhs_regno(subdest))]))
2158 && REGNO (subdest)(rhs_regno(subdest)) != STACK_POINTER_REGNUM7)
2159 {
2160 if (*pi3dest_killed)
2161 return 0;
2162
2163 *pi3dest_killed = subdest;
2164 }
2165 }
2166
2167 else if (GET_CODE (x)((enum rtx_code) (x)->code) == PARALLEL)
2168 {
2169 int i;
2170
2171 for (i = 0; i < XVECLEN (x, 0)(((((x)->u.fld[0]).rt_rtvec))->num_elem); i++)
2172 if (! combinable_i3pat (i3, &XVECEXP (x, 0, i)(((((x)->u.fld[0]).rt_rtvec))->elem[i]), i2dest, i1dest, i0dest,
2173 i1_not_in_src, i0_not_in_src, pi3dest_killed))
2174 return 0;
2175 }
2176
2177 return 1;
2178}
2179
2180/* Return 1 if X is an arithmetic expression that contains a multiplication
2181 and division. We don't count multiplications by powers of two here. */
2182
2183static int
2184contains_muldiv (rtx x)
2185{
2186 switch (GET_CODE (x)((enum rtx_code) (x)->code))
2187 {
2188 case MOD: case DIV: case UMOD: case UDIV:
2189 return 1;
2190
2191 case MULT:
2192 return ! (CONST_INT_P (XEXP (x, 1))(((enum rtx_code) ((((x)->u.fld[1]).rt_rtx))->code) == CONST_INT
)
2193 && pow2p_hwi (UINTVAL (XEXP (x, 1))((unsigned long) (((((x)->u.fld[1]).rt_rtx))->u.hwint[0
]))
));
2194 default:
2195 if (BINARY_P (x)(((rtx_class[(int) (((enum rtx_code) (x)->code))]) & (
~3)) == (RTX_COMPARE & (~3)))
)
2196 return contains_muldiv (XEXP (x, 0)(((x)->u.fld[0]).rt_rtx))
2197 || contains_muldiv (XEXP (x, 1)(((x)->u.fld[1]).rt_rtx));
2198
2199 if (UNARY_P (x)((rtx_class[(int) (((enum rtx_code) (x)->code))]) == RTX_UNARY
)
)
2200 return contains_muldiv (XEXP (x, 0)(((x)->u.fld[0]).rt_rtx));
2201
2202 return 0;
2203 }
2204}
2205
2206/* Determine whether INSN can be used in a combination. Return nonzero if
2207 not. This is used in try_combine to detect early some cases where we
2208 can't perform combinations. */
2209
2210static int
2211cant_combine_insn_p (rtx_insn *insn)
2212{
2213 rtx set;
2214 rtx src, dest;
2215
2216 /* If this isn't really an insn, we can't do anything.
2217 This can occur when flow deletes an insn that it has merged into an
2218 auto-increment address. */
2219 if (!NONDEBUG_INSN_P (insn)((((enum rtx_code) (insn)->code) == INSN) || (((enum rtx_code
) (insn)->code) == JUMP_INSN) || (((enum rtx_code) (insn)->
code) == CALL_INSN))
)
2220 return 1;
2221
2222 /* Never combine loads and stores involving hard regs that are likely
2223 to be spilled. The register allocator can usually handle such
2224 reg-reg moves by tying. If we allow the combiner to make
2225 substitutions of likely-spilled regs, reload might die.
2226 As an exception, we allow combinations involving fixed regs; these are
2227 not available to the register allocator so there's no risk involved. */
2228
2229 set = single_set (insn);
2230 if (! set)
2231 return 0;
2232 src = SET_SRC (set)(((set)->u.fld[1]).rt_rtx);
2233 dest = SET_DEST (set)(((set)->u.fld[0]).rt_rtx);
2234 if (GET_CODE (src)((enum rtx_code) (src)->code) == SUBREG)
2235 src = SUBREG_REG (src)(((src)->u.fld[0]).rt_rtx);
2236 if (GET_CODE (dest)((enum rtx_code) (dest)->code) == SUBREG)
2237 dest = SUBREG_REG (dest)(((dest)->u.fld[0]).rt_rtx);
2238 if (REG_P (src)(((enum rtx_code) (src)->code) == REG) && REG_P (dest)(((enum rtx_code) (dest)->code) == REG)
2239 && ((HARD_REGISTER_P (src)((((rhs_regno(src))) < 76))
2240 && ! TEST_HARD_REG_BIT (fixed_reg_set(this_target_hard_regs->x_fixed_reg_set), REGNO (src)(rhs_regno(src)))
2241#ifdef LEAF_REGISTERS
2242 && ! LEAF_REGISTERS [REGNO (src)(rhs_regno(src))])
2243#else
2244 )
2245#endif
2246 || (HARD_REGISTER_P (dest)((((rhs_regno(dest))) < 76))
2247 && ! TEST_HARD_REG_BIT (fixed_reg_set(this_target_hard_regs->x_fixed_reg_set), REGNO (dest)(rhs_regno(dest)))
2248 && targetm.class_likely_spilled_p (REGNO_REG_CLASS (REGNO (dest))(regclass_map[((rhs_regno(dest)))])))))
2249 return 1;
2250
2251 return 0;
2252}
2253
2254struct likely_spilled_retval_info
2255{
2256 unsigned regno, nregs;
2257 unsigned mask;
2258};
2259
2260/* Called via note_stores by likely_spilled_retval_p. Remove from info->mask
2261 hard registers that are known to be written to / clobbered in full. */
2262static void
2263likely_spilled_retval_1 (rtx x, const_rtx set, void *data)
2264{
2265 struct likely_spilled_retval_info *const info =
2266 (struct likely_spilled_retval_info *) data;
2267 unsigned regno, nregs;
2268 unsigned new_mask;
2269
2270 if (!REG_P (XEXP (set, 0))(((enum rtx_code) ((((set)->u.fld[0]).rt_rtx))->code) ==
REG)
)
2271 return;
2272 regno = REGNO (x)(rhs_regno(x));
2273 if (regno >= info->regno + info->nregs)
2274 return;
2275 nregs = REG_NREGS (x)((&(x)->u.reg)->nregs);
2276 if (regno + nregs <= info->regno)
2277 return;
2278 new_mask = (2U << (nregs - 1)) - 1;
2279 if (regno < info->regno)
2280 new_mask >>= info->regno - regno;
2281 else
2282 new_mask <<= regno - info->regno;
2283 info->mask &= ~new_mask;
2284}
2285
2286/* Return nonzero iff part of the return value is live during INSN, and
2287 it is likely spilled. This can happen when more than one insn is needed
2288 to copy the return value, e.g. when we consider to combine into the
2289 second copy insn for a complex value. */
2290
2291static int
2292likely_spilled_retval_p (rtx_insn *insn)
2293{
2294 rtx_insn *use = BB_END (this_basic_block)(this_basic_block)->il.x.rtl->end_;
2295 rtx reg;
2296 rtx_insn *p;
2297 unsigned regno, nregs;
2298 /* We assume here that no machine mode needs more than
2299 32 hard registers when the value overlaps with a register
2300 for which TARGET_FUNCTION_VALUE_REGNO_P is true. */
2301 unsigned mask;
2302 struct likely_spilled_retval_info info;
2303
2304 if (!NONJUMP_INSN_P (use)(((enum rtx_code) (use)->code) == INSN) || GET_CODE (PATTERN (use))((enum rtx_code) (PATTERN (use))->code) != USE || insn == use)
2305 return 0;
2306 reg = XEXP (PATTERN (use), 0)(((PATTERN (use))->u.fld[0]).rt_rtx);
2307 if (!REG_P (reg)(((enum rtx_code) (reg)->code) == REG) || !targetm.calls.function_value_regno_p (REGNO (reg)(rhs_regno(reg))))
2308 return 0;
2309 regno = REGNO (reg)(rhs_regno(reg));
2310 nregs = REG_NREGS (reg)((&(reg)->u.reg)->nregs);
2311 if (nregs == 1)
2312 return 0;
2313 mask = (2U << (nregs - 1)) - 1;
2314
2315 /* Disregard parts of the return value that are set later. */
2316 info.regno = regno;
2317 info.nregs = nregs;
2318 info.mask = mask;
2319 for (p = PREV_INSN (use); info.mask && p != insn; p = PREV_INSN (p))
2320 if (INSN_P (p)(((((enum rtx_code) (p)->code) == INSN) || (((enum rtx_code
) (p)->code) == JUMP_INSN) || (((enum rtx_code) (p)->code
) == CALL_INSN)) || (((enum rtx_code) (p)->code) == DEBUG_INSN
))
)
2321 note_stores (p, likely_spilled_retval_1, &info);
2322 mask = info.mask;
2323
2324 /* Check if any of the (probably) live return value registers is
2325 likely spilled. */
2326 nregs --;
2327 do
2328 {
2329 if ((mask & 1 << nregs)
2330 && targetm.class_likely_spilled_p (REGNO_REG_CLASS (regno + nregs)(regclass_map[(regno + nregs)])))
2331 return 1;
2332 } while (nregs--);
2333 return 0;
2334}
2335
2336/* Adjust INSN after we made a change to its destination.
2337
2338 Changing the destination can invalidate notes that say something about
2339 the results of the insn and a LOG_LINK pointing to the insn. */
2340
2341static void
2342adjust_for_new_dest (rtx_insn *insn)
2343{
2344 /* For notes, be conservative and simply remove them. */
2345 remove_reg_equal_equiv_notes (insn, true);
2346
2347 /* The new insn will have a destination that was previously the destination
2348 of an insn just above it. Call distribute_links to make a LOG_LINK from
2349 the next use of that destination. */
2350
2351 rtx set = single_set (insn);
2352 gcc_assert (set)((void)(!(set) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/combine.cc"
, 2352, __FUNCTION__), 0 : 0))
;
2353
2354 rtx reg = SET_DEST (set)(((set)->u.fld[0]).rt_rtx);
2355
2356 while (GET_CODE (reg)((enum rtx_code) (reg)->code) == ZERO_EXTRACT
2357 || GET_CODE (reg)((enum rtx_code) (reg)->code) == STRICT_LOW_PART
2358 || GET_CODE (reg)((enum rtx_code) (reg)->code) == SUBREG)
2359 reg = XEXP (reg, 0)(((reg)->u.fld[0]).rt_rtx);
2360 gcc_assert (REG_P (reg))((void)(!((((enum rtx_code) (reg)->code) == REG)) ? fancy_abort
("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/combine.cc"
, 2360, __FUNCTION__), 0 : 0))
;
2361
2362 distribute_links (alloc_insn_link (insn, REGNO (reg)(rhs_regno(reg)), NULLnullptr));
2363
2364 df_insn_rescan (insn);
2365}
2366
2367/* Return TRUE if combine can reuse reg X in mode MODE.
2368 ADDED_SETS is nonzero if the original set is still required. */
2369static bool
2370can_change_dest_mode (rtx x, int added_sets, machine_mode mode)
2371{
2372 unsigned int regno;
2373
2374 if (!REG_P (x)(((enum rtx_code) (x)->code) == REG))
2375 return false;
2376
2377 /* Don't change between modes with different underlying register sizes,
2378 since this could lead to invalid subregs. */
2379 if (maybe_ne (REGMODE_NATURAL_SIZE (mode)ix86_regmode_natural_size (mode),
2380 REGMODE_NATURAL_SIZE (GET_MODE (x))ix86_regmode_natural_size (((machine_mode) (x)->mode))))
2381 return false;
2382
2383 regno = REGNO (x)(rhs_regno(x));
2384 /* Allow hard registers if the new mode is legal, and occupies no more
2385 registers than the old mode. */
2386 if (regno < FIRST_PSEUDO_REGISTER76)
2387 return (targetm.hard_regno_mode_ok (regno, mode)
2388 && REG_NREGS (x)((&(x)->u.reg)->nregs) >= hard_regno_nregs (regno, mode));
2389
2390 /* Or a pseudo that is only used once. */
2391 return (regno < reg_n_sets_max
2392 && REG_N_SETS (regno) == 1
2393 && !added_sets
2394 && !REG_USERVAR_P (x)(__extension__ ({ __typeof ((x)) const _rtx = ((x)); if (((enum
rtx_code) (_rtx)->code) != REG) rtl_check_failed_flag ("REG_USERVAR_P"
, _rtx, "/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/combine.cc"
, 2394, __FUNCTION__); _rtx; })->volatil)
);
2395}
2396
2397
2398/* Check whether X, the destination of a set, refers to part of
2399 the register specified by REG. */
2400
2401static bool
2402reg_subword_p (rtx x, rtx reg)
2403{
2404 /* Check that reg is an integer mode register. */
2405 if (!REG_P (reg)(((enum rtx_code) (reg)->code) == REG) || GET_MODE_CLASS (GET_MODE (reg))((enum mode_class) mode_class[((machine_mode) (reg)->mode)
])
!= MODE_INT)
2406 return false;
2407
2408 if (GET_CODE (x)((enum rtx_code) (x)->code) == STRICT_LOW_PART
2409 || GET_CODE (x)((enum rtx_code) (x)->code) == ZERO_EXTRACT)
2410 x = XEXP (x, 0)(((x)->u.fld[0]).rt_rtx);
2411
2412 return GET_CODE (x)((enum rtx_code) (x)->code) == SUBREG
2413 && !paradoxical_subreg_p (x)
2414 && SUBREG_REG (x)(((x)->u.fld[0]).rt_rtx) == reg
2415 && GET_MODE_CLASS (GET_MODE (x))((enum mode_class) mode_class[((machine_mode) (x)->mode)]) == MODE_INT;
2416}
2417
2418/* Return whether PAT is a PARALLEL of exactly N register SETs followed
2419 by an arbitrary number of CLOBBERs. */
2420static bool
2421is_parallel_of_n_reg_sets (rtx pat, int n)
2422{
2423 if (GET_CODE (pat)((enum rtx_code) (pat)->code) != PARALLEL)
2424 return false;
2425
2426 int len = XVECLEN (pat, 0)(((((pat)->u.fld[0]).rt_rtvec))->num_elem);
2427 if (len < n)
2428 return false;
2429
2430 int i;
2431 for (i = 0; i < n; i++)
2432 if (GET_CODE (XVECEXP (pat, 0, i))((enum rtx_code) ((((((pat)->u.fld[0]).rt_rtvec))->elem
[i]))->code)
!= SET
2433 || !REG_P (SET_DEST (XVECEXP (pat, 0, i)))(((enum rtx_code) (((((((((pat)->u.fld[0]).rt_rtvec))->
elem[i]))->u.fld[0]).rt_rtx))->code) == REG)
)
2434 return false;
2435 for ( ; i < len; i++)
2436 switch (GET_CODE (XVECEXP (pat, 0, i))((enum rtx_code) ((((((pat)->u.fld[0]).rt_rtvec))->elem
[i]))->code)
)
2437 {
2438 case CLOBBER:
2439 if (XEXP (XVECEXP (pat, 0, i), 0)((((((((pat)->u.fld[0]).rt_rtvec))->elem[i]))->u.fld
[0]).rt_rtx)
== const0_rtx(const_int_rtx[64]))
2440 return false;
2441 break;
2442 default:
2443 return false;
2444 }
2445 return true;
2446}
2447
2448/* Return whether INSN, a PARALLEL of N register SETs (and maybe some
2449 CLOBBERs), can be split into individual SETs in that order, without
2450 changing semantics. */
2451static bool
2452can_split_parallel_of_n_reg_sets (rtx_insn *insn, int n)
2453{
2454 if (!insn_nothrow_p (insn))
2455 return false;
2456
2457 rtx pat = PATTERN (insn);
2458
2459 int i, j;
2460 for (i = 0; i < n; i++)
2461 {
2462 if (side_effects_p (SET_SRC (XVECEXP (pat, 0, i))((((((((pat)->u.fld[0]).rt_rtvec))->elem[i]))->u.fld
[1]).rt_rtx)
))
2463 return false;
2464
2465 rtx reg = SET_DEST (XVECEXP (pat, 0, i))((((((((pat)->u.fld[0]).rt_rtvec))->elem[i]))->u.fld
[0]).rt_rtx)
;
2466
2467 for (j = i + 1; j < n; j++)
2468 if (reg_referenced_p (reg, XVECEXP (pat, 0, j)(((((pat)->u.fld[0]).rt_rtvec))->elem[j])))
2469 return false;
2470 }
2471
2472 return true;
2473}
2474
2475/* Return whether X is just a single_set, with the source
2476 a general_operand. */
2477static bool
2478is_just_move (rtx_insn *x)
2479{
2480 rtx set = single_set (x);
2481 if (!set)
2482 return false;
2483
2484 return general_operand (SET_SRC (set)(((set)->u.fld[1]).rt_rtx), VOIDmode((void) 0, E_VOIDmode));
2485}
2486
2487/* Callback function to count autoincs. */
2488
2489static int
2490count_auto_inc (rtx, rtx, rtx, rtx, rtx, void *arg)
2491{
2492 (*((int *) arg))++;
2493
2494 return 0;
2495}
2496
2497/* Try to combine the insns I0, I1 and I2 into I3.
2498 Here I0, I1 and I2 appear earlier than I3.
2499 I0 and I1 can be zero; then we combine just I2 into I3, or I1 and I2 into
2500 I3.
2501
2502 If we are combining more than two insns and the resulting insn is not
2503 recognized, try splitting it into two insns. If that happens, I2 and I3
2504 are retained and I1/I0 are pseudo-deleted by turning them into a NOTE.
2505 Otherwise, I0, I1 and I2 are pseudo-deleted.
2506
2507 Return 0 if the combination does not work. Then nothing is changed.
2508 If we did the combination, return the insn at which combine should
2509 resume scanning.
2510
2511 Set NEW_DIRECT_JUMP_P to a nonzero value if try_combine creates a
2512 new direct jump instruction.
2513
2514 LAST_COMBINED_INSN is either I3, or some insn after I3 that has
2515 been I3 passed to an earlier try_combine within the same basic
2516 block. */
2517
2518static rtx_insn *
2519try_combine (rtx_insn *i3, rtx_insn *i2, rtx_insn *i1, rtx_insn *i0,
2520 int *new_direct_jump_p, rtx_insn *last_combined_insn)
2521{
2522 /* New patterns for I3 and I2, respectively. */
2523 rtx newpat, newi2pat = 0;
2524 rtvec newpat_vec_with_clobbers = 0;
2525 int substed_i2 = 0, substed_i1 = 0, substed_i0 = 0;
2526 /* Indicates need to preserve SET in I0, I1 or I2 in I3 if it is not
2527 dead. */
2528 int added_sets_0, added_sets_1, added_sets_2;
2529 /* Total number of SETs to put into I3. */
2530 int total_sets;
2531 /* Nonzero if I2's or I1's body now appears in I3. */
2532 int i2_is_used = 0, i1_is_used = 0;
2533 /* INSN_CODEs for new I3, new I2, and user of condition code. */
2534 int insn_code_number, i2_code_number = 0, other_code_number = 0;
2535 /* Contains I3 if the destination of I3 is used in its source, which means
2536 that the old life of I3 is being killed. If that usage is placed into
2537 I2 and not in I3, a REG_DEAD note must be made. */
2538 rtx i3dest_killed = 0;
2539 /* SET_DEST and SET_SRC of I2, I1 and I0. */
2540 rtx i2dest = 0, i2src = 0, i1dest = 0, i1src = 0, i0dest = 0, i0src = 0;
2541 /* Copy of SET_SRC of I1 and I0, if needed. */
2542 rtx i1src_copy = 0, i0src_copy = 0, i0src_copy2 = 0;
2543 /* Set if I2DEST was reused as a scratch register. */
2544 bool i2scratch = false;
2545 /* The PATTERNs of I0, I1, and I2, or a copy of them in certain cases. */
2546 rtx i0pat = 0, i1pat = 0, i2pat = 0;
2547 /* Indicates if I2DEST or I1DEST is in I2SRC or I1_SRC. */
2548 int i2dest_in_i2src = 0, i1dest_in_i1src = 0, i2dest_in_i1src = 0;
2549 int i0dest_in_i0src = 0, i1dest_in_i0src = 0, i2dest_in_i0src = 0;
2550 int i2dest_killed = 0, i1dest_killed = 0, i0dest_killed = 0;
2551 int i1_feeds_i2_n = 0, i0_feeds_i2_n = 0, i0_feeds_i1_n = 0;
2552 /* Notes that must be added to REG_NOTES in I3 and I2. */
2553 rtx new_i3_notes, new_i2_notes;
2554 /* Notes that we substituted I3 into I2 instead of the normal case. */
2555 int i3_subst_into_i2 = 0;
2556 /* Notes that I1, I2 or I3 is a MULT operation. */
2557 int have_mult = 0;
2558 int swap_i2i3 = 0;
2559 int split_i2i3 = 0;
2560 int changed_i3_dest = 0;
2561 bool i2_was_move = false, i3_was_move = false;
2562 int n_auto_inc = 0;
2563
2564 int maxreg;
2565 rtx_insn *temp_insn;
2566 rtx temp_expr;
2567 struct insn_link *link;
2568 rtx other_pat = 0;
2569 rtx new_other_notes;
2570 int i;
2571 scalar_int_mode dest_mode, temp_mode;
2572 bool has_non_call_exception = false;
2573
2574 /* Immediately return if any of I0,I1,I2 are the same insn (I3 can
2575 never be). */
2576 if (i1 == i2 || i0 == i2 || (i0 && i0 == i1))
2577 return 0;
2578
2579 /* Only try four-insn combinations when there's high likelihood of
2580 success. Look for simple insns, such as loads of constants or
2581 binary operations involving a constant. */
2582 if (i0)
2583 {
2584 int i;
2585 int ngood = 0;
2586 int nshift = 0;
2587 rtx set0, set3;
2588
2589 if (!flag_expensive_optimizationsglobal_options.x_flag_expensive_optimizations)
2590 return 0;
2591
2592 for (i = 0; i < 4; i++)
2593 {
2594 rtx_insn *insn = i == 0 ? i0 : i == 1 ? i1 : i == 2 ? i2 : i3;
2595 rtx set = single_set (insn);
2596 rtx src;
2597 if (!set)
2598 continue;
2599 src = SET_SRC (set)(((set)->u.fld[1]).rt_rtx);
2600 if (CONSTANT_P (src)((rtx_class[(int) (((enum rtx_code) (src)->code))]) == RTX_CONST_OBJ
)
)
2601 {
2602 ngood += 2;
2603 break;
2604 }
2605 else if (BINARY_P (src)(((rtx_class[(int) (((enum rtx_code) (src)->code))]) &
(~3)) == (RTX_COMPARE & (~3)))
&& CONSTANT_P (XEXP (src, 1))((rtx_class[(int) (((enum rtx_code) ((((src)->u.fld[1]).rt_rtx
))->code))]) == RTX_CONST_OBJ)
)
2606 ngood++;
2607 else if (GET_CODE (src)((enum rtx_code) (src)->code) == ASHIFT || GET_CODE (src)((enum rtx_code) (src)->code) == ASHIFTRT
2608 || GET_CODE (src)((enum rtx_code) (src)->code) == LSHIFTRT)
2609 nshift++;
2610 }
2611
2612 /* If I0 loads a memory and I3 sets the same memory, then I1 and I2
2613 are likely manipulating its value. Ideally we'll be able to combine
2614 all four insns into a bitfield insertion of some kind.
2615
2616 Note the source in I0 might be inside a sign/zero extension and the
2617 memory modes in I0 and I3 might be different. So extract the address
2618 from the destination of I3 and search for it in the source of I0.
2619
2620 In the event that there's a match but the source/dest do not actually
2621 refer to the same memory, the worst that happens is we try some
2622 combinations that we wouldn't have otherwise. */
2623 if ((set0 = single_set (i0))
2624 /* Ensure the source of SET0 is a MEM, possibly buried inside
2625 an extension. */
2626 && (GET_CODE (SET_SRC (set0))((enum rtx_code) ((((set0)->u.fld[1]).rt_rtx))->code) == MEM
2627 || ((GET_CODE (SET_SRC (set0))((enum rtx_code) ((((set0)->u.fld[1]).rt_rtx))->code) == ZERO_EXTEND
2628 || GET_CODE (SET_SRC (set0))((enum rtx_code) ((((set0)->u.fld[1]).rt_rtx))->code) == SIGN_EXTEND)
2629 && GET_CODE (XEXP (SET_SRC (set0), 0))((enum rtx_code) (((((((set0)->u.fld[1]).rt_rtx))->u.fld
[0]).rt_rtx))->code)
== MEM))
2630 && (set3 = single_set (i3))
2631 /* Ensure the destination of SET3 is a MEM. */
2632 && GET_CODE (SET_DEST (set3))((enum rtx_code) ((((set3)->u.fld[0]).rt_rtx))->code) == MEM
2633 /* Would it be better to extract the base address for the MEM
2634 in SET3 and look for that? I don't have cases where it matters
2635 but I could envision such cases. */
2636 && rtx_referenced_p (XEXP (SET_DEST (set3), 0)((((((set3)->u.fld[0]).rt_rtx))->u.fld[0]).rt_rtx), SET_SRC (set0)(((set0)->u.fld[1]).rt_rtx)))
2637 ngood += 2;
2638
2639 if (ngood < 2 && nshift < 2)
2640 return 0;
2641 }
2642
2643 /* Exit early if one of the insns involved can't be used for
2644 combinations. */
2645 if (CALL_P (i2)(((enum rtx_code) (i2)->code) == CALL_INSN)
2646 || (i1 && CALL_P (i1)(((enum rtx_code) (i1)->code) == CALL_INSN))
2647 || (i0 && CALL_P (i0)(((enum rtx_code) (i0)->code) == CALL_INSN))
2648 || cant_combine_insn_p (i3)
2649 || cant_combine_insn_p (i2)
2650 || (i1 && cant_combine_insn_p (i1))
2651 || (i0 && cant_combine_insn_p (i0))
2652 || likely_spilled_retval_p (i3))
2653 return 0;
2654
2655 combine_attempts++;
2656 undobuf.other_insn = 0;
2657
2658 /* Reset the hard register usage information. */
2659 CLEAR_HARD_REG_SET (newpat_used_regs);
2660
2661 if (dump_file && (dump_flags & TDF_DETAILS))
2662 {
2663 if (i0)
2664 fprintf (dump_file, "\nTrying %d, %d, %d -> %d:\n",
2665 INSN_UID (i0), INSN_UID (i1), INSN_UID (i2), INSN_UID (i3));
2666 else if (i1)
2667 fprintf (dump_file, "\nTrying %d, %d -> %d:\n",
2668 INSN_UID (i1), INSN_UID (i2), INSN_UID (i3));
2669 else
2670 fprintf (dump_file, "\nTrying %d -> %d:\n",
2671 INSN_UID (i2), INSN_UID (i3));
2672
2673 if (i0)
2674 dump_insn_slim (dump_file, i0);
2675 if (i1)
2676 dump_insn_slim (dump_file, i1);
2677 dump_insn_slim (dump_file, i2);
2678 dump_insn_slim (dump_file, i3);
2679 }
2680
2681 /* If multiple insns feed into one of I2 or I3, they can be in any
2682 order. To simplify the code below, reorder them in sequence. */
2683 if (i0 && DF_INSN_LUID (i0)((((df->insns[(INSN_UID (i0))]))->luid)) > DF_INSN_LUID (i2)((((df->insns[(INSN_UID (i2))]))->luid)))
2684 std::swap (i0, i2);
2685 if (i0 && DF_INSN_LUID (i0)((((df->insns[(INSN_UID (i0))]))->luid)) > DF_INSN_LUID (i1)((((df->insns[(INSN_UID (i1))]))->luid)))
2686 std::swap (i0, i1);
2687 if (i1 && DF_INSN_LUID (i1)((((df->insns[(INSN_UID (i1))]))->luid)) > DF_INSN_LUID (i2)((((df->insns[(INSN_UID (i2))]))->luid)))
2688 std::swap (i1, i2);
2689
2690 added_links_insn = 0;
2691 added_notes_insn = 0;
2692
2693 /* First check for one important special case that the code below will
2694 not handle. Namely, the case where I1 is zero, I2 is a PARALLEL
2695 and I3 is a SET whose SET_SRC is a SET_DEST in I2. In that case,
2696 we may be able to replace that destination with the destination of I3.
2697 This occurs in the common code where we compute both a quotient and
2698 remainder into a structure, in which case we want to do the computation
2699 directly into the structure to avoid register-register copies.
2700
2701 Note that this case handles both multiple sets in I2 and also cases
2702 where I2 has a number of CLOBBERs inside the PARALLEL.
2703
2704 We make very conservative checks below and only try to handle the
2705 most common cases of this. For example, we only handle the case
2706 where I2 and I3 are adjacent to avoid making difficult register
2707 usage tests. */
2708
2709 if (i1 == 0 && NONJUMP_INSN_P (i3)(((enum rtx_code) (i3)->code) == INSN) && GET_CODE (PATTERN (i3))((enum rtx_code) (PATTERN (i3))->code) == SET
2710 && REG_P (SET_SRC (PATTERN (i3)))(((enum rtx_code) ((((PATTERN (i3))->u.fld[1]).rt_rtx))->
code) == REG)
2711 && REGNO (SET_SRC (PATTERN (i3)))(rhs_regno((((PATTERN (i3))->u.fld[1]).rt_rtx))) >= FIRST_PSEUDO_REGISTER76
2712 && find_reg_note (i3, REG_DEAD, SET_SRC (PATTERN (i3))(((PATTERN (i3))->u.fld[1]).rt_rtx))
2713 && GET_CODE (PATTERN (i2))((enum rtx_code) (PATTERN (i2))->code) == PARALLEL
2714 && ! side_effects_p (SET_DEST (PATTERN (i3))(((PATTERN (i3))->u.fld[0]).rt_rtx))
2715 /* If the dest of I3 is a ZERO_EXTRACT or STRICT_LOW_PART, the code
2716 below would need to check what is inside (and reg_overlap_mentioned_p
2717 doesn't support those codes anyway). Don't allow those destinations;
2718 the resulting insn isn't likely to be recognized anyway. */
2719 && GET_CODE (SET_DEST (PATTERN (i3)))((enum rtx_code) ((((PATTERN (i3))->u.fld[0]).rt_rtx))->
code)
!= ZERO_EXTRACT
2720 && GET_CODE (SET_DEST (PATTERN (i3)))((enum rtx_code) ((((PATTERN (i3))->u.fld[0]).rt_rtx))->
code)
!= STRICT_LOW_PART
2721 && ! reg_overlap_mentioned_p (SET_SRC (PATTERN (i3))(((PATTERN (i3))->u.fld[1]).rt_rtx),
2722 SET_DEST (PATTERN (i3))(((PATTERN (i3))->u.fld[0]).rt_rtx))
2723 && next_active_insn (i2) == i3)
2724 {
2725 rtx p2 = PATTERN (i2);
2726
2727 /* Make sure that the destination of I3,
2728 which we are going to substitute into one output of I2,
2729 is not used within another output of I2. We must avoid making this:
2730 (parallel [(set (mem (reg 69)) ...)
2731 (set (reg 69) ...)])
2732 which is not well-defined as to order of actions.
2733 (Besides, reload can't handle output reloads for this.)
2734
2735 The problem can also happen if the dest of I3 is a memory ref,
2736 if another dest in I2 is an indirect memory ref.
2737
2738 Neither can this PARALLEL be an asm. We do not allow combining
2739 that usually (see can_combine_p), so do not here either. */
2740 bool ok = true;
2741 for (i = 0; ok && i < XVECLEN (p2, 0)(((((p2)->u.fld[0]).rt_rtvec))->num_elem); i++)
2742 {
2743 if ((GET_CODE (XVECEXP (p2, 0, i))((enum rtx_code) ((((((p2)->u.fld[0]).rt_rtvec))->elem[
i]))->code)
== SET
2744 || GET_CODE (XVECEXP (p2, 0, i))((enum rtx_code) ((((((p2)->u.fld[0]).rt_rtvec))->elem[
i]))->code)
== CLOBBER)
2745 && reg_overlap_mentioned_p (SET_DEST (PATTERN (i3))(((PATTERN (i3))->u.fld[0]).rt_rtx),
2746 SET_DEST (XVECEXP (p2, 0, i))((((((((p2)->u.fld[0]).rt_rtvec))->elem[i]))->u.fld[
0]).rt_rtx)
))
2747 ok = false;
2748 else if (GET_CODE (XVECEXP (p2, 0, i))((enum rtx_code) ((((((p2)->u.fld[0]).rt_rtvec))->elem[
i]))->code)
== SET
2749 && GET_CODE (SET_SRC (XVECEXP (p2, 0, i)))((enum rtx_code) (((((((((p2)->u.fld[0]).rt_rtvec))->elem
[i]))->u.fld[1]).rt_rtx))->code)
== ASM_OPERANDS)
2750 ok = false;
2751 }
2752
2753 if (ok)
2754 for (i = 0; i < XVECLEN (p2, 0)(((((p2)->u.fld[0]).rt_rtvec))->num_elem); i++)
2755 if (GET_CODE (XVECEXP (p2, 0, i))((enum rtx_code) ((((((p2)->u.fld[0]).rt_rtvec))->elem[
i]))->code)
== SET
2756 && SET_DEST (XVECEXP (p2, 0, i))((((((((p2)->u.fld[0]).rt_rtvec))->elem[i]))->u.fld[
0]).rt_rtx)
== SET_SRC (PATTERN (i3))(((PATTERN (i3))->u.fld[1]).rt_rtx))
2757 {
2758 combine_merges++;
2759
2760 subst_insn = i3;
2761 subst_low_luid = DF_INSN_LUID (i2)((((df->insns[(INSN_UID (i2))]))->luid));
2762
2763 added_sets_2 = added_sets_1 = added_sets_0 = 0;
2764 i2src = SET_SRC (XVECEXP (p2, 0, i))((((((((p2)->u.fld[0]).rt_rtvec))->elem[i]))->u.fld[
1]).rt_rtx)
;
2765 i2dest = SET_DEST (XVECEXP (p2, 0, i))((((((((p2)->u.fld[0]).rt_rtvec))->elem[i]))->u.fld[
0]).rt_rtx)
;
2766 i2dest_killed = dead_or_set_p (i2, i2dest);
2767
2768 /* Replace the dest in I2 with our dest and make the resulting
2769 insn the new pattern for I3. Then skip to where we validate
2770 the pattern. Everything was set up above. */
2771 SUBST (SET_DEST (XVECEXP (p2, 0, i)), SET_DEST (PATTERN (i3)))do_SUBST (&(((((((((p2)->u.fld[0]).rt_rtvec))->elem
[i]))->u.fld[0]).rt_rtx)), ((((PATTERN (i3))->u.fld[0])
.rt_rtx)))
;
2772 newpat = p2;
2773 i3_subst_into_i2 = 1;
2774 goto validate_replacement;
2775 }
2776 }
2777
2778 /* If I2 is setting a pseudo to a constant and I3 is setting some
2779 sub-part of it to another constant, merge them by making a new
2780 constant. */
2781 if (i1 == 0
2782 && (temp_expr = single_set (i2)) != 0
2783 && is_a <scalar_int_mode> (GET_MODE (SET_DEST (temp_expr))((machine_mode) ((((temp_expr)->u.fld[0]).rt_rtx))->mode
)
, &temp_mode)
2784 && CONST_SCALAR_INT_P (SET_SRC (temp_expr))((((enum rtx_code) ((((temp_expr)->u.fld[1]).rt_rtx))->
code) == CONST_INT) || (((enum rtx_code) ((((temp_expr)->u
.fld[1]).rt_rtx))->code) == CONST_WIDE_INT))
2785 && GET_CODE (PATTERN (i3))((enum rtx_code) (PATTERN (i3))->code) == SET
2786 && CONST_SCALAR_INT_P (SET_SRC (PATTERN (i3)))((((enum rtx_code) ((((PATTERN (i3))->u.fld[1]).rt_rtx))->
code) == CONST_INT) || (((enum rtx_code) ((((PATTERN (i3))->
u.fld[1]).rt_rtx))->code) == CONST_WIDE_INT))
2787 && reg_subword_p (SET_DEST (PATTERN (i3))(((PATTERN (i3))->u.fld[0]).rt_rtx), SET_DEST (temp_expr)(((temp_expr)->u.fld[0]).rt_rtx)))
2788 {
2789 rtx dest = SET_DEST (PATTERN (i3))(((PATTERN (i3))->u.fld[0]).rt_rtx);
2790 rtx temp_dest = SET_DEST (temp_expr)(((temp_expr)->u.fld[0]).rt_rtx);
2791 int offset = -1;
2792 int width = 0;
2793
2794 if (GET_CODE (dest)((enum rtx_code) (dest)->code) == ZERO_EXTRACT)
2795 {
2796 if (CONST_INT_P (XEXP (dest, 1))(((enum rtx_code) ((((dest)->u.fld[1]).rt_rtx))->code) ==
CONST_INT)
2797 && CONST_INT_P (XEXP (dest, 2))(((enum rtx_code) ((((dest)->u.fld[2]).rt_rtx))->code) ==
CONST_INT)
2798 && is_a <scalar_int_mode> (GET_MODE (XEXP (dest, 0))((machine_mode) ((((dest)->u.fld[0]).rt_rtx))->mode),
2799 &dest_mode))
2800 {
2801 width = INTVAL (XEXP (dest, 1))(((((dest)->u.fld[1]).rt_rtx))->u.hwint[0]);
2802 offset = INTVAL (XEXP (dest, 2))(((((dest)->u.fld[2]).rt_rtx))->u.hwint[0]);
2803 dest = XEXP (dest, 0)(((dest)->u.fld[0]).rt_rtx);
2804 if (BITS_BIG_ENDIAN0)
2805 offset = GET_MODE_PRECISION (dest_mode) - width - offset;
2806 }
2807 }
2808 else
2809 {
2810 if (GET_CODE (dest)((enum rtx_code) (dest)->code) == STRICT_LOW_PART)
2811 dest = XEXP (dest, 0)(((dest)->u.fld[0]).rt_rtx);
2812 if (is_a <scalar_int_mode> (GET_MODE (dest)((machine_mode) (dest)->mode), &dest_mode))
2813 {
2814 width = GET_MODE_PRECISION (dest_mode);
2815 offset = 0;
2816 }
2817 }
2818
2819 if (offset >= 0)
2820 {
2821 /* If this is the low part, we're done. */
2822 if (subreg_lowpart_p (dest))
2823 ;
2824 /* Handle the case where inner is twice the size of outer. */
2825 else if (GET_MODE_PRECISION (temp_mode)
2826 == 2 * GET_MODE_PRECISION (dest_mode))
2827 offset += GET_MODE_PRECISION (dest_mode);
2828 /* Otherwise give up for now. */
2829 else
2830 offset = -1;
2831 }
2832
2833 if (offset >= 0)
2834 {
2835 rtx inner = SET_SRC (PATTERN (i3))(((PATTERN (i3))->u.fld[1]).rt_rtx);
2836 rtx outer = SET_SRC (temp_expr)(((temp_expr)->u.fld[1]).rt_rtx);
2837
2838 wide_int o = wi::insert (rtx_mode_t (outer, temp_mode),
2839 rtx_mode_t (inner, dest_mode),
2840 offset, width);
2841
2842 combine_merges++;
2843 subst_insn = i3;
2844 subst_low_luid = DF_INSN_LUID (i2)((((df->insns[(INSN_UID (i2))]))->luid));
2845 added_sets_2 = added_sets_1 = added_sets_0 = 0;
2846 i2dest = temp_dest;
2847 i2dest_killed = dead_or_set_p (i2, i2dest);
2848
2849 /* Replace the source in I2 with the new constant and make the
2850 resulting insn the new pattern for I3. Then skip to where we
2851 validate the pattern. Everything was set up above. */
2852 SUBST (SET_SRC (temp_expr),do_SUBST (&((((temp_expr)->u.fld[1]).rt_rtx)), (immed_wide_int_const
(o, temp_mode)))
2853 immed_wide_int_const (o, temp_mode))do_SUBST (&((((temp_expr)->u.fld[1]).rt_rtx)), (immed_wide_int_const
(o, temp_mode)))
;
2854
2855 newpat = PATTERN (i2);
2856
2857 /* The dest of I3 has been replaced with the dest of I2. */
2858 changed_i3_dest = 1;
2859 goto validate_replacement;
2860 }
2861 }
2862
2863 /* If we have no I1 and I2 looks like:
2864 (parallel [(set (reg:CC X) (compare:CC OP (const_int 0)))
2865 (set Y OP)])
2866 make up a dummy I1 that is
2867 (set Y OP)
2868 and change I2 to be
2869 (set (reg:CC X) (compare:CC Y (const_int 0)))
2870
2871 (We can ignore any trailing CLOBBERs.)
2872
2873 This undoes a previous combination and allows us to match a branch-and-
2874 decrement insn. */
2875
2876 if (i1 == 0
2877 && is_parallel_of_n_reg_sets (PATTERN (i2), 2)
2878 && (GET_MODE_CLASS (GET_MODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 0))))((enum mode_class) mode_class[((machine_mode) (((((((((PATTERN
(i2))->u.fld[0]).rt_rtvec))->elem[0]))->u.fld[0]).rt_rtx
))->mode)])
2879 == MODE_CC)
2880 && GET_CODE (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)))((enum rtx_code) (((((((((PATTERN (i2))->u.fld[0]).rt_rtvec
))->elem[0]))->u.fld[1]).rt_rtx))->code)
== COMPARE
2881 && XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 1)(((((((((((PATTERN (i2))->u.fld[0]).rt_rtvec))->elem[0]
))->u.fld[1]).rt_rtx))->u.fld[1]).rt_rtx)
== const0_rtx(const_int_rtx[64])
2882 && rtx_equal_p (XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 0)(((((((((((PATTERN (i2))->u.fld[0]).rt_rtvec))->elem[0]
))->u.fld[1]).rt_rtx))->u.fld[0]).rt_rtx)
,
2883 SET_SRC (XVECEXP (PATTERN (i2), 0, 1))((((((((PATTERN (i2))->u.fld[0]).rt_rtvec))->elem[1]))->
u.fld[1]).rt_rtx)
)
2884 && !reg_used_between_p (SET_DEST (XVECEXP (PATTERN (i2), 0, 0))((((((((PATTERN (i2))->u.fld[0]).rt_rtvec))->elem[0]))->
u.fld[0]).rt_rtx)
, i2, i3)
2885 && !reg_used_between_p (SET_DEST (XVECEXP (PATTERN (i2), 0, 1))((((((((PATTERN (i2))->u.fld[0]).rt_rtvec))->elem[1]))->
u.fld[0]).rt_rtx)
, i2, i3))
2886 {
2887 /* We make I1 with the same INSN_UID as I2. This gives it
2888 the same DF_INSN_LUID for value tracking. Our fake I1 will
2889 never appear in the insn stream so giving it the same INSN_UID
2890 as I2 will not cause a problem. */
2891
2892 i1 = gen_rtx_INSN (VOIDmode((void) 0, E_VOIDmode), NULLnullptr, i2, BLOCK_FOR_INSN (i2),
2893 XVECEXP (PATTERN (i2), 0, 1)(((((PATTERN (i2))->u.fld[0]).rt_rtvec))->elem[1]), INSN_LOCATION (i2),
2894 -1, NULL_RTX(rtx) 0);
2895 INSN_UID (i1) = INSN_UID (i2);
2896
2897 SUBST (PATTERN (i2), XVECEXP (PATTERN (i2), 0, 0))do_SUBST (&(PATTERN (i2)), ((((((PATTERN (i2))->u.fld[
0]).rt_rtvec))->elem[0])))
;
2898 SUBST (XEXP (SET_SRC (PATTERN (i2)), 0),do_SUBST (&(((((((PATTERN (i2))->u.fld[1]).rt_rtx))->
u.fld[0]).rt_rtx)), ((((PATTERN (i1))->u.fld[0]).rt_rtx)))
2899 SET_DEST (PATTERN (i1)))do_SUBST (&(((((((PATTERN (i2))->u.fld[1]).rt_rtx))->
u.fld[0]).rt_rtx)), ((((PATTERN (i1))->u.fld[0]).rt_rtx)))
;
2900 unsigned int regno = REGNO (SET_DEST (PATTERN (i1)))(rhs_regno((((PATTERN (i1))->u.fld[0]).rt_rtx)));
2901 SUBST_LINK (LOG_LINKS (i2),do_SUBST_LINK (&(uid_log_links[insn_uid_check (i2)]), alloc_insn_link
(i1, regno, (uid_log_links[insn_uid_check (i2)])))
2902 alloc_insn_link (i1, regno, LOG_LINKS (i2)))do_SUBST_LINK (&(uid_log_links[insn_uid_check (i2)]), alloc_insn_link
(i1, regno, (uid_log_links[insn_uid_check (i2)])))
;
2903 }
2904
2905 /* If I2 is a PARALLEL of two SETs of REGs (and perhaps some CLOBBERs),
2906 make those two SETs separate I1 and I2 insns, and make an I0 that is
2907 the original I1. */
2908 if (i0 == 0
2909 && is_parallel_of_n_reg_sets (PATTERN (i2), 2)
2910 && can_split_parallel_of_n_reg_sets (i2, 2)
2911 && !reg_used_between_p (SET_DEST (XVECEXP (PATTERN (i2), 0, 0))((((((((PATTERN (i2))->u.fld[0]).rt_rtvec))->elem[0]))->
u.fld[0]).rt_rtx)
, i2, i3)
2912 && !reg_used_between_p (SET_DEST (XVECEXP (PATTERN (i2), 0, 1))((((((((PATTERN (i2))->u.fld[0]).rt_rtvec))->elem[1]))->
u.fld[0]).rt_rtx)
, i2, i3)
2913 && !reg_set_between_p (SET_DEST (XVECEXP (PATTERN (i2), 0, 0))((((((((PATTERN (i2))->u.fld[0]).rt_rtvec))->elem[0]))->
u.fld[0]).rt_rtx)
, i2, i3)
2914 && !reg_set_between_p (SET_DEST (XVECEXP (PATTERN (i2), 0, 1))((((((((PATTERN (i2))->u.fld[0]).rt_rtvec))->elem[1]))->
u.fld[0]).rt_rtx)
, i2, i3))
2915 {
2916 /* If there is no I1, there is no I0 either. */
2917 i0 = i1;
2918
2919 /* We make I1 with the same INSN_UID as I2. This gives it
2920 the same DF_INSN_LUID for value tracking. Our fake I1 will
2921 never appear in the insn stream so giving it the same INSN_UID
2922 as I2 will not cause a problem. */
2923
2924 i1 = gen_rtx_INSN (VOIDmode((void) 0, E_VOIDmode), NULLnullptr, i2, BLOCK_FOR_INSN (i2),
2925 XVECEXP (PATTERN (i2), 0, 0)(((((PATTERN (i2))->u.fld[0]).rt_rtvec))->elem[0]), INSN_LOCATION (i2),
2926 -1, NULL_RTX(rtx) 0);
2927 INSN_UID (i1) = INSN_UID (i2);
2928
2929 SUBST (PATTERN (i2), XVECEXP (PATTERN (i2), 0, 1))do_SUBST (&(PATTERN (i2)), ((((((PATTERN (i2))->u.fld[
0]).rt_rtvec))->elem[1])))
;
2930 }
2931
2932 /* Verify that I2 and maybe I1 and I0 can be combined into I3. */
2933 if (!can_combine_p (i2, i3, i0, i1, NULLnullptr, NULLnullptr, &i2dest, &i2src))
2934 {
2935 if (dump_file && (dump_flags & TDF_DETAILS))
2936 fprintf (dump_file, "Can't combine i2 into i3\n");
2937 undo_all ();
2938 return 0;
2939 }
2940 if (i1 && !can_combine_p (i1, i3, i0, NULLnullptr, i2, NULLnullptr, &i1dest, &i1src))
2941 {
2942 if (dump_file && (dump_flags & TDF_DETAILS))
2943 fprintf (dump_file, "Can't combine i1 into i3\n");
2944 undo_all ();
2945 return 0;
2946 }
2947 if (i0 && !can_combine_p (i0, i3, NULLnullptr, NULLnullptr, i1, i2, &i0dest, &i0src))
2948 {
2949 if (dump_file && (dump_flags & TDF_DETAILS))
2950 fprintf (dump_file, "Can't combine i0 into i3\n");
2951 undo_all ();
2952 return 0;
2953 }
2954
2955 /* With non-call exceptions we can end up trying to combine multiple
2956 insns with possible EH side effects. Make sure we can combine
2957 that to a single insn which means there must be at most one insn
2958 in the combination with an EH side effect. */
2959 if (cfun(cfun + 0)->can_throw_non_call_exceptions)
2960 {
2961 if (find_reg_note (i3, REG_EH_REGION, NULL_RTX(rtx) 0)
2962 || find_reg_note (i2, REG_EH_REGION, NULL_RTX(rtx) 0)
2963 || (i1 && find_reg_note (i1, REG_EH_REGION, NULL_RTX(rtx) 0))
2964 || (i0 && find_reg_note (i0, REG_EH_REGION, NULL_RTX(rtx) 0)))
2965 {
2966 has_non_call_exception = true;
2967 if (insn_could_throw_p (i3)
2968 + insn_could_throw_p (i2)
2969 + (i1 ? insn_could_throw_p (i1) : 0)
2970 + (i0 ? insn_could_throw_p (i0) : 0) > 1)
2971 {
2972 if (dump_file && (dump_flags & TDF_DETAILS))
2973 fprintf (dump_file, "Can't combine multiple insns with EH "
2974 "side-effects\n");
2975 undo_all ();
2976 return 0;
2977 }
2978 }
2979 }
2980
2981 /* Record whether i2 and i3 are trivial moves. */
2982 i2_was_move = is_just_move (i2);
2983 i3_was_move = is_just_move (i3);
2984
2985 /* Record whether I2DEST is used in I2SRC and similarly for the other
2986 cases. Knowing this will help in register status updating below. */
2987 i2dest_in_i2src = reg_overlap_mentioned_p (i2dest, i2src);
2988 i1dest_in_i1src = i1 && reg_overlap_mentioned_p (i1dest, i1src);
2989 i2dest_in_i1src = i1 && reg_overlap_mentioned_p (i2dest, i1src);
2990 i0dest_in_i0src = i0 && reg_overlap_mentioned_p (i0dest, i0src);
2991 i1dest_in_i0src = i0 && reg_overlap_mentioned_p (i1dest, i0src);
2992 i2dest_in_i0src = i0 && reg_overlap_mentioned_p (i2dest, i0src);
2993 i2dest_killed = dead_or_set_p (i2, i2dest);
2994 i1dest_killed = i1 && dead_or_set_p (i1, i1dest);
2995 i0dest_killed = i0 && dead_or_set_p (i0, i0dest);
2996
2997 /* For the earlier insns, determine which of the subsequent ones they
2998 feed. */
2999 i1_feeds_i2_n = i1 && insn_a_feeds_b (i1, i2);
3000 i0_feeds_i1_n = i0 && insn_a_feeds_b (i0, i1);
3001 i0_feeds_i2_n = (i0 && (!i0_feeds_i1_n ? insn_a_feeds_b (i0, i2)
3002 : (!reg_overlap_mentioned_p (i1dest, i0dest)
3003 && reg_overlap_mentioned_p (i0dest, i2src))));
3004
3005 /* Ensure that I3's pattern can be the destination of combines. */
3006 if (! combinable_i3pat (i3, &PATTERN (i3), i2dest, i1dest, i0dest,
3007 i1 && i2dest_in_i1src && !i1_feeds_i2_n,
3008 i0 && ((i2dest_in_i0src && !i0_feeds_i2_n)
3009 || (i1dest_in_i0src && !i0_feeds_i1_n)),
3010 &i3dest_killed))
3011 {
3012 undo_all ();
3013 return 0;
3014 }
3015
3016 /* See if any of the insns is a MULT operation. Unless one is, we will
3017 reject a combination that is, since it must be slower. Be conservative
3018 here. */
3019 if (GET_CODE (i2src)((enum rtx_code) (i2src)->code) == MULT
3020 || (i1 != 0 && GET_CODE (i1src)((enum rtx_code) (i1src)->code) == MULT)
3021 || (i0 != 0 && GET_CODE (i0src)((enum rtx_code) (i0src)->code) == MULT)
3022 || (GET_CODE (PATTERN (i3))((enum rtx_code) (PATTERN (i3))->code) == SET
3023 && GET_CODE (SET_SRC (PATTERN (i3)))((enum rtx_code) ((((PATTERN (i3))->u.fld[1]).rt_rtx))->
code)
== MULT))
3024 have_mult = 1;
3025
3026 /* If I3 has an inc, then give up if I1 or I2 uses the reg that is inc'd.
3027 We used to do this EXCEPT in one case: I3 has a post-inc in an
3028 output operand. However, that exception can give rise to insns like
3029 mov r3,(r3)+
3030 which is a famous insn on the PDP-11 where the value of r3 used as the
3031 source was model-dependent. Avoid this sort of thing. */
3032
3033#if 0
3034 if (!(GET_CODE (PATTERN (i3))((enum rtx_code) (PATTERN (i3))->code) == SET
3035 && REG_P (SET_SRC (PATTERN (i3)))(((enum rtx_code) ((((PATTERN (i3))->u.fld[1]).rt_rtx))->
code) == REG)
3036 && MEM_P (SET_DEST (PATTERN (i3)))(((enum rtx_code) ((((PATTERN (i3))->u.fld[0]).rt_rtx))->
code) == MEM)
3037 && (GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0))((enum rtx_code) (((((((PATTERN (i3))->u.fld[0]).rt_rtx))->
u.fld[0]).rt_rtx))->code)
== POST_INC
3038 || GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0))((enum rtx_code) (((((((PATTERN (i3))->u.fld[0]).rt_rtx))->
u.fld[0]).rt_rtx))->code)
== POST_DEC)))
3039 /* It's not the exception. */
3040#endif
3041 if (AUTO_INC_DEC0)
3042 {
3043 rtx link;
3044 for (link = REG_NOTES (i3)(((i3)->u.fld[6]).rt_rtx); link; link = XEXP (link, 1)(((link)->u.fld[1]).rt_rtx))
3045 if (REG_NOTE_KIND (link)((enum reg_note) ((machine_mode) (link)->mode)) == REG_INC
3046 && (reg_overlap_mentioned_p (XEXP (link, 0)(((link)->u.fld[0]).rt_rtx), PATTERN (i2))
3047 || (i1 != 0
3048 && reg_overlap_mentioned_p (XEXP (link, 0)(((link)->u.fld[0]).rt_rtx), PATTERN (i1)))))
3049 {
3050 undo_all ();
3051 return 0;
3052 }
3053 }
3054
3055 /* See if the SETs in I1 or I2 need to be kept around in the merged
3056 instruction: whenever the value set there is still needed past I3.
3057 For the SET in I2, this is easy: we see if I2DEST dies or is set in I3.
3058
3059 For the SET in I1, we have two cases: if I1 and I2 independently feed
3060 into I3, the set in I1 needs to be kept around unless I1DEST dies
3061 or is set in I3. Otherwise (if I1 feeds I2 which feeds I3), the set
3062 in I1 needs to be kept around unless I1DEST dies or is set in either
3063 I2 or I3. The same considerations apply to I0. */
3064
3065 added_sets_2 = !dead_or_set_p (i3, i2dest);
3066
3067 if (i1)
3068 added_sets_1 = !(dead_or_set_p (i3, i1dest)
3069 || (i1_feeds_i2_n && dead_or_set_p (i2, i1dest)));
3070 else
3071 added_sets_1 = 0;
3072
3073 if (i0)
3074 added_sets_0 = !(dead_or_set_p (i3, i0dest)
3075 || (i0_feeds_i1_n && dead_or_set_p (i1, i0dest))
3076 || ((i0_feeds_i2_n || (i0_feeds_i1_n && i1_feeds_i2_n))
3077 && dead_or_set_p (i2, i0dest)));
3078 else
3079 added_sets_0 = 0;
3080
3081 /* We are about to copy insns for the case where they need to be kept
3082 around. Check that they can be copied in the merged instruction. */
3083
3084 if (targetm.cannot_copy_insn_p
3085 && ((added_sets_2 && targetm.cannot_copy_insn_p (i2))
3086 || (i1 && added_sets_1 && targetm.cannot_copy_insn_p (i1))
3087 || (i0 && added_sets_0 && targetm.cannot_copy_insn_p (i0))))
3088 {
3089 undo_all ();
3090 return 0;
3091 }
3092
3093 /* We cannot safely duplicate volatile references in any case. */
3094
3095 if ((added_sets_2 && volatile_refs_p (PATTERN (i2)))
3096 || (added_sets_1 && volatile_refs_p (PATTERN (i1)))
3097 || (added_sets_0 && volatile_refs_p (PATTERN (i0))))
3098 {
3099 undo_all ();
3100 return 0;
3101 }
3102
3103 /* Count how many auto_inc expressions there were in the original insns;
3104 we need to have the same number in the resulting patterns. */
3105
3106 if (i0)
3107 for_each_inc_dec (PATTERN (i0), count_auto_inc, &n_auto_inc);
3108 if (i1)
3109 for_each_inc_dec (PATTERN (i1), count_auto_inc, &n_auto_inc);
3110 for_each_inc_dec (PATTERN (i2), count_auto_inc, &n_auto_inc);
3111 for_each_inc_dec (PATTERN (i3), count_auto_inc, &n_auto_inc);
3112
3113 /* If the set in I2 needs to be kept around, we must make a copy of
3114 PATTERN (I2), so that when we substitute I1SRC for I1DEST in
3115 PATTERN (I2), we are only substituting for the original I1DEST, not into
3116 an already-substituted copy. This also prevents making self-referential
3117 rtx. If I2 is a PARALLEL, we just need the piece that assigns I2SRC to
3118 I2DEST. */
3119
3120 if (added_sets_2)
3121 {
3122 if (GET_CODE (PATTERN (i2))((enum rtx_code) (PATTERN (i2))->code) == PARALLEL)
3123 i2pat = gen_rtx_SET (i2dest, copy_rtx (i2src))gen_rtx_fmt_ee_stat ((SET), (((void) 0, E_VOIDmode)), ((i2dest
)), ((copy_rtx (i2src))) )
;
3124 else
3125 i2pat = copy_rtx (PATTERN (i2));
3126 }
3127
3128 if (added_sets_1)
3129 {
3130 if (GET_CODE (PATTERN (i1))((enum rtx_code) (PATTERN (i1))->code) == PARALLEL)
3131 i1pat = gen_rtx_SET (i1dest, copy_rtx (i1src))gen_rtx_fmt_ee_stat ((SET), (((void) 0, E_VOIDmode)), ((i1dest
)), ((copy_rtx (i1src))) )
;
3132 else
3133 i1pat = copy_rtx (PATTERN (i1));
3134 }
3135
3136 if (added_sets_0)
3137 {
3138 if (GET_CODE (PATTERN (i0))((enum rtx_code) (PATTERN (i0))->code) == PARALLEL)
3139 i0pat = gen_rtx_SET (i0dest, copy_rtx (i0src))gen_rtx_fmt_ee_stat ((SET), (((void) 0, E_VOIDmode)), ((i0dest
)), ((copy_rtx (i0src))) )
;
3140 else
3141 i0pat = copy_rtx (PATTERN (i0));
3142 }
3143
3144 combine_merges++;
3145
3146 /* Substitute in the latest insn for the regs set by the earlier ones. */
3147
3148 maxreg = max_reg_num ();
3149
3150 subst_insn = i3;
3151
3152 /* Many machines have insns that can both perform an
3153 arithmetic operation and set the condition code. These operations will
3154 be represented as a PARALLEL with the first element of the vector
3155 being a COMPARE of an arithmetic operation with the constant zero.
3156 The second element of the vector will set some pseudo to the result
3157 of the same arithmetic operation. If we simplify the COMPARE, we won't
3158 match such a pattern and so will generate an extra insn. Here we test
3159 for this case, where both the comparison and the operation result are
3160 needed, and make the PARALLEL by just replacing I2DEST in I3SRC with
3161 I2SRC. Later we will make the PARALLEL that contains I2. */
3162
3163 if (i1 == 0 && added_sets_2 && GET_CODE (PATTERN (i3))((enum rtx_code) (PATTERN (i3))->code) == SET
3164 && GET_CODE (SET_SRC (PATTERN (i3)))((enum rtx_code) ((((PATTERN (i3))->u.fld[1]).rt_rtx))->
code)
== COMPARE
3165 && CONST_INT_P (XEXP (SET_SRC (PATTERN (i3)), 1))(((enum rtx_code) (((((((PATTERN (i3))->u.fld[1]).rt_rtx))
->u.fld[1]).rt_rtx))->code) == CONST_INT)
3166 && rtx_equal_p (XEXP (SET_SRC (PATTERN (i3)), 0)((((((PATTERN (i3))->u.fld[1]).rt_rtx))->u.fld[0]).rt_rtx
)
, i2dest))
3167 {
3168 rtx newpat_dest;
3169 rtx *cc_use_loc = NULLnullptr;
3170 rtx_insn *cc_use_insn = NULLnullptr;
3171 rtx op0 = i2src, op1 = XEXP (SET_SRC (PATTERN (i3)), 1)((((((PATTERN (i3))->u.fld[1]).rt_rtx))->u.fld[1]).rt_rtx
)
;
3172 machine_mode compare_mode, orig_compare_mode;
3173 enum rtx_code compare_code = UNKNOWN, orig_compare_code = UNKNOWN;
3174 scalar_int_mode mode;
3175
3176 newpat = PATTERN (i3);
3177 newpat_dest = SET_DEST (newpat)(((newpat)->u.fld[0]).rt_rtx);
3178 compare_mode = orig_compare_mode = GET_MODE (newpat_dest)((machine_mode) (newpat_dest)->mode);
3179
3180 if (undobuf.other_insn == 0
3181 && (cc_use_loc = find_single_use (SET_DEST (newpat)(((newpat)->u.fld[0]).rt_rtx), i3,
3182 &cc_use_insn)))
3183 {
3184 compare_code = orig_compare_code = GET_CODE (*cc_use_loc)((enum rtx_code) (*cc_use_loc)->code);
3185 if (is_a <scalar_int_mode> (GET_MODE (i2dest)((machine_mode) (i2dest)->mode), &mode))
3186 compare_code = simplify_compare_const (compare_code, mode,
3187 op0, &op1);
3188 target_canonicalize_comparison (&compare_code, &op0, &op1, 1);
3189 }
3190
3191 /* Do the rest only if op1 is const0_rtx, which may be the
3192 result of simplification. */
3193 if (op1 == const0_rtx(const_int_rtx[64]))
3194 {
3195 /* If a single use of the CC is found, prepare to modify it
3196 when SELECT_CC_MODE returns a new CC-class mode, or when
3197 the above simplify_compare_const() returned a new comparison
3198 operator. undobuf.other_insn is assigned the CC use insn
3199 when modifying it. */
3200 if (cc_use_loc)
3201 {
3202#ifdef SELECT_CC_MODE
3203 machine_mode new_mode
3204 = SELECT_CC_MODE (compare_code, op0, op1)ix86_cc_mode ((compare_code), (op0), (op1));
3205 if (new_mode != orig_compare_mode
3206 && can_change_dest_mode (SET_DEST (newpat)(((newpat)->u.fld[0]).rt_rtx),
3207 added_sets_2, new_mode))
3208 {
3209 unsigned int regno = REGNO (newpat_dest)(rhs_regno(newpat_dest));
3210 compare_mode = new_mode;
3211 if (regno < FIRST_PSEUDO_REGISTER76)
3212 newpat_dest = gen_rtx_REG (compare_mode, regno);
3213 else
3214 {
3215 subst_mode (regno, compare_mode);
3216 newpat_dest = regno_reg_rtx[regno];
3217 }
3218 }
3219#endif
3220 /* Cases for modifying the CC-using comparison. */
3221 if (compare_code != orig_compare_code
3222 /* ??? Do we need to verify the zero rtx? */
3223 && XEXP (*cc_use_loc, 1)(((*cc_use_loc)->u.fld[1]).rt_rtx) == const0_rtx(const_int_rtx[64]))
3224 {
3225 /* Replace cc_use_loc with entire new RTX. */
3226 SUBST (*cc_use_loc,do_SUBST (&(*cc_use_loc), (gen_rtx_fmt_ee_stat ((compare_code
), (((machine_mode) (*cc_use_loc)->mode)), (newpat_dest), (
(const_int_rtx[64])) )))
3227 gen_rtx_fmt_ee (compare_code, GET_MODE (*cc_use_loc),do_SUBST (&(*cc_use_loc), (gen_rtx_fmt_ee_stat ((compare_code
), (((machine_mode) (*cc_use_loc)->mode)), (newpat_dest), (
(const_int_rtx[64])) )))
3228 newpat_dest, const0_rtx))do_SUBST (&(*cc_use_loc), (gen_rtx_fmt_ee_stat ((compare_code
), (((machine_mode) (*cc_use_loc)->mode)), (newpat_dest), (
(const_int_rtx[64])) )))
;
3229 undobuf.other_insn = cc_use_insn;
3230 }
3231 else if (compare_mode != orig_compare_mode)
3232 {
3233 /* Just replace the CC reg with a new mode. */
3234 SUBST (XEXP (*cc_use_loc, 0), newpat_dest)do_SUBST (&((((*cc_use_loc)->u.fld[0]).rt_rtx)), (newpat_dest
))
;
3235 undobuf.other_insn = cc_use_insn;
3236 }
3237 }
3238
3239 /* Now we modify the current newpat:
3240 First, SET_DEST(newpat) is updated if the CC mode has been
3241 altered. For targets without SELECT_CC_MODE, this should be
3242 optimized away. */
3243 if (compare_mode != orig_compare_mode)
3244 SUBST (SET_DEST (newpat), newpat_dest)do_SUBST (&((((newpat)->u.fld[0]).rt_rtx)), (newpat_dest
))
;
3245 /* This is always done to propagate i2src into newpat. */
3246 SUBST (SET_SRC (newpat),do_SUBST (&((((newpat)->u.fld[1]).rt_rtx)), (gen_rtx_fmt_ee_stat
((COMPARE), ((compare_mode)), ((op0)), ((op1)) )))
3247 gen_rtx_COMPARE (compare_mode, op0, op1))do_SUBST (&((((newpat)->u.fld[1]).rt_rtx)), (gen_rtx_fmt_ee_stat
((COMPARE), ((compare_mode)), ((op0)), ((op1)) )))
;
3248 /* Create new version of i2pat if needed; the below PARALLEL
3249 creation needs this to work correctly. */
3250 if (! rtx_equal_p (i2src, op0))
3251 i2pat = gen_rtx_SET (i2dest, op0)gen_rtx_fmt_ee_stat ((SET), (((void) 0, E_VOIDmode)), ((i2dest
)), ((op0)) )
;
3252 i2_is_used = 1;
3253 }
3254 }
3255
3256 if (i2_is_used == 0)
3257 {
3258 /* It is possible that the source of I2 or I1 may be performing
3259 an unneeded operation, such as a ZERO_EXTEND of something
3260 that is known to have the high part zero. Handle that case
3261 by letting subst look at the inner insns.
3262
3263 Another way to do this would be to have a function that tries
3264 to simplify a single insn instead of merging two or more
3265 insns. We don't do this because of the potential of infinite
3266 loops and because of the potential extra memory required.
3267 However, doing it the way we are is a bit of a kludge and
3268 doesn't catch all cases.
3269
3270 But only do this if -fexpensive-optimizations since it slows
3271 things down and doesn't usually win.
3272
3273 This is not done in the COMPARE case above because the
3274 unmodified I2PAT is used in the PARALLEL and so a pattern
3275 with a modified I2SRC would not match. */
3276
3277 if (flag_expensive_optimizationsglobal_options.x_flag_expensive_optimizations)
3278 {
3279 /* Pass pc_rtx so no substitutions are done, just
3280 simplifications. */
3281 if (i1)
3282 {
3283 subst_low_luid = DF_INSN_LUID (i1)((((df->insns[(INSN_UID (i1))]))->luid));
3284 i1src = subst (i1src, pc_rtx, pc_rtx, 0, 0, 0);
3285 }
3286
3287 subst_low_luid = DF_INSN_LUID (i2)((((df->insns[(INSN_UID (i2))]))->luid));
3288 i2src = subst (i2src, pc_rtx, pc_rtx, 0, 0, 0);
3289 }
3290
3291 n_occurrences = 0; /* `subst' counts here */
3292 subst_low_luid = DF_INSN_LUID (i2)((((df->insns[(INSN_UID (i2))]))->luid));
3293
3294 /* If I1 feeds into I2 and I1DEST is in I1SRC, we need to make a unique
3295 copy of I2SRC each time we substitute it, in order to avoid creating
3296 self-referential RTL when we will be substituting I1SRC for I1DEST
3297 later. Likewise if I0 feeds into I2, either directly or indirectly
3298 through I1, and I0DEST is in I0SRC. */
3299 newpat = subst (PATTERN (i3), i2dest, i2src, 0, 0,
3300 (i1_feeds_i2_n && i1dest_in_i1src)
3301 || ((i0_feeds_i2_n || (i0_feeds_i1_n && i1_feeds_i2_n))
3302 && i0dest_in_i0src));
3303 substed_i2 = 1;
3304
3305 /* Record whether I2's body now appears within I3's body. */
3306 i2_is_used = n_occurrences;
3307 }
3308
3309 /* If we already got a failure, don't try to do more. Otherwise, try to
3310 substitute I1 if we have it. */
3311
3312 if (i1 && GET_CODE (newpat)((enum rtx_code) (newpat)->code) != CLOBBER)
3313 {
3314 /* Before we can do this substitution, we must redo the test done
3315 above (see detailed comments there) that ensures I1DEST isn't
3316 mentioned in any SETs in NEWPAT that are field assignments. */
3317 if (!combinable_i3pat (NULLnullptr, &newpat, i1dest, NULL_RTX(rtx) 0, NULL_RTX(rtx) 0,
3318 0, 0, 0))
3319 {
3320 undo_all ();
3321 return 0;
3322 }
3323
3324 n_occurrences = 0;
3325 subst_low_luid = DF_INSN_LUID (i1)((((df->insns[(INSN_UID (i1))]))->luid));
3326
3327 /* If the following substitution will modify I1SRC, make a copy of it
3328 for the case where it is substituted for I1DEST in I2PAT later. */
3329 if (added_sets_2 && i1_feeds_i2_n)
3330 i1src_copy = copy_rtx (i1src);
3331
3332 /* If I0 feeds into I1 and I0DEST is in I0SRC, we need to make a unique
3333 copy of I1SRC each time we substitute it, in order to avoid creating
3334 self-referential RTL when we will be substituting I0SRC for I0DEST
3335 later. */
3336 newpat = subst (newpat, i1dest, i1src, 0, 0,
3337 i0_feeds_i1_n && i0dest_in_i0src);
3338 substed_i1 = 1;
3339
3340 /* Record whether I1's body now appears within I3's body. */
3341 i1_is_used = n_occurrences;
3342 }
3343
3344 /* Likewise for I0 if we have it. */
3345
3346 if (i0 && GET_CODE (newpat)((enum rtx_code) (newpat)->code) != CLOBBER)
3347 {
3348 if (!combinable_i3pat (NULLnullptr, &newpat, i0dest, NULL_RTX(rtx) 0, NULL_RTX(rtx) 0,
3349 0, 0, 0))
3350 {
3351 undo_all ();
3352 return 0;
3353 }
3354
3355 /* If the following substitution will modify I0SRC, make a copy of it
3356 for the case where it is substituted for I0DEST in I1PAT later. */
3357 if (added_sets_1 && i0_feeds_i1_n)
3358 i0src_copy = copy_rtx (i0src);
3359 /* And a copy for I0DEST in I2PAT substitution. */
3360 if (added_sets_2 && ((i0_feeds_i1_n && i1_feeds_i2_n)
3361 || (i0_feeds_i2_n)))
3362 i0src_copy2 = copy_rtx (i0src);
3363
3364 n_occurrences = 0;
3365 subst_low_luid = DF_INSN_LUID (i0)((((df->insns[(INSN_UID (i0))]))->luid));
3366 newpat = subst (newpat, i0dest, i0src, 0, 0, 0);
3367 substed_i0 = 1;
3368 }
3369
3370 if (n_auto_inc)
3371 {
3372 int new_n_auto_inc = 0;
3373 for_each_inc_dec (newpat, count_auto_inc, &new_n_auto_inc);
3374
3375 if (n_auto_inc != new_n_auto_inc)
3376 {
3377 if (dump_file && (dump_flags & TDF_DETAILS))
3378 fprintf (dump_file, "Number of auto_inc expressions changed\n");
3379 undo_all ();
3380 return 0;
3381 }
3382 }
3383
3384 /* Fail if an autoincrement side-effect has been duplicated. Be careful
3385 to count all the ways that I2SRC and I1SRC can be used. */
3386 if ((FIND_REG_INC_NOTE (i2, NULL_RTX)0 != 0
3387 && i2_is_used + added_sets_2 > 1)
3388 || (i1 != 0 && FIND_REG_INC_NOTE (i1, NULL_RTX)0 != 0
3389 && (i1_is_used + added_sets_1 + (added_sets_2 && i1_feeds_i2_n)
3390 > 1))
3391 || (i0 != 0 && FIND_REG_INC_NOTE (i0, NULL_RTX)0 != 0
3392 && (n_occurrences + added_sets_0
3393 + (added_sets_1 && i0_feeds_i1_n)
3394 + (added_sets_2 && i0_feeds_i2_n)
3395 > 1))
3396 /* Fail if we tried to make a new register. */
3397 || max_reg_num () != maxreg
3398 /* Fail if we couldn't do something and have a CLOBBER. */
3399 || GET_CODE (newpat)((enum rtx_code) (newpat)->code) == CLOBBER
3400 /* Fail if this new pattern is a MULT and we didn't have one before
3401 at the outer level. */
3402 || (GET_CODE (newpat)((enum rtx_code) (newpat)->code) == SET && GET_CODE (SET_SRC (newpat))((enum rtx_code) ((((newpat)->u.fld[1]).rt_rtx))->code) == MULT
3403 && ! have_mult))
3404 {
3405 undo_all ();
3406 return 0;
3407 }
3408
3409 /* If the actions of the earlier insns must be kept
3410 in addition to substituting them into the latest one,
3411 we must make a new PARALLEL for the latest insn
3412 to hold additional the SETs. */
3413
3414 if (added_sets_0 || added_sets_1 || added_sets_2)
3415 {
3416 int extra_sets = added_sets_0 + added_sets_1 + added_sets_2;
3417 combine_extras++;
3418
3419 if (GET_CODE (newpat)((enum rtx_code) (newpat)->code) == PARALLEL)
3420 {
3421 rtvec old = XVEC (newpat, 0)(((newpat)->u.fld[0]).rt_rtvec);
3422 total_sets = XVECLEN (newpat, 0)(((((newpat)->u.fld[0]).rt_rtvec))->num_elem) + extra_sets;
3423 newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total_sets))gen_rtx_fmt_E_stat ((PARALLEL), ((((void) 0, E_VOIDmode))), (
(rtvec_alloc (total_sets))) )
;
3424 memcpy (XVEC (newpat, 0)(((newpat)->u.fld[0]).rt_rtvec)->elem, &old->elem[0],
3425 sizeof (old->elem[0]) * old->num_elem);
3426 }
3427 else
3428 {
3429 rtx old = newpat;
3430 total_sets = 1 + extra_sets;
3431 newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total_sets))gen_rtx_fmt_E_stat ((PARALLEL), ((((void) 0, E_VOIDmode))), (
(rtvec_alloc (total_sets))) )
;
3432 XVECEXP (newpat, 0, 0)(((((newpat)->u.fld[0]).rt_rtvec))->elem[0]) = old;
3433 }
3434
3435 if (added_sets_0)
3436 XVECEXP (newpat, 0, --total_sets)(((((newpat)->u.fld[0]).rt_rtvec))->elem[--total_sets]) = i0pat;
3437
3438 if (added_sets_1)
3439 {
3440 rtx t = i1pat;
3441 if (i0_feeds_i1_n)
3442 t = subst (t, i0dest, i0src_copy ? i0src_copy : i0src, 0, 0, 0);
3443
3444 XVECEXP (newpat, 0, --total_sets)(((((newpat)->u.fld[0]).rt_rtvec))->elem[--total_sets]) = t;
3445 }
3446 if (added_sets_2)
3447 {
3448 rtx t = i2pat;
3449 if (i1_feeds_i2_n)
3450 t = subst (t, i1dest, i1src_copy ? i1src_copy : i1src, 0, 0,
3451 i0_feeds_i1_n && i0dest_in_i0src);
3452 if ((i0_feeds_i1_n && i1_feeds_i2_n) || i0_feeds_i2_n)
3453 t = subst (t, i0dest, i0src_copy2 ? i0src_copy2 : i0src, 0, 0, 0);
3454
3455 XVECEXP (newpat, 0, --total_sets)(((((newpat)->u.fld[0]).rt_rtvec))->elem[--total_sets]) = t;
3456 }
3457 }
3458
3459 validate_replacement:
3460
3461 /* Note which hard regs this insn has as inputs. */
3462 mark_used_regs_combine (newpat);
3463
3464 /* If recog_for_combine fails, it strips existing clobbers. If we'll
3465 consider splitting this pattern, we might need these clobbers. */
3466 if (i1 && GET_CODE (newpat)((enum rtx_code) (newpat)->code) == PARALLEL
3467 && GET_CODE (XVECEXP (newpat, 0, XVECLEN (newpat, 0) - 1))((enum rtx_code) ((((((newpat)->u.fld[0]).rt_rtvec))->elem
[(((((newpat)->u.fld[0]).rt_rtvec))->num_elem) - 1]))->
code)
== CLOBBER)
3468 {
3469 int len = XVECLEN (newpat, 0)(((((newpat)->u.fld[0]).rt_rtvec))->num_elem);
3470
3471 newpat_vec_with_clobbers = rtvec_alloc (len);
3472 for (i = 0; i < len; i++)
3473 RTVEC_ELT (newpat_vec_with_clobbers, i)((newpat_vec_with_clobbers)->elem[i]) = XVECEXP (newpat, 0, i)(((((newpat)->u.fld[0]).rt_rtvec))->elem[i]);
3474 }
3475
3476 /* We have recognized nothing yet. */
3477 insn_code_number = -1;
3478
3479 /* See if this is a PARALLEL of two SETs where one SET's destination is
3480 a register that is unused and this isn't marked as an instruction that
3481 might trap in an EH region. In that case, we just need the other SET.
3482 We prefer this over the PARALLEL.
3483
3484 This can occur when simplifying a divmod insn. We *must* test for this
3485 case here because the code below that splits two independent SETs doesn't
3486 handle this case correctly when it updates the register status.
3487
3488 It's pointless doing this if we originally had two sets, one from
3489 i3, and one from i2. Combining then splitting the parallel results
3490 in the original i2 again plus an invalid insn (which we delete).
3491 The net effect is only to move instructions around, which makes
3492 debug info less accurate.
3493
3494 If the remaining SET came from I2 its destination should not be used
3495 between I2 and I3. See PR82024. */
3496
3497 if (!(added_sets_2 && i1 == 0)
3498 && is_parallel_of_n_reg_sets (newpat, 2)
3499 && asm_noperands (newpat) < 0)
3500 {
3501 rtx set0 = XVECEXP (newpat, 0, 0)(((((newpat)->u.fld[0]).rt_rtvec))->elem[0]);
3502 rtx set1 = XVECEXP (newpat, 0, 1)(((((newpat)->u.fld[0]).rt_rtvec))->elem[1]);
3503 rtx oldpat = newpat;
3504
3505 if (((REG_P (SET_DEST (set1))(((enum rtx_code) ((((set1)->u.fld[0]).rt_rtx))->code) ==
REG)
3506 && find_reg_note (i3, REG_UNUSED, SET_DEST (set1)(((set1)->u.fld[0]).rt_rtx)))
3507 || (GET_CODE (SET_DEST (set1))((enum rtx_code) ((((set1)->u.fld[0]).rt_rtx))->code) == SUBREG
3508 && find_reg_note (i3, REG_UNUSED, SUBREG_REG (SET_DEST (set1))((((((set1)->u.fld[0]).rt_rtx))->u.fld[0]).rt_rtx))))
3509 && insn_nothrow_p (i3)
3510 && !side_effects_p (SET_SRC (set1)(((set1)->u.fld[1]).rt_rtx)))
3511 {
3512 newpat = set0;
3513 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
3514 }
3515
3516 else if (((REG_P (SET_DEST (set0))(((enum rtx_code) ((((set0)->u.fld[0]).rt_rtx))->code) ==
REG)
3517 && find_reg_note (i3, REG_UNUSED, SET_DEST (set0)(((set0)->u.fld[0]).rt_rtx)))
3518 || (GET_CODE (SET_DEST (set0))((enum rtx_code) ((((set0)->u.fld[0]).rt_rtx))->code) == SUBREG
3519 && find_reg_note (i3, REG_UNUSED,
3520 SUBREG_REG (SET_DEST (set0))((((((set0)->u.fld[0]).rt_rtx))->u.fld[0]).rt_rtx))))
3521 && insn_nothrow_p (i3)
3522 && !side_effects_p (SET_SRC (set0)(((set0)->u.fld[1]).rt_rtx)))
3523 {
3524 rtx dest = SET_DEST (set1)(((set1)->u.fld[0]).rt_rtx);
3525 if (GET_CODE (dest)((enum rtx_code) (dest)->code) == SUBREG)
3526 dest = SUBREG_REG (dest)(((dest)->u.fld[0]).rt_rtx);
3527 if (!reg_used_between_p (dest, i2, i3))
3528 {
3529 newpat = set1;
3530 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
3531
3532 if (insn_code_number >= 0)
3533 changed_i3_dest = 1;
3534 }
3535 }
3536
3537 if (insn_code_number < 0)
3538 newpat = oldpat;
3539 }
3540
3541 /* Is the result of combination a valid instruction? */
3542 if (insn_code_number < 0)
3543 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
3544
3545 /* If we were combining three insns and the result is a simple SET
3546 with no ASM_OPERANDS that wasn't recognized, try to split it into two
3547 insns. There are two ways to do this. It can be split using a
3548 machine-specific method (like when you have an addition of a large
3549 constant) or by combine in the function find_split_point. */
3550
3551 if (i1 && insn_code_number < 0 && GET_CODE (newpat)((enum rtx_code) (newpat)->code) == SET
3552 && asm_noperands (newpat) < 0)
3553 {
3554 rtx parallel, *split;
3555 rtx_insn *m_split_insn;
3556
3557 /* See if the MD file can split NEWPAT. If it can't, see if letting it
3558 use I2DEST as a scratch register will help. In the latter case,
3559 convert I2DEST to the mode of the source of NEWPAT if we can. */
3560
3561 m_split_insn = combine_split_insns (newpat, i3);
3562
3563 /* We can only use I2DEST as a scratch reg if it doesn't overlap any
3564 inputs of NEWPAT. */
3565
3566 /* ??? If I2DEST is not safe, and I1DEST exists, then it would be
3567 possible to try that as a scratch reg. This would require adding
3568 more code to make it work though. */
3569
3570 if (m_split_insn == 0 && ! reg_overlap_mentioned_p (i2dest, newpat))
3571 {
3572 machine_mode new_mode = GET_MODE (SET_DEST (newpat))((machine_mode) ((((newpat)->u.fld[0]).rt_rtx))->mode);
3573
3574 /* ??? Reusing i2dest without resetting the reg_stat entry for it
3575 (temporarily, until we are committed to this instruction
3576 combination) does not work: for example, any call to nonzero_bits
3577 on the register (from a splitter in the MD file, for example)
3578 will get the old information, which is invalid.
3579
3580 Since nowadays we can create registers during combine just fine,
3581 we should just create a new one here, not reuse i2dest. */
3582
3583 /* First try to split using the original register as a
3584 scratch register. */
3585 parallel = gen_rtx_PARALLEL (VOIDmode,gen_rtx_fmt_E_stat ((PARALLEL), ((((void) 0, E_VOIDmode))), (
(gen_rtvec (2, newpat, gen_rtx_fmt_e_stat ((CLOBBER), ((((void
) 0, E_VOIDmode))), ((i2dest)) )))) )
3586 gen_rtvec (2, newpat,gen_rtx_fmt_E_stat ((PARALLEL), ((((void) 0, E_VOIDmode))), (
(gen_rtvec (2, newpat, gen_rtx_fmt_e_stat ((CLOBBER), ((((void
) 0, E_VOIDmode))), ((i2dest)) )))) )
3587 gen_rtx_CLOBBER (VOIDmode,gen_rtx_fmt_E_stat ((PARALLEL), ((((void) 0, E_VOIDmode))), (
(gen_rtvec (2, newpat, gen_rtx_fmt_e_stat ((CLOBBER), ((((void
) 0, E_VOIDmode))), ((i2dest)) )))) )
3588 i2dest)))gen_rtx_fmt_E_stat ((PARALLEL), ((((void) 0, E_VOIDmode))), (
(gen_rtvec (2, newpat, gen_rtx_fmt_e_stat ((CLOBBER), ((((void
) 0, E_VOIDmode))), ((i2dest)) )))) )
;
3589 m_split_insn = combine_split_insns (parallel, i3);
3590
3591 /* If that didn't work, try changing the mode of I2DEST if
3592 we can. */
3593 if (m_split_insn == 0
3594 && new_mode != GET_MODE (i2dest)((machine_mode) (i2dest)->mode)
3595 && new_mode != VOIDmode((void) 0, E_VOIDmode)
3596 && can_change_dest_mode (i2dest, added_sets_2, new_mode))
3597 {
3598 machine_mode old_mode = GET_MODE (i2dest)((machine_mode) (i2dest)->mode);
3599 rtx ni2dest;
3600
3601 if (REGNO (i2dest)(rhs_regno(i2dest)) < FIRST_PSEUDO_REGISTER76)
3602 ni2dest = gen_rtx_REG (new_mode, REGNO (i2dest)(rhs_regno(i2dest)));
3603 else
3604 {
3605 subst_mode (REGNO (i2dest)(rhs_regno(i2dest)), new_mode);
3606 ni2dest = regno_reg_rtx[REGNO (i2dest)(rhs_regno(i2dest))];
3607 }
3608
3609 parallel = (gen_rtx_PARALLELgen_rtx_fmt_E_stat ((PARALLEL), ((((void) 0, E_VOIDmode))), (
(gen_rtvec (2, newpat, gen_rtx_fmt_e_stat ((CLOBBER), ((((void
) 0, E_VOIDmode))), ((ni2dest)) )))) )
3610 (VOIDmode,gen_rtx_fmt_E_stat ((PARALLEL), ((((void) 0, E_VOIDmode))), (
(gen_rtvec (2, newpat, gen_rtx_fmt_e_stat ((CLOBBER), ((((void
) 0, E_VOIDmode))), ((ni2dest)) )))) )
3611 gen_rtvec (2, newpat,gen_rtx_fmt_E_stat ((PARALLEL), ((((void) 0, E_VOIDmode))), (
(gen_rtvec (2, newpat, gen_rtx_fmt_e_stat ((CLOBBER), ((((void
) 0, E_VOIDmode))), ((ni2dest)) )))) )
3612 gen_rtx_CLOBBER (VOIDmode,gen_rtx_fmt_E_stat ((PARALLEL), ((((void) 0, E_VOIDmode))), (
(gen_rtvec (2, newpat, gen_rtx_fmt_e_stat ((CLOBBER), ((((void
) 0, E_VOIDmode))), ((ni2dest)) )))) )
3613 ni2dest)))gen_rtx_fmt_E_stat ((PARALLEL), ((((void) 0, E_VOIDmode))), (
(gen_rtvec (2, newpat, gen_rtx_fmt_e_stat ((CLOBBER), ((((void
) 0, E_VOIDmode))), ((ni2dest)) )))) )
);
3614 m_split_insn = combine_split_insns (parallel, i3);
3615
3616 if (m_split_insn == 0
3617 && REGNO (i2dest)(rhs_regno(i2dest)) >= FIRST_PSEUDO_REGISTER76)
3618 {
3619 struct undo *buf;
3620
3621 adjust_reg_mode (regno_reg_rtx[REGNO (i2dest)(rhs_regno(i2dest))], old_mode);
3622 buf = undobuf.undos;
3623 undobuf.undos = buf->next;
3624 buf->next = undobuf.frees;
3625 undobuf.frees = buf;
3626 }
3627 }
3628
3629 i2scratch = m_split_insn != 0;
3630 }
3631
3632 /* If recog_for_combine has discarded clobbers, try to use them
3633 again for the split. */
3634 if (m_split_insn == 0 && newpat_vec_with_clobbers)
3635 {
3636 parallel = gen_rtx_PARALLEL (VOIDmode, newpat_vec_with_clobbers)gen_rtx_fmt_E_stat ((PARALLEL), ((((void) 0, E_VOIDmode))), (
(newpat_vec_with_clobbers)) )
;
3637 m_split_insn = combine_split_insns (parallel, i3);
3638 }
3639
3640 if (m_split_insn && NEXT_INSN (m_split_insn) == NULL_RTX(rtx) 0)
3641 {
3642 rtx m_split_pat = PATTERN (m_split_insn);
3643 insn_code_number = recog_for_combine (&m_split_pat, i3, &new_i3_notes);
3644 if (insn_code_number >= 0)
3645 newpat = m_split_pat;
3646 }
3647 else if (m_split_insn && NEXT_INSN (NEXT_INSN (m_split_insn)) == NULL_RTX(rtx) 0
3648 && (next_nonnote_nondebug_insn (i2) == i3
3649 || !modified_between_p (PATTERN (m_split_insn), i2, i3)))
3650 {
3651 rtx i2set, i3set;
3652 rtx newi3pat = PATTERN (NEXT_INSN (m_split_insn));
3653 newi2pat = PATTERN (m_split_insn);
3654
3655 i3set = single_set (NEXT_INSN (m_split_insn));
3656 i2set = single_set (m_split_insn);
3657
3658 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
3659
3660 /* If I2 or I3 has multiple SETs, we won't know how to track
3661 register status, so don't use these insns. If I2's destination
3662 is used between I2 and I3, we also can't use these insns. */
3663
3664 if (i2_code_number >= 0 && i2set && i3set
3665 && (next_nonnote_nondebug_insn (i2) == i3
3666 || ! reg_used_between_p (SET_DEST (i2set)(((i2set)->u.fld[0]).rt_rtx), i2, i3)))
3667 insn_code_number = recog_for_combine (&newi3pat, i3,
3668 &new_i3_notes);
3669 if (insn_code_number >= 0)
3670 newpat = newi3pat;
3671
3672 /* It is possible that both insns now set the destination of I3.
3673 If so, we must show an extra use of it. */
3674
3675 if (insn_code_number >= 0)
3676 {
3677 rtx new_i3_dest = SET_DEST (i3set)(((i3set)->u.fld[0]).rt_rtx);
3678 rtx new_i2_dest = SET_DEST (i2set)(((i2set)->u.fld[0]).rt_rtx);
3679
3680 while (GET_CODE (new_i3_dest)((enum rtx_code) (new_i3_dest)->code) == ZERO_EXTRACT
3681 || GET_CODE (new_i3_dest)((enum rtx_code) (new_i3_dest)->code) == STRICT_LOW_PART
3682 || GET_CODE (new_i3_dest)((enum rtx_code) (new_i3_dest)->code) == SUBREG)
3683 new_i3_dest = XEXP (new_i3_dest, 0)(((new_i3_dest)->u.fld[0]).rt_rtx);
3684
3685 while (GET_CODE (new_i2_dest)((enum rtx_code) (new_i2_dest)->code) == ZERO_EXTRACT
3686 || GET_CODE (new_i2_dest)((enum rtx_code) (new_i2_dest)->code) == STRICT_LOW_PART
3687 || GET_CODE (new_i2_dest)((enum rtx_code) (new_i2_dest)->code) == SUBREG)
3688 new_i2_dest = XEXP (new_i2_dest, 0)(((new_i2_dest)->u.fld[0]).rt_rtx);
3689
3690 if (REG_P (new_i3_dest)(((enum rtx_code) (new_i3_dest)->code) == REG)
3691 && REG_P (new_i2_dest)(((enum rtx_code) (new_i2_dest)->code) == REG)
3692 && REGNO (new_i3_dest)(rhs_regno(new_i3_dest)) == REGNO (new_i2_dest)(rhs_regno(new_i2_dest))
3693 && REGNO (new_i2_dest)(rhs_regno(new_i2_dest)) < reg_n_sets_max)
3694 INC_REG_N_SETS (REGNO (new_i2_dest), 1)(regstat_n_sets_and_refs[(rhs_regno(new_i2_dest))].sets += 1);
3695 }
3696 }
3697
3698 /* If we can split it and use I2DEST, go ahead and see if that
3699 helps things be recognized. Verify that none of the registers
3700 are set between I2 and I3. */
3701 if (insn_code_number < 0
3702 && (split = find_split_point (&newpat, i3, false)) != 0
3703 /* We need I2DEST in the proper mode. If it is a hard register
3704 or the only use of a pseudo, we can change its mode.
3705 Make sure we don't change a hard register to have a mode that
3706 isn't valid for it, or change the number of registers. */
3707 && (GET_MODE (*split)((machine_mode) (*split)->mode) == GET_MODE (i2dest)((machine_mode) (i2dest)->mode)
3708 || GET_MODE (*split)((machine_mode) (*split)->mode) == VOIDmode((void) 0, E_VOIDmode)
3709 || can_change_dest_mode (i2dest, added_sets_2,
3710 GET_MODE (*split)((machine_mode) (*split)->mode)))
3711 && (next_nonnote_nondebug_insn (i2) == i3
3712 || !modified_between_p (*split, i2, i3))
3713 /* We can't overwrite I2DEST if its value is still used by
3714 NEWPAT. */
3715 && ! reg_referenced_p (i2dest, newpat)
3716 /* We should not split a possibly trapping part when we
3717 care about non-call EH and have REG_EH_REGION notes
3718 to distribute. */
3719 && ! (cfun(cfun + 0)->can_throw_non_call_exceptions
3720 && has_non_call_exception
3721 && may_trap_p (*split)))
3722 {
3723 rtx newdest = i2dest;
3724 enum rtx_code split_code = GET_CODE (*split)((enum rtx_code) (*split)->code);
3725 machine_mode split_mode = GET_MODE (*split)((machine_mode) (*split)->mode);
3726 bool subst_done = false;
3727 newi2pat = NULL_RTX(rtx) 0;
3728
3729 i2scratch = true;
3730
3731 /* *SPLIT may be part of I2SRC, so make sure we have the
3732 original expression around for later debug processing.
3733 We should not need I2SRC any more in other cases. */
3734 if (MAY_HAVE_DEBUG_BIND_INSNSglobal_options.x_flag_var_tracking_assignments)
3735 i2src = copy_rtx (i2src);
3736 else
3737 i2src = NULLnullptr;
3738
3739 /* Get NEWDEST as a register in the proper mode. We have already
3740 validated that we can do this. */
3741 if (GET_MODE (i2dest)((machine_mode) (i2dest)->mode) != split_mode && split_mode != VOIDmode((void) 0, E_VOIDmode))
3742 {
3743 if (REGNO (i2dest)(rhs_regno(i2dest)) < FIRST_PSEUDO_REGISTER76)
3744 newdest = gen_rtx_REG (split_mode, REGNO (i2dest)(rhs_regno(i2dest)));
3745 else
3746 {
3747 subst_mode (REGNO (i2dest)(rhs_regno(i2dest)), split_mode);
3748 newdest = regno_reg_rtx[REGNO (i2dest)(rhs_regno(i2dest))];
3749 }
3750 }
3751
3752 /* If *SPLIT is a (mult FOO (const_int pow2)), convert it to
3753 an ASHIFT. This can occur if it was inside a PLUS and hence
3754 appeared to be a memory address. This is a kludge. */
3755 if (split_code == MULT
3756 && CONST_INT_P (XEXP (*split, 1))(((enum rtx_code) ((((*split)->u.fld[1]).rt_rtx))->code
) == CONST_INT)
3757 && INTVAL (XEXP (*split, 1))(((((*split)->u.fld[1]).rt_rtx))->u.hwint[0]) > 0
3758 && (i = exact_log2 (UINTVAL (XEXP (*split, 1))((unsigned long) (((((*split)->u.fld[1]).rt_rtx))->u.hwint
[0]))
)) >= 0)
3759 {
3760 rtx i_rtx = gen_int_shift_amount (split_mode, i);
3761 SUBST (*split, gen_rtx_ASHIFT (split_mode,do_SUBST (&(*split), (gen_rtx_fmt_ee_stat ((ASHIFT), ((split_mode
)), (((((*split)->u.fld[0]).rt_rtx))), ((i_rtx)) )))
3762 XEXP (*split, 0), i_rtx))do_SUBST (&(*split), (gen_rtx_fmt_ee_stat ((ASHIFT), ((split_mode
)), (((((*split)->u.fld[0]).rt_rtx))), ((i_rtx)) )))
;
3763 /* Update split_code because we may not have a multiply
3764 anymore. */
3765 split_code = GET_CODE (*split)((enum rtx_code) (*split)->code);
3766 }
3767
3768 /* Similarly for (plus (mult FOO (const_int pow2))). */
3769 if (split_code == PLUS
3770 && GET_CODE (XEXP (*split, 0))((enum rtx_code) ((((*split)->u.fld[0]).rt_rtx))->code) == MULT
3771 && CONST_INT_P (XEXP (XEXP (*split, 0), 1))(((enum rtx_code) (((((((*split)->u.fld[0]).rt_rtx))->u
.fld[1]).rt_rtx))->code) == CONST_INT)
3772 && INTVAL (XEXP (XEXP (*split, 0), 1))((((((((*split)->u.fld[0]).rt_rtx))->u.fld[1]).rt_rtx))
->u.hwint[0])
> 0
3773 && (i = exact_log2 (UINTVAL (XEXP (XEXP (*split, 0), 1))((unsigned long) ((((((((*split)->u.fld[0]).rt_rtx))->u
.fld[1]).rt_rtx))->u.hwint[0]))
)) >= 0)
3774 {
3775 rtx nsplit = XEXP (*split, 0)(((*split)->u.fld[0]).rt_rtx);
3776 rtx i_rtx = gen_int_shift_amount (GET_MODE (nsplit)((machine_mode) (nsplit)->mode), i);
3777 SUBST (XEXP (*split, 0), gen_rtx_ASHIFT (GET_MODE (nsplit),do_SUBST (&((((*split)->u.fld[0]).rt_rtx)), (gen_rtx_fmt_ee_stat
((ASHIFT), ((((machine_mode) (nsplit)->mode))), (((((nsplit
)->u.fld[0]).rt_rtx))), ((i_rtx)) )))
3778 XEXP (nsplit, 0),do_SUBST (&((((*split)->u.fld[0]).rt_rtx)), (gen_rtx_fmt_ee_stat
((ASHIFT), ((((machine_mode) (nsplit)->mode))), (((((nsplit
)->u.fld[0]).rt_rtx))), ((i_rtx)) )))
3779 i_rtx))do_SUBST (&((((*split)->u.fld[0]).rt_rtx)), (gen_rtx_fmt_ee_stat
((ASHIFT), ((((machine_mode) (nsplit)->mode))), (((((nsplit
)->u.fld[0]).rt_rtx))), ((i_rtx)) )))
;
3780 /* Update split_code because we may not have a multiply
3781 anymore. */
3782 split_code = GET_CODE (*split)((enum rtx_code) (*split)->code);
3783 }
3784
3785#ifdef INSN_SCHEDULING
3786 /* If *SPLIT is a paradoxical SUBREG, when we split it, it should
3787 be written as a ZERO_EXTEND. */
3788 if (split_code == SUBREG && MEM_P (SUBREG_REG (*split))(((enum rtx_code) ((((*split)->u.fld[0]).rt_rtx))->code
) == MEM)
)
3789 {
3790 /* Or as a SIGN_EXTEND if LOAD_EXTEND_OP says that that's
3791 what it really is. */
3792 if (load_extend_op (GET_MODE (SUBREG_REG (*split))((machine_mode) ((((*split)->u.fld[0]).rt_rtx))->mode))
3793 == SIGN_EXTEND)
3794 SUBST (*split, gen_rtx_SIGN_EXTEND (split_mode,do_SUBST (&(*split), (gen_rtx_fmt_e_stat ((SIGN_EXTEND), (
(split_mode)), (((((*split)->u.fld[0]).rt_rtx))) )))
3795 SUBREG_REG (*split)))do_SUBST (&(*split), (gen_rtx_fmt_e_stat ((SIGN_EXTEND), (
(split_mode)), (((((*split)->u.fld[0]).rt_rtx))) )))
;
3796 else
3797 SUBST (*split, gen_rtx_ZERO_EXTEND (split_mode,do_SUBST (&(*split), (gen_rtx_fmt_e_stat ((ZERO_EXTEND), (
(split_mode)), (((((*split)->u.fld[0]).rt_rtx))) )))
3798 SUBREG_REG (*split)))do_SUBST (&(*split), (gen_rtx_fmt_e_stat ((ZERO_EXTEND), (
(split_mode)), (((((*split)->u.fld[0]).rt_rtx))) )))
;
3799 }
3800#endif
3801
3802 /* Attempt to split binary operators using arithmetic identities. */
3803 if (BINARY_P (SET_SRC (newpat))(((rtx_class[(int) (((enum rtx_code) ((((newpat)->u.fld[1]
).rt_rtx))->code))]) & (~3)) == (RTX_COMPARE & (~3
)))
3804 && split_mode == GET_MODE (SET_SRC (newpat))((machine_mode) ((((newpat)->u.fld[1]).rt_rtx))->mode)
3805 && ! side_effects_p (SET_SRC (newpat)(((newpat)->u.fld[1]).rt_rtx)))
3806 {
3807 rtx setsrc = SET_SRC (newpat)(((newpat)->u.fld[1]).rt_rtx);
3808 machine_mode mode = GET_MODE (setsrc)((machine_mode) (setsrc)->mode);
3809 enum rtx_code code = GET_CODE (setsrc)((enum rtx_code) (setsrc)->code);
3810 rtx src_op0 = XEXP (setsrc, 0)(((setsrc)->u.fld[0]).rt_rtx);
3811 rtx src_op1 = XEXP (setsrc, 1)(((setsrc)->u.fld[1]).rt_rtx);
3812
3813 /* Split "X = Y op Y" as "Z = Y; X = Z op Z". */
3814 if (rtx_equal_p (src_op0, src_op1))
3815 {
3816 newi2pat = gen_rtx_SET (newdest, src_op0)gen_rtx_fmt_ee_stat ((SET), (((void) 0, E_VOIDmode)), ((newdest
)), ((src_op0)) )
;
3817 SUBST (XEXP (setsrc, 0), newdest)do_SUBST (&((((setsrc)->u.fld[0]).rt_rtx)), (newdest));
3818 SUBST (XEXP (setsrc, 1), newdest)do_SUBST (&((((setsrc)->u.fld[1]).rt_rtx)), (newdest));
3819 subst_done = true;
3820 }
3821 /* Split "((P op Q) op R) op S" where op is PLUS or MULT. */
3822 else if ((code == PLUS || code == MULT)
3823 && GET_CODE (src_op0)((enum rtx_code) (src_op0)->code) == code
3824 && GET_CODE (XEXP (src_op0, 0))((enum rtx_code) ((((src_op0)->u.fld[0]).rt_rtx))->code
)
== code
3825 && (INTEGRAL_MODE_P (mode)(((enum mode_class) mode_class[mode]) == MODE_INT || ((enum mode_class
) mode_class[mode]) == MODE_PARTIAL_INT || ((enum mode_class)
mode_class[mode]) == MODE_COMPLEX_INT || ((enum mode_class) mode_class
[mode]) == MODE_VECTOR_BOOL || ((enum mode_class) mode_class[
mode]) == MODE_VECTOR_INT)
3826 || (FLOAT_MODE_P (mode)(((enum mode_class) mode_class[mode]) == MODE_FLOAT || ((enum
mode_class) mode_class[mode]) == MODE_DECIMAL_FLOAT || ((enum
mode_class) mode_class[mode]) == MODE_COMPLEX_FLOAT || ((enum
mode_class) mode_class[mode]) == MODE_VECTOR_FLOAT)
3827 && flag_unsafe_math_optimizationsglobal_options.x_flag_unsafe_math_optimizations)))
3828 {
3829 rtx p = XEXP (XEXP (src_op0, 0), 0)((((((src_op0)->u.fld[0]).rt_rtx))->u.fld[0]).rt_rtx);
3830 rtx q = XEXP (XEXP (src_op0, 0), 1)((((((src_op0)->u.fld[0]).rt_rtx))->u.fld[1]).rt_rtx);
3831 rtx r = XEXP (src_op0, 1)(((src_op0)->u.fld[1]).rt_rtx);
3832 rtx s = src_op1;
3833
3834 /* Split both "((X op Y) op X) op Y" and
3835 "((X op Y) op Y) op X" as "T op T" where T is
3836 "X op Y". */
3837 if ((rtx_equal_p (p,r) && rtx_equal_p (q,s))
3838 || (rtx_equal_p (p,s) && rtx_equal_p (q,r)))
3839 {
3840 newi2pat = gen_rtx_SET (newdest, XEXP (src_op0, 0))gen_rtx_fmt_ee_stat ((SET), (((void) 0, E_VOIDmode)), ((newdest
)), (((((src_op0)->u.fld[0]).rt_rtx))) )
;
3841 SUBST (XEXP (setsrc, 0), newdest)do_SUBST (&((((setsrc)->u.fld[0]).rt_rtx)), (newdest));
3842 SUBST (XEXP (setsrc, 1), newdest)do_SUBST (&((((setsrc)->u.fld[1]).rt_rtx)), (newdest));
3843 subst_done = true;
3844 }
3845 /* Split "((X op X) op Y) op Y)" as "T op T" where
3846 T is "X op Y". */
3847 else if (rtx_equal_p (p,q) && rtx_equal_p (r,s))
3848 {
3849 rtx tmp = simplify_gen_binary (code, mode, p, r);
3850 newi2pat = gen_rtx_SET (newdest, tmp)gen_rtx_fmt_ee_stat ((SET), (((void) 0, E_VOIDmode)), ((newdest
)), ((tmp)) )
;
3851 SUBST (XEXP (setsrc, 0), newdest)do_SUBST (&((((setsrc)->u.fld[0]).rt_rtx)), (newdest));
3852 SUBST (XEXP (setsrc, 1), newdest)do_SUBST (&((((setsrc)->u.fld[1]).rt_rtx)), (newdest));
3853 subst_done = true;
3854 }
3855 }
3856 }
3857
3858 if (!subst_done)
3859 {
3860 newi2pat = gen_rtx_SET (newdest, *split)gen_rtx_fmt_ee_stat ((SET), (((void) 0, E_VOIDmode)), ((newdest
)), ((*split)) )
;
3861 SUBST (*split, newdest)do_SUBST (&(*split), (newdest));
3862 }
3863
3864 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
3865
3866 /* recog_for_combine might have added CLOBBERs to newi2pat.
3867 Make sure NEWPAT does not depend on the clobbered regs. */
3868 if (GET_CODE (newi2pat)((enum rtx_code) (newi2pat)->code) == PARALLEL)
3869 for (i = XVECLEN (newi2pat, 0)(((((newi2pat)->u.fld[0]).rt_rtvec))->num_elem) - 1; i >= 0; i--)
3870 if (GET_CODE (XVECEXP (newi2pat, 0, i))((enum rtx_code) ((((((newi2pat)->u.fld[0]).rt_rtvec))->
elem[i]))->code)
== CLOBBER)
3871 {
3872 rtx reg = XEXP (XVECEXP (newi2pat, 0, i), 0)((((((((newi2pat)->u.fld[0]).rt_rtvec))->elem[i]))->
u.fld[0]).rt_rtx)
;
3873 if (reg_overlap_mentioned_p (reg, newpat))
3874 {
3875 undo_all ();
3876 return 0;
3877 }
3878 }
3879
3880 /* If the split point was a MULT and we didn't have one before,
3881 don't use one now. */
3882 if (i2_code_number >= 0 && ! (split_code == MULT && ! have_mult))
3883 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
3884 }
3885 }
3886
3887 /* Check for a case where we loaded from memory in a narrow mode and
3888 then sign extended it, but we need both registers. In that case,
3889 we have a PARALLEL with both loads from the same memory location.
3890 We can split this into a load from memory followed by a register-register
3891 copy. This saves at least one insn, more if register allocation can
3892 eliminate the copy.
3893
3894 We cannot do this if the destination of the first assignment is a
3895 condition code register. We eliminate this case by making sure
3896 the SET_DEST and SET_SRC have the same mode.
3897
3898 We cannot do this if the destination of the second assignment is
3899 a register that we have already assumed is zero-extended. Similarly
3900 for a SUBREG of such a register. */
3901
3902 else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
3903 && GET_CODE (newpat)((enum rtx_code) (newpat)->code) == PARALLEL
3904 && XVECLEN (newpat, 0)(((((newpat)->u.fld[0]).rt_rtvec))->num_elem) == 2
3905 && GET_CODE (XVECEXP (newpat, 0, 0))((enum rtx_code) ((((((newpat)->u.fld[0]).rt_rtvec))->elem
[0]))->code)
== SET
3906 && GET_CODE (SET_SRC (XVECEXP (newpat, 0, 0)))((enum rtx_code) (((((((((newpat)->u.fld[0]).rt_rtvec))->
elem[0]))->u.fld[1]).rt_rtx))->code)
== SIGN_EXTEND
3907 && (GET_MODE (SET_DEST (XVECEXP (newpat, 0, 0)))((machine_mode) (((((((((newpat)->u.fld[0]).rt_rtvec))->
elem[0]))->u.fld[0]).rt_rtx))->mode)
3908 == GET_MODE (SET_SRC (XVECEXP (newpat, 0, 0)))((machine_mode) (((((((((newpat)->u.fld[0]).rt_rtvec))->
elem[0]))->u.fld[1]).rt_rtx))->mode)
)
3909 && GET_CODE (XVECEXP (newpat, 0, 1))((enum rtx_code) ((((((newpat)->u.fld[0]).rt_rtvec))->elem
[1]))->code)
== SET
3910 && rtx_equal_p (SET_SRC (XVECEXP (newpat, 0, 1))((((((((newpat)->u.fld[0]).rt_rtvec))->elem[1]))->u.
fld[1]).rt_rtx)
,
3911 XEXP (SET_SRC (XVECEXP (newpat, 0, 0)), 0)(((((((((((newpat)->u.fld[0]).rt_rtvec))->elem[0]))->
u.fld[1]).rt_rtx))->u.fld[0]).rt_rtx)
)
3912 && !modified_between_p (SET_SRC (XVECEXP (newpat, 0, 1))((((((((newpat)->u.fld[0]).rt_rtvec))->elem[1]))->u.
fld[1]).rt_rtx)
, i2, i3)
3913 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1)))((enum rtx_code) (((((((((newpat)->u.fld[0]).rt_rtvec))->
elem[1]))->u.fld[0]).rt_rtx))->code)
!= ZERO_EXTRACT
3914 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1)))((enum rtx_code) (((((((((newpat)->u.fld[0]).rt_rtvec))->
elem[1]))->u.fld[0]).rt_rtx))->code)
!= STRICT_LOW_PART
3915 && ! (temp_expr = SET_DEST (XVECEXP (newpat, 0, 1))((((((((newpat)->u.fld[0]).rt_rtvec))->elem[1]))->u.
fld[0]).rt_rtx)
,
3916 (REG_P (temp_expr)(((enum rtx_code) (temp_expr)->code) == REG)
3917 && reg_stat[REGNO (temp_expr)(rhs_regno(temp_expr))].nonzero_bits != 0
3918 && known_lt (GET_MODE_PRECISION (GET_MODE (temp_expr)),(!maybe_le (((8) * (((global_options.x_ix86_isa_flags & (
1UL << 1)) != 0) ? 8 : 4)), GET_MODE_PRECISION (((machine_mode
) (temp_expr)->mode))))
3919 BITS_PER_WORD)(!maybe_le (((8) * (((global_options.x_ix86_isa_flags & (
1UL << 1)) != 0) ? 8 : 4)), GET_MODE_PRECISION (((machine_mode
) (temp_expr)->mode))))
3920 && known_lt (GET_MODE_PRECISION (GET_MODE (temp_expr)),(!maybe_le ((8 * 4), GET_MODE_PRECISION (((machine_mode) (temp_expr
)->mode))))
3921 HOST_BITS_PER_INT)(!maybe_le ((8 * 4), GET_MODE_PRECISION (((machine_mode) (temp_expr
)->mode))))
3922 && (reg_stat[REGNO (temp_expr)(rhs_regno(temp_expr))].nonzero_bits
3923 != GET_MODE_MASK (word_mode)mode_mask_array[word_mode])))
3924 && ! (GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1)))((enum rtx_code) (((((((((newpat)->u.fld[0]).rt_rtvec))->
elem[1]))->u.fld[0]).rt_rtx))->code)
== SUBREG
3925 && (temp_expr = SUBREG_REG (SET_DEST (XVECEXP (newpat, 0, 1)))(((((((((((newpat)->u.fld[0]).rt_rtvec))->elem[1]))->
u.fld[0]).rt_rtx))->u.fld[0]).rt_rtx)
,
3926 (REG_P (temp_expr)(((enum rtx_code) (temp_expr)->code) == REG)
3927 && reg_stat[REGNO (temp_expr)(rhs_regno(temp_expr))].nonzero_bits != 0
3928 && known_lt (GET_MODE_PRECISION (GET_MODE (temp_expr)),(!maybe_le (((8) * (((global_options.x_ix86_isa_flags & (
1UL << 1)) != 0) ? 8 : 4)), GET_MODE_PRECISION (((machine_mode
) (temp_expr)->mode))))
3929 BITS_PER_WORD)(!maybe_le (((8) * (((global_options.x_ix86_isa_flags & (
1UL << 1)) != 0) ? 8 : 4)), GET_MODE_PRECISION (((machine_mode
) (temp_expr)->mode))))
3930 && known_lt (GET_MODE_PRECISION (GET_MODE (temp_expr)),(!maybe_le ((8 * 4), GET_MODE_PRECISION (((machine_mode) (temp_expr
)->mode))))
3931 HOST_BITS_PER_INT)(!maybe_le ((8 * 4), GET_MODE_PRECISION (((machine_mode) (temp_expr
)->mode))))
3932 && (reg_stat[REGNO (temp_expr)(rhs_regno(temp_expr))].nonzero_bits
3933 != GET_MODE_MASK (word_mode)mode_mask_array[word_mode]))))
3934 && ! reg_overlap_mentioned_p (SET_DEST (XVECEXP (newpat, 0, 1))((((((((newpat)->u.fld[0]).rt_rtvec))->elem[1]))->u.
fld[0]).rt_rtx)
,
3935 SET_SRC (XVECEXP (newpat, 0, 1))((((((((newpat)->u.fld[0]).rt_rtvec))->elem[1]))->u.
fld[1]).rt_rtx)
)
3936 && ! find_reg_note (i3, REG_UNUSED,
3937 SET_DEST (XVECEXP (newpat, 0, 0))((((((((newpat)->u.fld[0]).rt_rtvec))->elem[0]))->u.
fld[0]).rt_rtx)
))
3938 {
3939 rtx ni2dest;
3940
3941 newi2pat = XVECEXP (newpat, 0, 0)(((((newpat)->u.fld[0]).rt_rtvec))->elem[0]);
3942 ni2dest = SET_DEST (XVECEXP (newpat, 0, 0))((((((((newpat)->u.fld[0]).rt_rtvec))->elem[0]))->u.
fld[0]).rt_rtx)
;
3943 newpat = XVECEXP (newpat, 0, 1)(((((newpat)->u.fld[0]).rt_rtvec))->elem[1]);
3944 SUBST (SET_SRC (newpat),do_SUBST (&((((newpat)->u.fld[1]).rt_rtx)), (rtl_hooks
.gen_lowpart (((machine_mode) ((((newpat)->u.fld[1]).rt_rtx
))->mode), ni2dest)))
3945 gen_lowpart (GET_MODE (SET_SRC (newpat)), ni2dest))do_SUBST (&((((newpat)->u.fld[1]).rt_rtx)), (rtl_hooks
.gen_lowpart (((machine_mode) ((((newpat)->u.fld[1]).rt_rtx
))->mode), ni2dest)))
;
3946 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
3947
3948 if (i2_code_number >= 0)
3949 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
3950
3951 if (insn_code_number >= 0)
3952 swap_i2i3 = 1;
3953 }
3954
3955 /* Similarly, check for a case where we have a PARALLEL of two independent
3956 SETs but we started with three insns. In this case, we can do the sets
3957 as two separate insns. This case occurs when some SET allows two
3958 other insns to combine, but the destination of that SET is still live.
3959
3960 Also do this if we started with two insns and (at least) one of the
3961 resulting sets is a noop; this noop will be deleted later.
3962
3963 Also do this if we started with two insns neither of which was a simple
3964 move. */
3965
3966 else if (insn_code_number < 0 && asm_noperands (newpat) < 0
3967 && GET_CODE (newpat)((enum rtx_code) (newpat)->code) == PARALLEL
3968 && XVECLEN (newpat, 0)(((((newpat)->u.fld[0]).rt_rtvec))->num_elem) == 2
3969 && GET_CODE (XVECEXP (newpat, 0, 0))((enum rtx_code) ((((((newpat)->u.fld[0]).rt_rtvec))->elem
[0]))->code)
== SET
3970 && GET_CODE (XVECEXP (newpat, 0, 1))((enum rtx_code) ((((((newpat)->u.fld[0]).rt_rtvec))->elem
[1]))->code)
== SET
3971 && (i1
3972 || set_noop_p (XVECEXP (newpat, 0, 0)(((((newpat)->u.fld[0]).rt_rtvec))->elem[0]))
3973 || set_noop_p (XVECEXP (newpat, 0, 1)(((((newpat)->u.fld[0]).rt_rtvec))->elem[1]))
3974 || (!i2_was_move && !i3_was_move))
3975 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0)))((enum rtx_code) (((((((((newpat)->u.fld[0]).rt_rtvec))->
elem[0]))->u.fld[0]).rt_rtx))->code)
!= ZERO_EXTRACT
3976 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0)))((enum rtx_code) (((((((((newpat)->u.fld[0]).rt_rtvec))->
elem[0]))->u.fld[0]).rt_rtx))->code)
!= STRICT_LOW_PART
3977 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1)))((enum rtx_code) (((((((((newpat)->u.fld[0]).rt_rtvec))->
elem[1]))->u.fld[0]).rt_rtx))->code)
!= ZERO_EXTRACT
3978 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1)))((enum rtx_code) (((((((((newpat)->u.fld[0]).rt_rtvec))->
elem[1]))->u.fld[0]).rt_rtx))->code)
!= STRICT_LOW_PART
3979 && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 1))((((((((newpat)->u.fld[0]).rt_rtvec))->elem[1]))->u.
fld[0]).rt_rtx)
,
3980 XVECEXP (newpat, 0, 0)(((((newpat)->u.fld[0]).rt_rtvec))->elem[0]))
3981 && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 0))((((((((newpat)->u.fld[0]).rt_rtvec))->elem[0]))->u.
fld[0]).rt_rtx)
,
3982 XVECEXP (newpat, 0, 1)(((((newpat)->u.fld[0]).rt_rtvec))->elem[1]))
3983 && ! (contains_muldiv (SET_SRC (XVECEXP (newpat, 0, 0))((((((((newpat)->u.fld[0]).rt_rtvec))->elem[0]))->u.
fld[1]).rt_rtx)
)
3984 && contains_muldiv (SET_SRC (XVECEXP (newpat, 0, 1))((((((((newpat)->u.fld[0]).rt_rtvec))->elem[1]))->u.
fld[1]).rt_rtx)
)))
3985 {
3986 rtx set0 = XVECEXP (newpat, 0, 0)(((((newpat)->u.fld[0]).rt_rtvec))->elem[0]);
3987 rtx set1 = XVECEXP (newpat, 0, 1)(((((newpat)->u.fld[0]).rt_rtvec))->elem[1]);
3988
3989 /* Normally, it doesn't matter which of the two is done first, but
3990 one which uses any regs/memory set in between i2 and i3 can't
3991 be first. The PARALLEL might also have been pre-existing in i3,
3992 so we need to make sure that we won't wrongly hoist a SET to i2
3993 that would conflict with a death note present in there, or would
3994 have its dest modified between i2 and i3. */
3995 if (!modified_between_p (SET_SRC (set1)(((set1)->u.fld[1]).rt_rtx), i2, i3)
3996 && !(REG_P (SET_DEST (set1))(((enum rtx_code) ((((set1)->u.fld[0]).rt_rtx))->code) ==
REG)
3997 && find_reg_note (i2, REG_DEAD, SET_DEST (set1)(((set1)->u.fld[0]).rt_rtx)))
3998 && !(GET_CODE (SET_DEST (set1))((enum rtx_code) ((((set1)->u.fld[0]).rt_rtx))->code) == SUBREG
3999 && find_reg_note (i2, REG_DEAD,
4000 SUBREG_REG (SET_DEST (set1))((((((set1)->u.fld[0]).rt_rtx))->u.fld[0]).rt_rtx)))
4001 && !modified_between_p (SET_DEST (set1)(((set1)->u.fld[0]).rt_rtx), i2, i3)
4002 /* If I3 is a jump, ensure that set0 is a jump so that
4003 we do not create invalid RTL. */
4004 && (!JUMP_P (i3)(((enum rtx_code) (i3)->code) == JUMP_INSN) || SET_DEST (set0)(((set0)->u.fld[0]).rt_rtx) == pc_rtx)
4005 )
4006 {
4007 newi2pat = set1;
4008 newpat = set0;
4009 }
4010 else if (!modified_between_p (SET_SRC (set0)(((set0)->u.fld[1]).rt_rtx), i2, i3)
4011 && !(REG_P (SET_DEST (set0))(((enum rtx_code) ((((set0)->u.fld[0]).rt_rtx))->code) ==
REG)
4012 && find_reg_note (i2, REG_DEAD, SET_DEST (set0)(((set0)->u.fld[0]).rt_rtx)))
4013 && !(GET_CODE (SET_DEST (set0))((enum rtx_code) ((((set0)->u.fld[0]).rt_rtx))->code) == SUBREG
4014 && find_reg_note (i2, REG_DEAD,
4015 SUBREG_REG (SET_DEST (set0))((((((set0)->u.fld[0]).rt_rtx))->u.fld[0]).rt_rtx)))
4016 && !modified_between_p (SET_DEST (set0)(((set0)->u.fld[0]).rt_rtx), i2, i3)
4017 /* If I3 is a jump, ensure that set1 is a jump so that
4018 we do not create invalid RTL. */
4019 && (!JUMP_P (i3)(((enum rtx_code) (i3)->code) == JUMP_INSN) || SET_DEST (set1)(((set1)->u.fld[0]).rt_rtx) == pc_rtx)
4020 )
4021 {
4022 newi2pat = set0;
4023 newpat = set1;
4024 }
4025 else
4026 {
4027 undo_all ();
4028 return 0;
4029 }
4030
4031 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
4032
4033 if (i2_code_number >= 0)
4034 {
4035 /* recog_for_combine might have added CLOBBERs to newi2pat.
4036 Make sure NEWPAT does not depend on the clobbered regs. */
4037 if (GET_CODE (newi2pat)((enum rtx_code) (newi2pat)->code) == PARALLEL)
4038 {
4039 for (i = XVECLEN (newi2pat, 0)(((((newi2pat)->u.fld[0]).rt_rtvec))->num_elem) - 1; i >= 0; i--)
4040 if (GET_CODE (XVECEXP (newi2pat, 0, i))((enum rtx_code) ((((((newi2pat)->u.fld[0]).rt_rtvec))->
elem[i]))->code)
== CLOBBER)
4041 {
4042 rtx reg = XEXP (XVECEXP (newi2pat, 0, i), 0)((((((((newi2pat)->u.fld[0]).rt_rtvec))->elem[i]))->
u.fld[0]).rt_rtx)
;
4043 if (reg_overlap_mentioned_p (reg, newpat))
4044 {
4045 undo_all ();
4046 return 0;
4047 }
4048 }
4049 }
4050
4051 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
4052
4053 /* Likewise, recog_for_combine might have added clobbers to NEWPAT.
4054 Checking that the SET0's SET_DEST and SET1's SET_DEST aren't
4055 mentioned/clobbered, ensures NEWI2PAT's SET_DEST is live. */
4056 if (insn_code_number >= 0 && GET_CODE (newpat)((enum rtx_code) (newpat)->code) == PARALLEL)
4057 {
4058 for (i = XVECLEN (newpat, 0)(((((newpat)->u.fld[0]).rt_rtvec))->num_elem) - 1; i >= 0; i--)
4059 if (GET_CODE (XVECEXP (newpat, 0, i))((enum rtx_code) ((((((newpat)->u.fld[0]).rt_rtvec))->elem
[i]))->code)
== CLOBBER)
4060 {
4061 rtx reg = XEXP (XVECEXP (newpat, 0, i), 0)((((((((newpat)->u.fld[0]).rt_rtvec))->elem[i]))->u.
fld[0]).rt_rtx)
;
4062 if (reg_overlap_mentioned_p (reg, SET_DEST (set0)(((set0)->u.fld[0]).rt_rtx))
4063 || reg_overlap_mentioned_p (reg, SET_DEST (set1)(((set1)->u.fld[0]).rt_rtx)))
4064 {
4065 undo_all ();
4066 return 0;
4067 }
4068 }
4069 }
4070
4071 if (insn_code_number >= 0)
4072 split_i2i3 = 1;
4073 }
4074 }
4075
4076 /* If it still isn't recognized, fail and change things back the way they
4077 were. */
4078 if ((insn_code_number < 0
4079 /* Is the result a reasonable ASM_OPERANDS? */
4080 && (! check_asm_operands (newpat) || added_sets_1 || added_sets_2)))
4081 {
4082 undo_all ();
4083 return 0;
4084 }
4085
4086 /* If we had to change another insn, make sure it is valid also. */
4087 if (undobuf.other_insn)
4088 {
4089 CLEAR_HARD_REG_SET (newpat_used_regs);
4090
4091 other_pat = PATTERN (undobuf.other_insn);
4092 other_code_number = recog_for_combine (&other_pat, undobuf.other_insn,
4093 &new_other_notes);
4094
4095 if (other_code_number < 0 && ! check_asm_operands (other_pat))
4096 {
4097 undo_all ();
4098 return 0;
4099 }
4100 }
4101
4102 /* Only allow this combination if insn_cost reports that the
4103 replacement instructions are cheaper than the originals. */
4104 if (!combine_validate_cost (i0, i1, i2, i3, newpat, newi2pat, other_pat))
4105 {
4106 undo_all ();
4107 return 0;
4108 }
4109
4110 if (MAY_HAVE_DEBUG_BIND_INSNSglobal_options.x_flag_var_tracking_assignments)
4111 {
4112 struct undo *undo;
4113
4114 for (undo = undobuf.undos; undo; undo = undo->next)
4115 if (undo->kind == UNDO_MODE)
4116 {
4117 rtx reg = regno_reg_rtx[undo->where.regno];
4118 machine_mode new_mode = GET_MODE (reg)((machine_mode) (reg)->mode);
4119 machine_mode old_mode = undo->old_contents.m;
4120
4121 /* Temporarily revert mode back. */
4122 adjust_reg_mode (reg, old_mode);
4123
4124 if (reg == i2dest && i2scratch)
4125 {
4126 /* If we used i2dest as a scratch register with a
4127 different mode, substitute it for the original
4128 i2src while its original mode is temporarily
4129 restored, and then clear i2scratch so that we don't
4130 do it again later. */
4131 propagate_for_debug (i2, last_combined_insn, reg, i2src,
4132 this_basic_block);
4133 i2scratch = false;
4134 /* Put back the new mode. */
4135 adjust_reg_mode (reg, new_mode);
4136 }
4137 else
4138 {
4139 rtx tempreg = gen_raw_REG (old_mode, REGNO (reg)(rhs_regno(reg)));
4140 rtx_insn *first, *last;
4141
4142 if (reg == i2dest)
4143 {
4144 first = i2;
4145 last = last_combined_insn;
4146 }
4147 else
4148 {
4149 first = i3;
4150 last = undobuf.other_insn;
4151 gcc_assert (last)((void)(!(last) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/combine.cc"
, 4151, __FUNCTION__), 0 : 0))
;
4152 if (DF_INSN_LUID (last)((((df->insns[(INSN_UID (last))]))->luid))
4153 < DF_INSN_LUID (last_combined_insn)((((df->insns[(INSN_UID (last_combined_insn))]))->luid)
)
)
4154 last = last_combined_insn;
4155 }
4156
4157 /* We're dealing with a reg that changed mode but not
4158 meaning, so we want to turn it into a subreg for
4159 the new mode. However, because of REG sharing and
4160 because its mode had already changed, we have to do
4161 it in two steps. First, replace any debug uses of
4162 reg, with its original mode temporarily restored,
4163 with this copy we have created; then, replace the
4164 copy with the SUBREG of the original shared reg,
4165 once again changed to the new mode. */
4166 propagate_for_debug (first, last, reg, tempreg,
4167 this_basic_block);
4168 adjust_reg_mode (reg, new_mode);
4169 propagate_for_debug (first, last, tempreg,
4170 lowpart_subreg (old_mode, reg, new_mode),
4171 this_basic_block);
4172 }
4173 }
4174 }
4175
4176 /* If we will be able to accept this, we have made a
4177 change to the destination of I3. This requires us to
4178 do a few adjustments. */
4179
4180 if (changed_i3_dest)
4181 {
4182 PATTERN (i3) = newpat;
4183 adjust_for_new_dest (i3);
4184 }
4185
4186 /* We now know that we can do this combination. Merge the insns and
4187 update the status of registers and LOG_LINKS. */
4188
4189 if (undobuf.other_insn)
4190 {
4191 rtx note, next;
4192
4193 PATTERN (undobuf.other_insn) = other_pat;
4194
4195 /* If any of the notes in OTHER_INSN were REG_DEAD or REG_UNUSED,
4196 ensure that they are still valid. Then add any non-duplicate
4197 notes added by recog_for_combine. */
4198 for (note = REG_NOTES (undobuf.other_insn)(((undobuf.other_insn)->u.fld[6]).rt_rtx); note; note = next)
4199 {
4200 next = XEXP (note, 1)(((note)->u.fld[1]).rt_rtx);
4201
4202 if ((REG_NOTE_KIND (note)((enum reg_note) ((machine_mode) (note)->mode)) == REG_DEAD
4203 && !reg_referenced_p (XEXP (note, 0)(((note)->u.fld[0]).rt_rtx),
4204 PATTERN (undobuf.other_insn)))
4205 ||(REG_NOTE_KIND (note)((enum reg_note) ((machine_mode) (note)->mode)) == REG_UNUSED
4206 && !reg_set_p (XEXP (note, 0)(((note)->u.fld[0]).rt_rtx),
4207 PATTERN (undobuf.other_insn)))
4208 /* Simply drop equal note since it may be no longer valid
4209 for other_insn. It may be possible to record that CC
4210 register is changed and only discard those notes, but
4211 in practice it's unnecessary complication and doesn't
4212 give any meaningful improvement.
4213
4214 See PR78559. */
4215 || REG_NOTE_KIND (note)((enum reg_note) ((machine_mode) (note)->mode)) == REG_EQUAL
4216 || REG_NOTE_KIND (note)((enum reg_note) ((machine_mode) (note)->mode)) == REG_EQUIV)
4217 remove_note (undobuf.other_insn, note);
4218 }
4219
4220 distribute_notes (new_other_notes, undobuf.other_insn,
4221 undobuf.other_insn, NULLnullptr, NULL_RTX(rtx) 0, NULL_RTX(rtx) 0,
4222 NULL_RTX(rtx) 0);
4223 }
4224
4225 if (swap_i2i3)
4226 {
4227 /* I3 now uses what used to be its destination and which is now
4228 I2's destination. This requires us to do a few adjustments. */
4229 PATTERN (i3) = newpat;
4230 adjust_for_new_dest (i3);
4231 }
4232
4233 if (swap_i2i3 || split_i2i3)
4234 {
4235 /* We might need a LOG_LINK from I3 to I2. But then we used to
4236 have one, so we still will.
4237
4238 However, some later insn might be using I2's dest and have
4239 a LOG_LINK pointing at I3. We should change it to point at
4240 I2 instead. */
4241
4242 /* newi2pat is usually a SET here; however, recog_for_combine might
4243 have added some clobbers. */
4244 rtx x = newi2pat;
4245 if (GET_CODE (x)((enum rtx_code) (x)->code) == PARALLEL)
4246 x = XVECEXP (newi2pat, 0, 0)(((((newi2pat)->u.fld[0]).rt_rtvec))->elem[0]);
4247
4248 if (REG_P (SET_DEST (x))(((enum rtx_code) ((((x)->u.fld[0]).rt_rtx))->code) == REG
)
4249 || (GET_CODE (SET_DEST (x))((enum rtx_code) ((((x)->u.fld[0]).rt_rtx))->code) == SUBREG
4250 && REG_P (SUBREG_REG (SET_DEST (x)))(((enum rtx_code) (((((((x)->u.fld[0]).rt_rtx))->u.fld[
0]).rt_rtx))->code) == REG)
))
4251 {
4252 unsigned int regno = reg_or_subregno (SET_DEST (x)(((x)->u.fld[0]).rt_rtx));
4253
4254 bool done = false;
4255 for (rtx_insn *insn = NEXT_INSN (i3);
4256 !done
4257 && insn
4258 && INSN_P (insn)(((((enum rtx_code) (insn)->code) == INSN) || (((enum rtx_code
) (insn)->code) == JUMP_INSN) || (((enum rtx_code) (insn)->
code) == CALL_INSN)) || (((enum rtx_code) (insn)->code) ==
DEBUG_INSN))
4259 && BLOCK_FOR_INSN (insn) == this_basic_block;
4260 insn = NEXT_INSN (insn))
4261 {
4262 if (DEBUG_INSN_P (insn)(((enum rtx_code) (insn)->code) == DEBUG_INSN))
4263 continue;
4264 struct insn_link *link;
4265 FOR_EACH_LOG_LINK (link, insn)for ((link) = (uid_log_links[insn_uid_check (insn)]); (link);
(link) = (link)->next)
4266 if (link->insn == i3 && link->regno == regno)
4267 {
4268 link->insn = i2;
4269 done = true;
4270 break;
4271 }
4272 }
4273 }
4274 }
4275
4276 {
4277 rtx i3notes, i2notes, i1notes = 0, i0notes = 0;
4278 struct insn_link *i3links, *i2links, *i1links = 0, *i0links = 0;
4279 rtx midnotes = 0;
4280 int from_luid;
4281 /* Compute which registers we expect to eliminate. newi2pat may be setting
4282 either i3dest or i2dest, so we must check it. */
4283 rtx elim_i2 = ((newi2pat && reg_set_p (i2dest, newi2pat))
4284 || i2dest_in_i2src || i2dest_in_i1src || i2dest_in_i0src
4285 || !i2dest_killed
4286 ? 0 : i2dest);
4287 /* For i1, we need to compute both local elimination and global
4288 elimination information with respect to newi2pat because i1dest
4289 may be the same as i3dest, in which case newi2pat may be setting
4290 i1dest. Global information is used when distributing REG_DEAD
4291 note for i2 and i3, in which case it does matter if newi2pat sets
4292 i1dest or not.
4293
4294 Local information is used when distributing REG_DEAD note for i1,
4295 in which case it doesn't matter if newi2pat sets i1dest or not.
4296 See PR62151, if we have four insns combination:
4297 i0: r0 <- i0src
4298 i1: r1 <- i1src (using r0)
4299 REG_DEAD (r0)
4300 i2: r0 <- i2src (using r1)
4301 i3: r3 <- i3src (using r0)
4302 ix: using r0
4303 From i1's point of view, r0 is eliminated, no matter if it is set
4304 by newi2pat or not. In other words, REG_DEAD info for r0 in i1
4305 should be discarded.
4306
4307 Note local information only affects cases in forms like "I1->I2->I3",
4308 "I0->I1->I2->I3" or "I0&I1->I2, I2->I3". For other cases like
4309 "I0->I1, I1&I2->I3" or "I1&I2->I3", newi2pat won't set i1dest or
4310 i0dest anyway. */
4311 rtx local_elim_i1 = (i1 == 0 || i1dest_in_i1src || i1dest_in_i0src
4312 || !i1dest_killed
4313 ? 0 : i1dest);
4314 rtx elim_i1 = (local_elim_i1 == 0
4315 || (newi2pat && reg_set_p (i1dest, newi2pat))
4316 ? 0 : i1dest);
4317 /* Same case as i1. */
4318 rtx local_elim_i0 = (i0 == 0 || i0dest_in_i0src || !i0dest_killed
4319 ? 0 : i0dest);
4320 rtx elim_i0 = (local_elim_i0 == 0
4321 || (newi2pat && reg_set_p (i0dest, newi2pat))
4322 ? 0 : i0dest);
4323
4324 /* Get the old REG_NOTES and LOG_LINKS from all our insns and
4325 clear them. */
4326 i3notes = REG_NOTES (i3)(((i3)->u.fld[6]).rt_rtx), i3links = LOG_LINKS (i3)(uid_log_links[insn_uid_check (i3)]);
4327 i2notes = REG_NOTES (i2)(((i2)->u.fld[6]).rt_rtx), i2links = LOG_LINKS (i2)(uid_log_links[insn_uid_check (i2)]);
4328 if (i1)
4329 i1notes = REG_NOTES (i1)(((i1)->u.fld[6]).rt_rtx), i1links = LOG_LINKS (i1)(uid_log_links[insn_uid_check (i1)]);
4330 if (i0)
4331 i0notes = REG_NOTES (i0)(((i0)->u.fld[6]).rt_rtx), i0links = LOG_LINKS (i0)(uid_log_links[insn_uid_check (i0)]);
4332
4333 /* Ensure that we do not have something that should not be shared but
4334 occurs multiple times in the new insns. Check this by first
4335 resetting all the `used' flags and then copying anything is shared. */
4336
4337 reset_used_flags (i3notes);
4338 reset_used_flags (i2notes);
4339 reset_used_flags (i1notes);
4340 reset_used_flags (i0notes);
4341 reset_used_flags (newpat);
4342 reset_used_flags (newi2pat);
4343 if (undobuf.other_insn)
4344 reset_used_flags (PATTERN (undobuf.other_insn));
4345
4346 i3notes = copy_rtx_if_shared (i3notes);
4347 i2notes = copy_rtx_if_shared (i2notes);
4348 i1notes = copy_rtx_if_shared (i1notes);
4349 i0notes = copy_rtx_if_shared (i0notes);
4350 newpat = copy_rtx_if_shared (newpat);
4351 newi2pat = copy_rtx_if_shared (newi2pat);
4352 if (undobuf.other_insn)
4353 reset_used_flags (PATTERN (undobuf.other_insn));
4354
4355 INSN_CODE (i3)(((i3)->u.fld[5]).rt_int) = insn_code_number;
4356 PATTERN (i3) = newpat;
4357
4358 if (CALL_P (i3)(((enum rtx_code) (i3)->code) == CALL_INSN) && CALL_INSN_FUNCTION_USAGE (i3)(((i3)->u.fld[7]).rt_rtx))
4359 {
4360 for (rtx link = CALL_INSN_FUNCTION_USAGE (i3)(((i3)->u.fld[7]).rt_rtx); link;
4361 link = XEXP (link, 1)(((link)->u.fld[1]).rt_rtx))
4362 {
4363 if (substed_i2)
4364 {
4365 /* I2SRC must still be meaningful at this point. Some
4366 splitting operations can invalidate I2SRC, but those
4367 operations do not apply to calls. */
4368 gcc_assert (i2src)((void)(!(i2src) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/combine.cc"
, 4368, __FUNCTION__), 0 : 0))
;
4369 XEXP (link, 0)(((link)->u.fld[0]).rt_rtx) = simplify_replace_rtx (XEXP (link, 0)(((link)->u.fld[0]).rt_rtx),
4370 i2dest, i2src);
4371 }
4372 if (substed_i1)
4373 XEXP (link, 0)(((link)->u.fld[0]).rt_rtx) = simplify_replace_rtx (XEXP (link, 0)(((link)->u.fld[0]).rt_rtx),
4374 i1dest, i1src);
4375 if (substed_i0)
4376 XEXP (link, 0)(((link)->u.fld[0]).rt_rtx) = simplify_replace_rtx (XEXP (link, 0)(((link)->u.fld[0]).rt_rtx),
4377 i0dest, i0src);
4378 }
4379 }
4380
4381 if (undobuf.other_insn)
4382 INSN_CODE (undobuf.other_insn)(((undobuf.other_insn)->u.fld[5]).rt_int) = other_code_number;
4383
4384 /* We had one special case above where I2 had more than one set and
4385 we replaced a destination of one of those sets with the destination
4386 of I3. In that case, we have to update LOG_LINKS of insns later
4387 in this basic block. Note that this (expensive) case is rare.
4388
4389 Also, in this case, we must pretend that all REG_NOTEs for I2
4390 actually came from I3, so that REG_UNUSED notes from I2 will be
4391 properly handled. */
4392
4393 if (i3_subst_into_i2)
4394 {
4395 for (i = 0; i < XVECLEN (PATTERN (i2), 0)(((((PATTERN (i2))->u.fld[0]).rt_rtvec))->num_elem); i++)
4396 if ((GET_CODE (XVECEXP (PATTERN (i2), 0, i))((enum rtx_code) ((((((PATTERN (i2))->u.fld[0]).rt_rtvec))
->elem[i]))->code)
== SET
4397 || GET_CODE (XVECEXP (PATTERN (i2), 0, i))((enum rtx_code) ((((((PATTERN (i2))->u.fld[0]).rt_rtvec))
->elem[i]))->code)
== CLOBBER)
4398 && REG_P (SET_DEST (XVECEXP (PATTERN (i2), 0, i)))(((enum rtx_code) (((((((((PATTERN (i2))->u.fld[0]).rt_rtvec
))->elem[i]))->u.fld[0]).rt_rtx))->code) == REG)
4399 && SET_DEST (XVECEXP (PATTERN (i2), 0, i))((((((((PATTERN (i2))->u.fld[0]).rt_rtvec))->elem[i]))->
u.fld[0]).rt_rtx)
!= i2dest
4400 && ! find_reg_note (i2, REG_UNUSED,
4401 SET_DEST (XVECEXP (PATTERN (i2), 0, i))((((((((PATTERN (i2))->u.fld[0]).rt_rtvec))->elem[i]))->
u.fld[0]).rt_rtx)
))
4402 for (temp_insn = NEXT_INSN (i2);
4403 temp_insn
4404 && (this_basic_block->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)(((cfun + 0))->cfg->x_exit_block_ptr)
4405 || BB_HEAD (this_basic_block)(this_basic_block)->il.x.head_ != temp_insn);
4406 temp_insn = NEXT_INSN (temp_insn))
4407 if (temp_insn != i3 && NONDEBUG_INSN_P (temp_insn)((((enum rtx_code) (temp_insn)->code) == INSN) || (((enum rtx_code
) (temp_insn)->code) == JUMP_INSN) || (((enum rtx_code) (temp_insn
)->code) == CALL_INSN))
)
4408 FOR_EACH_LOG_LINK (link, temp_insn)for ((link) = (uid_log_links[insn_uid_check (temp_insn)]); (link
); (link) = (link)->next)
4409 if (link->insn == i2)
4410 link->insn = i3;
4411
4412 if (i3notes)
4413 {
4414 rtx link = i3notes;
4415 while (XEXP (link, 1)(((link)->u.fld[1]).rt_rtx))
4416 link = XEXP (link, 1)(((link)->u.fld[1]).rt_rtx);
4417 XEXP (link, 1)(((link)->u.fld[1]).rt_rtx) = i2notes;
4418 }
4419 else
4420 i3notes = i2notes;
4421 i2notes = 0;
4422 }
4423
4424 LOG_LINKS (i3)(uid_log_links[insn_uid_check (i3)]) = NULLnullptr;
4425 REG_NOTES (i3)(((i3)->u.fld[6]).rt_rtx) = 0;
4426 LOG_LINKS (i2)(uid_log_links[insn_uid_check (i2)]) = NULLnullptr;
4427 REG_NOTES (i2)(((i2)->u.fld[6]).rt_rtx) = 0;
4428
4429 if (newi2pat)
4430 {
4431 if (MAY_HAVE_DEBUG_BIND_INSNSglobal_options.x_flag_var_tracking_assignments && i2scratch)
4432 propagate_for_debug (i2, last_combined_insn, i2dest, i2src,
4433 this_basic_block);
4434 INSN_CODE (i2)(((i2)->u.fld[5]).rt_int) = i2_code_number;
4435 PATTERN (i2) = newi2pat;
4436 }
4437 else
4438 {
4439 if (MAY_HAVE_DEBUG_BIND_INSNSglobal_options.x_flag_var_tracking_assignments && i2src)
4440 propagate_for_debug (i2, last_combined_insn, i2dest, i2src,
4441 this_basic_block);
4442 SET_INSN_DELETED (i2)set_insn_deleted (i2);;
4443 }
4444
4445 if (i1)
4446 {
4447 LOG_LINKS (i1)(uid_log_links[insn_uid_check (i1)]) = NULLnullptr;
4448 REG_NOTES (i1)(((i1)->u.fld[6]).rt_rtx) = 0;
4449 if (MAY_HAVE_DEBUG_BIND_INSNSglobal_options.x_flag_var_tracking_assignments)
4450 propagate_for_debug (i1, last_combined_insn, i1dest, i1src,
4451 this_basic_block);
4452 SET_INSN_DELETED (i1)set_insn_deleted (i1);;
4453 }
4454
4455 if (i0)
4456 {
4457 LOG_LINKS (i0)(uid_log_links[insn_uid_check (i0)]) = NULLnullptr;
4458 REG_NOTES (i0)(((i0)->u.fld[6]).rt_rtx) = 0;
4459 if (MAY_HAVE_DEBUG_BIND_INSNSglobal_options.x_flag_var_tracking_assignments)
4460 propagate_for_debug (i0, last_combined_insn, i0dest, i0src,
4461 this_basic_block);
4462 SET_INSN_DELETED (i0)set_insn_deleted (i0);;
4463 }
4464
4465 /* Get death notes for everything that is now used in either I3 or
4466 I2 and used to die in a previous insn. If we built two new
4467 patterns, move from I1 to I2 then I2 to I3 so that we get the
4468 proper movement on registers that I2 modifies. */
4469
4470 if (i0)
4471 from_luid = DF_INSN_LUID (i0)((((df->insns[(INSN_UID (i0))]))->luid));
4472 else if (i1)
4473 from_luid = DF_INSN_LUID (i1)((((df->insns[(INSN_UID (i1))]))->luid));
4474 else
4475 from_luid = DF_INSN_LUID (i2)((((df->insns[(INSN_UID (i2))]))->luid));
4476 if (newi2pat)
4477 move_deaths (newi2pat, NULL_RTX(rtx) 0, from_luid, i2, &midnotes);
4478 move_deaths (newpat, newi2pat, from_luid, i3, &midnotes);
4479
4480 /* Distribute all the LOG_LINKS and REG_NOTES from I1, I2, and I3. */
4481 if (i3notes)
4482 distribute_notes (i3notes, i3, i3, newi2pat ? i2 : NULLnullptr,
4483 elim_i2, elim_i1, elim_i0);
4484 if (i2notes)
4485 distribute_notes (i2notes, i2, i3, newi2pat ? i2 : NULLnullptr,
4486 elim_i2, elim_i1, elim_i0);
4487 if (i1notes)
4488 distribute_notes (i1notes, i1, i3, newi2pat ? i2 : NULLnullptr,
4489 elim_i2, local_elim_i1, local_elim_i0);
4490 if (i0notes)
4491 distribute_notes (i0notes, i0, i3, newi2pat ? i2 : NULLnullptr,
4492 elim_i2, elim_i1, local_elim_i0);
4493 if (midnotes)
4494 distribute_notes (midnotes, NULLnullptr, i3, newi2pat ? i2 : NULLnullptr,
4495 elim_i2, elim_i1, elim_i0);
4496
4497 /* Distribute any notes added to I2 or I3 by recog_for_combine. We
4498 know these are REG_UNUSED and want them to go to the desired insn,
4499 so we always pass it as i3. */
4500
4501 if (newi2pat && new_i2_notes)
4502 distribute_notes (new_i2_notes, i2, i2, NULLnullptr, NULL_RTX(rtx) 0, NULL_RTX(rtx) 0,
4503 NULL_RTX(rtx) 0);
4504
4505 if (new_i3_notes)
4506 distribute_notes (new_i3_notes, i3, i3, NULLnullptr, NULL_RTX(rtx) 0, NULL_RTX(rtx) 0,
4507 NULL_RTX(rtx) 0);
4508
4509 /* If I3DEST was used in I3SRC, it really died in I3. We may need to
4510 put a REG_DEAD note for it somewhere. If NEWI2PAT exists and sets
4511 I3DEST, the death must be somewhere before I2, not I3. If we passed I3
4512 in that case, it might delete I2. Similarly for I2 and I1.
4513 Show an additional death due to the REG_DEAD note we make here. If
4514 we discard it in distribute_notes, we will decrement it again. */
4515
4516 if (i3dest_killed)
4517 {
4518 rtx new_note = alloc_reg_note (REG_DEAD, i3dest_killed, NULL_RTX(rtx) 0);
4519 if (newi2pat && reg_set_p (i3dest_killed, newi2pat))
4520 distribute_notes (new_note, NULLnullptr, i2, NULLnullptr, elim_i2,
4521 elim_i1, elim_i0);
4522 else
4523 distribute_notes (new_note, NULLnullptr, i3, newi2pat ? i2 : NULLnullptr,
4524 elim_i2, elim_i1, elim_i0);
4525 }
4526
4527 if (i2dest_in_i2src)
4528 {
4529 rtx new_note = alloc_reg_note (REG_DEAD, i2dest, NULL_RTX(rtx) 0);
4530 if (newi2pat && reg_set_p (i2dest, newi2pat))
4531 distribute_notes (new_note, NULLnullptr, i2, NULLnullptr, NULL_RTX(rtx) 0,
4532 NULL_RTX(rtx) 0, NULL_RTX(rtx) 0);
4533 else
4534 distribute_notes (new_note, NULLnullptr, i3, newi2pat ? i2 : NULLnullptr,
4535 NULL_RTX(rtx) 0, NULL_RTX(rtx) 0, NULL_RTX(rtx) 0);
4536 }
4537
4538 if (i1dest_in_i1src)
4539 {
4540 rtx new_note = alloc_reg_note (REG_DEAD, i1dest, NULL_RTX(rtx) 0);
4541 if (newi2pat && reg_set_p (i1dest, newi2pat))
4542 distribute_notes (new_note, NULLnullptr, i2, NULLnullptr, NULL_RTX(rtx) 0,
4543 NULL_RTX(rtx) 0, NULL_RTX(rtx) 0);
4544 else
4545 distribute_notes (new_note, NULLnullptr, i3, newi2pat ? i2 : NULLnullptr,
4546 NULL_RTX(rtx) 0, NULL_RTX(rtx) 0, NULL_RTX(rtx) 0);
4547 }
4548
4549 if (i0dest_in_i0src)
4550 {
4551 rtx new_note = alloc_reg_note (REG_DEAD, i0dest, NULL_RTX(rtx) 0);
4552 if (newi2pat && reg_set_p (i0dest, newi2pat))
4553 distribute_notes (new_note, NULLnullptr, i2, NULLnullptr, NULL_RTX(rtx) 0,
4554 NULL_RTX(rtx) 0, NULL_RTX(rtx) 0);
4555 else
4556 distribute_notes (new_note, NULLnullptr, i3, newi2pat ? i2 : NULLnullptr,
4557 NULL_RTX(rtx) 0, NULL_RTX(rtx) 0, NULL_RTX(rtx) 0);
4558 }
4559
4560 distribute_links (i3links);
4561 distribute_links (i2links);
4562 distribute_links (i1links);
4563 distribute_links (i0links);
4564
4565 if (REG_P (i2dest)(((enum rtx_code) (i2dest)->code) == REG))
4566 {
4567 struct insn_link *link;
4568 rtx_insn *i2_insn = 0;
4569 rtx i2_val = 0, set;
4570
4571 /* The insn that used to set this register doesn't exist, and
4572 this life of the register may not exist either. See if one of
4573 I3's links points to an insn that sets I2DEST. If it does,
4574 that is now the last known value for I2DEST. If we don't update
4575 this and I2 set the register to a value that depended on its old
4576 contents, we will get confused. If this insn is used, thing
4577 will be set correctly in combine_instructions. */
4578 FOR_EACH_LOG_LINK (link, i3)for ((link) = (uid_log_links[insn_uid_check (i3)]); (link); (
link) = (link)->next)
4579 if ((set = single_set (link->insn)) != 0
4580 && rtx_equal_p (i2dest, SET_DEST (set)(((set)->u.fld[0]).rt_rtx)))
4581 i2_insn = link->insn, i2_val = SET_SRC (set)(((set)->u.fld[1]).rt_rtx);
4582
4583 record_value_for_reg (i2dest, i2_insn, i2_val);
4584
4585 /* If the reg formerly set in I2 died only once and that was in I3,
4586 zero its use count so it won't make `reload' do any work. */
4587 if (! added_sets_2
4588 && (newi2pat == 0 || ! reg_mentioned_p (i2dest, newi2pat))
4589 && ! i2dest_in_i2src
4590 && REGNO (i2dest)(rhs_regno(i2dest)) < reg_n_sets_max)
4591 INC_REG_N_SETS (REGNO (i2dest), -1)(regstat_n_sets_and_refs[(rhs_regno(i2dest))].sets += -1);
4592 }
4593
4594 if (i1 && REG_P (i1dest)(((enum rtx_code) (i1dest)->code) == REG))
4595 {
4596 struct insn_link *link;
4597 rtx_insn *i1_insn = 0;
4598 rtx i1_val = 0, set;
4599
4600 FOR_EACH_LOG_LINK (link, i3)for ((link) = (uid_log_links[insn_uid_check (i3)]); (link); (
link) = (link)->next)
4601 if ((set = single_set (link->insn)) != 0
4602 && rtx_equal_p (i1dest, SET_DEST (set)(((set)->u.fld[0]).rt_rtx)))
4603 i1_insn = link->insn, i1_val = SET_SRC (set)(((set)->u.fld[1]).rt_rtx);
4604
4605 record_value_for_reg (i1dest, i1_insn, i1_val);
4606
4607 if (! added_sets_1
4608 && ! i1dest_in_i1src
4609 && REGNO (i1dest)(rhs_regno(i1dest)) < reg_n_sets_max)
4610 INC_REG_N_SETS (REGNO (i1dest), -1)(regstat_n_sets_and_refs[(rhs_regno(i1dest))].sets += -1);
4611 }
4612
4613 if (i0 && REG_P (i0dest)(((enum rtx_code) (i0dest)->code) == REG))
4614 {
4615 struct insn_link *link;
4616 rtx_insn *i0_insn = 0;
4617 rtx i0_val = 0, set;
4618
4619 FOR_EACH_LOG_LINK (link, i3)for ((link) = (uid_log_links[insn_uid_check (i3)]); (link); (
link) = (link)->next)
4620 if ((set = single_set (link->insn)) != 0
4621 && rtx_equal_p (i0dest, SET_DEST (set)(((set)->u.fld[0]).rt_rtx)))
4622 i0_insn = link->insn, i0_val = SET_SRC (set)(((set)->u.fld[1]).rt_rtx);
4623
4624 record_value_for_reg (i0dest, i0_insn, i0_val);
4625
4626 if (! added_sets_0
4627 && ! i0dest_in_i0src
4628 && REGNO (i0dest)(rhs_regno(i0dest)) < reg_n_sets_max)
4629 INC_REG_N_SETS (REGNO (i0dest), -1)(regstat_n_sets_and_refs[(rhs_regno(i0dest))].sets += -1);
4630 }
4631
4632 /* Update reg_stat[].nonzero_bits et al for any changes that may have
4633 been made to this insn. The order is important, because newi2pat
4634 can affect nonzero_bits of newpat. */
4635 if (newi2pat)
4636 note_pattern_stores (newi2pat, set_nonzero_bits_and_sign_copies, NULLnullptr);
4637 note_pattern_stores (newpat, set_nonzero_bits_and_sign_copies, NULLnullptr);
4638 }
4639
4640 if (undobuf.other_insn != NULL_RTX(rtx) 0)
4641 {
4642 if (dump_file)
4643 {
4644 fprintf (dump_file, "modifying other_insn ");
4645 dump_insn_slim (dump_file, undobuf.other_insn);
4646 }
4647 df_insn_rescan (undobuf.other_insn);
4648 }
4649
4650 if (i0 && !(NOTE_P (i0)(((enum rtx_code) (i0)->code) == NOTE) && (NOTE_KIND (i0)(((i0)->u.fld[4]).rt_int) == NOTE_INSN_DELETED)))
4651 {
4652 if (dump_file)
4653 {
4654 fprintf (dump_file, "modifying insn i0 ");
4655 dump_insn_slim (dump_file, i0);
4656 }
4657 df_insn_rescan (i0);
4658 }
4659
4660 if (i1 && !(NOTE_P (i1)(((enum rtx_code) (i1)->code) == NOTE) && (NOTE_KIND (i1)(((i1)->u.fld[4]).rt_int) == NOTE_INSN_DELETED)))
4661 {
4662 if (dump_file)
4663 {
4664 fprintf (dump_file, "modifying insn i1 ");
4665 dump_insn_slim (dump_file, i1);
4666 }
4667 df_insn_rescan (i1);
4668 }
4669
4670 if (i2 && !(NOTE_P (i2)(((enum rtx_code) (i2)->code) == NOTE) && (NOTE_KIND (i2)(((i2)->u.fld[4]).rt_int) == NOTE_INSN_DELETED)))
4671 {
4672 if (dump_file)
4673 {
4674 fprintf (dump_file, "modifying insn i2 ");
4675 dump_insn_slim (dump_file, i2);
4676 }
4677 df_insn_rescan (i2);
4678 }
4679
4680 if (i3 && !(NOTE_P (i3)(((enum rtx_code) (i3)->code) == NOTE) && (NOTE_KIND (i3)(((i3)->u.fld[4]).rt_int) == NOTE_INSN_DELETED)))
4681 {
4682 if (dump_file)
4683 {
4684 fprintf (dump_file, "modifying insn i3 ");
4685 dump_insn_slim (dump_file, i3);
4686 }
4687 df_insn_rescan (i3);
4688 }
4689
4690 /* Set new_direct_jump_p if a new return or simple jump instruction
4691 has been created. Adjust the CFG accordingly. */
4692 if (returnjump_p (i3) || any_uncondjump_p (i3))
4693 {
4694 *new_direct_jump_p = 1;
4695 mark_jump_label (PATTERN (i3), i3, 0);
4696 update_cfg_for_uncondjump (i3);
4697 }
4698
4699 if (undobuf.other_insn != NULL_RTX(rtx) 0
4700 && (returnjump_p (undobuf.other_insn)
4701 || any_uncondjump_p (undobuf.other_insn)))
4702 {
4703 *new_direct_jump_p = 1;
4704 update_cfg_for_uncondjump (undobuf.other_insn);
4705 }
4706
4707 if (GET_CODE (PATTERN (i3))((enum rtx_code) (PATTERN (i3))->code) == TRAP_IF
4708 && XEXP (PATTERN (i3), 0)(((PATTERN (i3))->u.fld[0]).rt_rtx) == const1_rtx(const_int_rtx[64 +1]))
4709 {
4710 basic_block bb = BLOCK_FOR_INSN (i3);
4711 gcc_assert (bb)((void)(!(bb) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/combine.cc"
, 4711, __FUNCTION__), 0 : 0))
;
4712 remove_edge (split_block (bb, i3));
4713 emit_barrier_after_bb (bb);
4714 *new_direct_jump_p = 1;
4715 }
4716
4717 if (undobuf.other_insn
4718 && GET_CODE (PATTERN (undobuf.other_insn))((enum rtx_code) (PATTERN (undobuf.other_insn))->code) == TRAP_IF
4719 && XEXP (PATTERN (undobuf.other_insn), 0)(((PATTERN (undobuf.other_insn))->u.fld[0]).rt_rtx) == const1_rtx(const_int_rtx[64 +1]))
4720 {
4721 basic_block bb = BLOCK_FOR_INSN (undobuf.other_insn);
4722 gcc_assert (bb)((void)(!(bb) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/combine.cc"
, 4722, __FUNCTION__), 0 : 0))
;
4723 remove_edge (split_block (bb, undobuf.other_insn));
4724 emit_barrier_after_bb (bb);
4725 *new_direct_jump_p = 1;
4726 }
4727
4728 /* A noop might also need cleaning up of CFG, if it comes from the
4729 simplification of a jump. */
4730 if (JUMP_P (i3)(((enum rtx_code) (i3)->code) == JUMP_INSN)
4731 && GET_CODE (newpat)((enum rtx_code) (newpat)->code) == SET
4732 && SET_SRC (newpat)(((newpat)->u.fld[1]).rt_rtx) == pc_rtx
4733 && SET_DEST (newpat)(((newpat)->u.fld[0]).rt_rtx) == pc_rtx)
4734 {
4735 *new_direct_jump_p = 1;
4736 update_cfg_for_uncondjump (i3);
4737 }
4738
4739 if (undobuf.other_insn != NULL_RTX(rtx) 0
4740 && JUMP_P (undobuf.other_insn)(((enum rtx_code) (undobuf.other_insn)->code) == JUMP_INSN
)
4741 && GET_CODE (PATTERN (undobuf.other_insn))((enum rtx_code) (PATTERN (undobuf.other_insn))->code) == SET
4742 && SET_SRC (PATTERN (undobuf.other_insn))(((PATTERN (undobuf.other_insn))->u.fld[1]).rt_rtx) == pc_rtx
4743 && SET_DEST (PATTERN (undobuf.other_insn))(((PATTERN (undobuf.other_insn))->u.fld[0]).rt_rtx) == pc_rtx)
4744 {
4745 *new_direct_jump_p = 1;
4746 update_cfg_for_uncondjump (undobuf.other_insn);
4747 }
4748
4749 combine_successes++;
4750 undo_commit ();
4751
4752 rtx_insn *ret = newi2pat ? i2 : i3;
4753 if (added_links_insn && DF_INSN_LUID (added_links_insn)((((df->insns[(INSN_UID (added_links_insn))]))->luid)) < DF_INSN_LUID (ret)((((df->insns[(INSN_UID (ret))]))->luid)))
4754 ret = added_links_insn;
4755 if (added_notes_insn && DF_INSN_LUID (added_notes_insn)((((df->insns[(INSN_UID (added_notes_insn))]))->luid)) < DF_INSN_LUID (ret)((((df->insns[(INSN_UID (ret))]))->luid)))
4756 ret = added_notes_insn;
4757
4758 return ret;
4759}
4760
4761/* Get a marker for undoing to the current state. */
4762
4763static void *
4764get_undo_marker (void)
4765{
4766 return undobuf.undos;
4767}
4768
4769/* Undo the modifications up to the marker. */
4770
4771static void
4772undo_to_marker (void *marker)
4773{
4774 struct undo *undo, *next;
4775
4776 for (undo = undobuf.undos; undo != marker; undo = next)
4777 {
4778 gcc_assert (undo)((void)(!(undo) ? fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/combine.cc"
, 4778, __FUNCTION__), 0 : 0))
;
4779
4780 next = undo->next;
4781 switch (undo->kind)
4782 {
4783 case UNDO_RTX:
4784 *undo->where.r = undo->old_contents.r;
4785 break;
4786 case UNDO_INT:
4787 *undo->where.i = undo->old_contents.i;
4788 break;
4789 case UNDO_MODE:
4790 adjust_reg_mode (regno_reg_rtx[undo->where.regno],
4791 undo->old_contents.m);
4792 break;
4793 case UNDO_LINKS:
4794 *undo->where.l = undo->old_contents.l;
4795 break;
4796 default:
4797 gcc_unreachable ()(fancy_abort ("/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/combine.cc"
, 4797, __FUNCTION__))
;
4798 }
4799
4800 undo->next = undobuf.frees;
4801 undobuf.frees = undo;
4802 }
4803
4804 undobuf.undos = (struct undo *) marker;
4805}
4806
4807/* Undo all the modifications recorded in undobuf. */
4808
4809static void
4810undo_all (void)
4811{
4812 undo_to_marker (0);
4813}
4814
4815/* We've committed to accepting the changes we made. Move all
4816 of the undos to the free list. */
4817
4818static void
4819undo_commit (void)
4820{
4821 struct undo *undo, *next;
4822
4823 for (undo = undobuf.undos; undo; undo = next)
4824 {
4825 next = undo->next;
4826 undo->next = undobuf.frees;
4827 undobuf.frees = undo;
4828 }
4829 undobuf.undos = 0;
4830}
4831
4832/* Find the innermost point within the rtx at LOC, possibly LOC itself,
4833 where we have an arithmetic expression and return that point. LOC will
4834 be inside INSN.
4835
4836 try_combine will call this function to see if an insn can be split into
4837 two insns. */
4838
4839static rtx *
4840find_split_point (rtx *loc, rtx_insn *insn, bool set_src)
4841{
4842 rtx x = *loc;
4843 enum rtx_code code = GET_CODE (x)((enum rtx_code) (x)->code);
4844 rtx *split;
4845 unsigned HOST_WIDE_INTlong len = 0;
4846 HOST_WIDE_INTlong pos = 0;
4847 int unsignedp = 0;
4848 rtx inner = NULL_RTX(rtx) 0;
4849 scalar_int_mode mode, inner_mode;
4850
4851 /* First special-case some codes. */
4852 switch (code)
4853 {
4854 case SUBREG:
4855#ifdef INSN_SCHEDULING
4856 /* If we are making a paradoxical SUBREG invalid, it becomes a split
4857 point. */
4858 if (MEM_P (SUBREG_REG (x))(((enum rtx_code) ((((x)->u.fld[0]).rt_rtx))->code) == MEM
)
)
4859 return loc;
4860#endif
4861 return find_split_point (&SUBREG_REG (x)(((x)->u.fld[0]).rt_rtx), insn, false);
4862
4863 case MEM:
4864 /* If we have (mem (const ..)) or (mem (symbol_ref ...)), split it
4865 using LO_SUM and HIGH. */
4866 if (HAVE_lo_sum0 && (GET_CODE (XEXP (x, 0))((enum rtx_code) ((((x)->u.fld[0]).rt_rtx))->code) == CONST
4867 || GET_CODE (XEXP (x, 0))((enum rtx_code) ((((x)->u.fld[0]).rt_rtx))->code) == SYMBOL_REF))
4868 {
4869 machine_mode address_mode = get_address_mode (x);
4870
4871 SUBST (XEXP (x, 0),do_SUBST (&((((x)->u.fld[0]).rt_rtx)), (gen_rtx_fmt_ee_stat
((LO_SUM), ((address_mode)), ((gen_rtx_fmt_e_stat ((HIGH), (
(address_mode)), (((((x)->u.fld[0]).rt_rtx))) ))), (((((x)
->u.fld[0]).rt_rtx))) )))
4872 gen_rtx_LO_SUM (address_mode,do_SUBST (&((((x)->u.fld[0]).rt_rtx)), (gen_rtx_fmt_ee_stat
((LO_SUM), ((address_mode)), ((gen_rtx_fmt_e_stat ((HIGH), (
(address_mode)), (((((x)->u.fld[0]).rt_rtx))) ))), (((((x)
->u.fld[0]).rt_rtx))) )))
4873 gen_rtx_HIGH (address_mode, XEXP (x, 0)),do_SUBST (&((((x)->u.fld[0]).rt_rtx)), (gen_rtx_fmt_ee_stat
((LO_SUM), ((address_mode)), ((gen_rtx_fmt_e_stat ((HIGH), (
(address_mode)), (((((x)->u.fld[0]).rt_rtx))) ))), (((((x)
->u.fld[0]).rt_rtx))) )))
4874 XEXP (x, 0)))do_SUBST (&((((x)->u.fld[0]).rt_rtx)), (gen_rtx_fmt_ee_stat
((LO_SUM), ((address_mode)), ((gen_rtx_fmt_e_stat ((HIGH), (
(address_mode)), (((((x)->u.fld[0]).rt_rtx))) ))), (((((x)
->u.fld[0]).rt_rtx))) )))
;
4875 return &XEXP (XEXP (x, 0), 0)((((((x)->u.fld[0]).rt_rtx))->u.fld[0]).rt_rtx);
4876 }
4877
4878 /* If we have a PLUS whose second operand is a constant and the
4879 address is not valid, perhaps we can split it up using
4880 the machine-specific way to split large constants. We use
4881 the first pseudo-reg (one of the virtual regs) as a placeholder;
4882 it will not remain in the result. */
4883 if (GET_CODE (XEXP (x, 0))((enum rtx_code) ((((x)->u.fld[0]).rt_rtx))->code) == PLUS
4884 && CONST_INT_P (XEXP (XEXP (x, 0), 1))(((enum rtx_code) (((((((x)->u.fld[0]).rt_rtx))->u.fld[
1]).rt_rtx))->code) == CONST_INT)
4885 && ! memory_address_addr_space_p (GET_MODE (x)((machine_mode) (x)->mode), XEXP (x, 0)(((x)->u.fld[0]).rt_rtx),
4886 MEM_ADDR_SPACE (x)(get_mem_attrs (x)->addrspace)))
4887 {
4888 rtx reg = regno_reg_rtx[FIRST_PSEUDO_REGISTER76];
4889 rtx_insn *seq = combine_split_insns (gen_rtx_SET (reg, XEXP (x, 0))gen_rtx_fmt_ee_stat ((SET), (((void) 0, E_VOIDmode)), ((reg))
, (((((x)->u.fld[0]).rt_rtx))) )
,
4890 subst_insn);
4891
4892 /* This should have produced two insns, each of which sets our
4893 placeholder. If the source of the second is a valid address,
4894 we can put both sources together and make a split point
4895 in the middle. */
4896
4897 if (seq
4898 && NEXT_INSN (seq) != NULL_RTX(rtx) 0
4899 && NEXT_INSN (NEXT_INSN (seq)) == NULL_RTX(rtx) 0
4900 && NONJUMP_INSN_P (seq)(((enum rtx_code) (seq)->code) == INSN)
4901 && GET_CODE (PATTERN (seq))((enum rtx_code) (PATTERN (seq))->code) == SET
4902 && SET_DEST (PATTERN (seq))(((PATTERN (seq))->u.fld[0]).rt_rtx) == reg
4903 && ! reg_mentioned_p (reg,
4904 SET_SRC (PATTERN (seq))(((PATTERN (seq))->u.fld[1]).rt_rtx))
4905 && NONJUMP_INSN_P (NEXT_INSN (seq))(((enum rtx_code) (NEXT_INSN (seq))->code) == INSN)
4906 && GET_CODE (PATTERN (NEXT_INSN (seq)))((enum rtx_code) (PATTERN (NEXT_INSN (seq)))->code) == SET
4907 && SET_DEST (PATTERN (NEXT_INSN (seq)))(((PATTERN (NEXT_INSN (seq)))->u.fld[0]).rt_rtx) == reg
4908 && memory_address_addr_space_p
4909 (GET_MODE (x)((machine_mode) (x)->mode), SET_SRC (PATTERN (NEXT_INSN (seq)))(((PATTERN (NEXT_INSN (seq)))->u.fld[1]).rt_rtx),
4910 MEM_ADDR_SPACE (x)(get_mem_attrs (x)->addrspace)))
4911 {
4912 rtx src1 = SET_SRC (PATTERN (seq))(((PATTERN (seq))->u.fld[1]).rt_rtx);
4913 rtx src2 = SET_SRC (PATTERN (NEXT_INSN (seq)))(((PATTERN (NEXT_INSN (seq)))->u.fld[1]).rt_rtx);
4914
4915 /* Replace the placeholder in SRC2 with SRC1. If we can
4916 find where in SRC2 it was placed, that can become our
4917 split point and we can replace this address with SRC2.
4918 Just try two obvious places. */
4919
4920 src2 = replace_rtx (src2, reg, src1);
4921 split = 0;
4922 if (XEXP (src2, 0)(((src2)->u.fld[0]).rt_rtx) == src1)
4923 split = &XEXP (src2, 0)(((src2)->u.fld[0]).rt_rtx);
4924 else if (GET_RTX_FORMAT (GET_CODE (XEXP (src2, 0)))(rtx_format[(int) (((enum rtx_code) ((((src2)->u.fld[0]).rt_rtx
))->code))])
[0] == 'e'
4925 && XEXP (XEXP (src2, 0), 0)((((((src2)->u.fld[0]).rt_rtx))->u.fld[0]).rt_rtx) == src1)
4926 split = &XEXP (XEXP (src2, 0), 0)((((((src2)->u.fld[0]).rt_rtx))->u.fld[0]).rt_rtx);
4927
4928 if (split)
4929 {
4930 SUBST (XEXP (x, 0), src2)do_SUBST (&((((x)->u.fld[0]).rt_rtx)), (src2));
4931 return split;
4932 }
4933 }
4934
4935 /* If that didn't work and we have a nested plus, like:
4936 ((REG1 * CONST1) + REG2) + CONST2 and (REG1 + REG2) + CONST2
4937 is valid address, try to split (REG1 * CONST1). */
4938 if (GET_CODE (XEXP (XEXP (x, 0), 0))((enum rtx_code) (((((((x)->u.fld[0]).rt_rtx))->u.fld[0
]).rt_rtx))->code)
== PLUS
4939 && !OBJECT_P (XEXP (XEXP (XEXP (x, 0), 0), 0))(((rtx_class[(int) (((enum rtx_code) ((((((((((x)->u.fld[0
]).rt_rtx))->u.fld[0]).rt_rtx))->u.fld[0]).rt_rtx))->
code))]) & (~1)) == (RTX_OBJ & (~1)))
4940 && OBJECT_P (XEXP (XEXP (XEXP (x, 0), 0), 1))(((rtx_class[(int) (((enum rtx_code) ((((((((((x)->u.fld[0
]).rt_rtx))->u.fld[0]).rt_rtx))->u.fld[1]).rt_rtx))->
code))]) & (~1)) == (RTX_OBJ & (~1)))
4941 && ! (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0))((enum rtx_code) ((((((((((x)->u.fld[0]).rt_rtx))->u.fld
[0]).rt_rtx))->u.fld[0]).rt_rtx))->code)
== SUBREG
4942 && OBJECT_P (SUBREG_REG (XEXP (XEXP (XEXP (x, 0),(((rtx_class[(int) (((enum rtx_code) (((((((((((((x)->u.fld
[0]).rt_rtx))->u.fld[0]).rt_rtx))->u.fld[0]).rt_rtx))->
u.fld[0]).rt_rtx))->code))]) & (~1)) == (RTX_OBJ &
(~1)))
4943 0), 0)))(((rtx_class[(int) (((enum rtx_code) (((((((((((((x)->u.fld
[0]).rt_rtx))->u.fld[0]).rt_rtx))->u.fld[0]).rt_rtx))->
u.fld[0]).rt_rtx))->code))]) & (~1)) == (RTX_OBJ &
(~1)))
))
4944 {
4945 rtx tem = XEXP (XEXP (XEXP (x, 0), 0), 0)(((((((((x)->u.fld[0]).rt_rtx))->u.fld[0]).rt_rtx))->
u.fld[0]).rt_rtx)
;
4946 XEXP (XEXP (XEXP (x, 0), 0), 0)(((((((((x)->u.fld[0]).rt_rtx))->u.fld[0]).rt_rtx))->
u.fld[0]).rt_rtx)
= reg;
4947 if (memory_address_addr_space_p (GET_MODE (x)((machine_mode) (x)->mode), XEXP (x, 0)(((x)->u.fld[0]).rt_rtx),
4948 MEM_ADDR_SPACE (x)(get_mem_attrs (x)->addrspace)))
4949 {
4950 XEXP (XEXP (XEXP (x, 0), 0), 0)(((((((((x)->u.fld[0]).rt_rtx))->u.fld[0]).rt_rtx))->
u.fld[0]).rt_rtx)
= tem;
4951 return &XEXP (XEXP (XEXP (x, 0), 0), 0)(((((((((x)->u.fld[0]).rt_rtx))->u.fld[0]).rt_rtx))->
u.fld[0]).rt_rtx)
;
4952 }
4953 XEXP (XEXP (XEXP (x, 0), 0), 0)(((((((((x)->u.fld[0]).rt_rtx))->u.fld[0]).rt_rtx))->
u.fld[0]).rt_rtx)
= tem;
4954 }
4955 else if (GET_CODE (XEXP (XEXP (x, 0), 0))((enum rtx_code) (((((((x)->u.fld[0]).rt_rtx))->u.fld[0
]).rt_rtx))->code)
== PLUS
4956 && OBJECT_P (XEXP (XEXP (XEXP (x, 0), 0), 0))(((rtx_class[(int) (((enum rtx_code) ((((((((((x)->u.fld[0
]).rt_rtx))->u.fld[0]).rt_rtx))->u.fld[0]).rt_rtx))->
code))]) & (~1)) == (RTX_OBJ & (~1)))
4957 && !OBJECT_P (XEXP (XEXP (XEXP (x, 0), 0), 1))(((rtx_class[(int) (((enum rtx_code) ((((((((((x)->u.fld[0
]).rt_rtx))->u.fld[0]).rt_rtx))->u.fld[1]).rt_rtx))->
code))]) & (~1)) == (RTX_OBJ & (~1)))
4958 && ! (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1))((enum rtx_code) ((((((((((x)->u.fld[0]).rt_rtx))->u.fld
[0]).rt_rtx))->u.fld[1]).rt_rtx))->code)
== SUBREG
4959 && OBJECT_P (SUBREG_REG (XEXP (XEXP (XEXP (x, 0),(((rtx_class[(int) (((enum rtx_code) (((((((((((((x)->u.fld
[0]).rt_rtx))->u.fld[0]).rt_rtx))->u.fld[1]).rt_rtx))->
u.fld[0]).rt_rtx))->code))]) & (~1)) == (RTX_OBJ &
(~1)))
4960 0), 1)))(((rtx_class[(int) (((enum rtx_code) (((((((((((((x)->u.fld
[0]).rt_rtx))->u.fld[0]).rt_rtx))->u.fld[1]).rt_rtx))->
u.fld[0]).rt_rtx))->code))]) & (~1)) == (RTX_OBJ &
(~1)))
))
4961 {
4962 rtx tem = XEXP (XEXP (XEXP (x, 0), 0), 1)(((((((((x)->u.fld[0]).rt_rtx))->u.fld[0]).rt_rtx))->
u.fld[1]).rt_rtx)
;
4963 XEXP (XEXP (XEXP (x, 0), 0), 1)(((((((((x)->u.fld[0]).rt_rtx))->u.fld[0]).rt_rtx))->
u.fld[1]).rt_rtx)
= reg;
4964 if (memory_address_addr_space_p (GET_MODE (x)((machine_mode) (x)->mode), XEXP (x, 0)(((x)->u.fld[0]).rt_rtx),
4965 MEM_ADDR_SPACE (x)(get_mem_attrs (x)->addrspace)))
4966 {
4967 XEXP (XEXP (XEXP (x, 0), 0), 1)(((((((((x)->u.fld[0]).rt_rtx))->u.fld[0]).rt_rtx))->
u.fld[1]).rt_rtx)
= tem;
4968 return &XEXP (XEXP (XEXP (x, 0), 0), 1)(((((((((x)->u.fld[0]).rt_rtx))->u.fld[0]).rt_rtx))->
u.fld[1]).rt_rtx)
;
4969 }
4970 XEXP (XEXP (XEXP (x, 0), 0), 1)(((((((((x)->u.fld[0]).rt_rtx))->u.fld[0]).rt_rtx))->
u.fld[1]).rt_rtx)
= tem;
4971 }
4972
4973 /* If that didn't work, perhaps the first operand is complex and
4974 needs to be computed separately, so make a split point there.
4975 This will occur on machines that just support REG + CONST
4976 and have a constant moved through some previous computation. */
4977 if (!OBJECT_P (XEXP (XEXP (x, 0), 0))(((rtx_class[(int) (((enum rtx_code) (((((((x)->u.fld[0]).
rt_rtx))->u.fld[0]).rt_rtx))->code))]) & (~1)) == (
RTX_OBJ & (~1)))
4978 && ! (GET_CODE (XEXP (XEXP (x, 0), 0))((enum rtx_code) (((((((x)->u.fld[0]).rt_rtx))->u.fld[0
]).rt_rtx))->code)
== SUBREG
4979 && OBJECT_P (SUBREG_REG (XEXP (XEXP (x, 0), 0)))(((rtx_class[(int) (((enum rtx_code) ((((((((((x)->u.fld[0
]).rt_rtx))->u.fld[0]).rt_rtx))->u.fld[0]).rt_rtx))->
code))]) & (~1)) == (RTX_OBJ & (~1)))
))
4980 return &XEXP (XEXP (x, 0), 0)((((((x)->u.fld[0]).rt_rtx))->u.fld[0]).rt_rtx);
4981 }
4982
4983 /* If we have a PLUS whose first operand is complex, try computing it
4984 separately by making a split there. */
4985 if (GET_CODE (XEXP (x, 0))((enum rtx_code) ((((x)->u.fld[0]).rt_rtx))->code) == PLUS
4986 && ! memory_address_addr_space_p (GET_MODE (x)((machine_mode) (x)->mode), XEXP (x, 0)(((x)->u.fld[0]).rt_rtx),
4987 MEM_ADDR_SPACE (x)(get_mem_attrs (x)->addrspace))
4988 && ! OBJECT_P (XEXP (XEXP (x, 0), 0))(((rtx_class[(int) (((enum rtx_code) (((((((x)->u.fld[0]).
rt_rtx))->u.fld[0]).rt_rtx))->code))]) & (~1)) == (
RTX_OBJ & (~1)))
4989 && ! (GET_CODE (XEXP (XEXP (x, 0), 0))((enum rtx_code) (((((((x)->u.fld[0]).rt_rtx))->u.fld[0
]).rt_rtx))->code)
== SUBREG
4990 && OBJECT_P (SUBREG_REG (XEXP (XEXP (x, 0), 0)))(((rtx_class[(int) (((enum rtx_code) ((((((((((x)->u.fld[0
]).rt_rtx))->u.fld[0]).rt_rtx))->u.fld[0]).rt_rtx))->
code))]) & (~1)) == (RTX_OBJ & (~1)))
))
4991 return &XEXP (XEXP (x, 0), 0)((((((x)->u.fld[0]).rt_rtx))->u.fld[0]).rt_rtx);
4992 break;
4993
4994 case SET:
4995 /* See if we can split SET_SRC as it stands. */
4996 split = find_split_point (&SET_SRC (x)(((x)->u.fld[1]).rt_rtx), insn, true);
4997 if (split && split != &SET_SRC (x)(((x)->u.fld[1]).rt_rtx))
4998 return split;
4999
5000 /* See if we can split SET_DEST as it stands. */
5001 split = find_split_point (&SET_DEST (x)(((x)->u.fld[0]).rt_rtx), insn, false);
5002 if (split && split != &SET_DEST (x)(((x)->u.fld[0]).rt_rtx))
5003 return split;
5004
5005 /* See if this is a bitfield assignment with everything constant. If
5006 so, this is an IOR of an AND, so split it into that. */
5007 if (GET_CODE (SET_DEST (x))((enum rtx_code) ((((x)->u.fld[0]).rt_rtx))->code) == ZERO_EXTRACT
5008 && is_a <scalar_int_mode> (GET_MODE (XEXP (SET_DEST (x), 0))((machine_mode) (((((((x)->u.fld[0]).rt_rtx))->u.fld[0]
).rt_rtx))->mode)
,
5009 &inner_mode)
5010 && HWI_COMPUTABLE_MODE_P (inner_mode)
5011 && CONST_INT_P (XEXP (SET_DEST (x), 1))(((enum rtx_code) (((((((x)->u.fld[0]).rt_rtx))->u.fld[
1]).rt_rtx))->code) == CONST_INT)
5012 && CONST_INT_P (XEXP (SET_DEST (x), 2))(((enum rtx_code) (((((((x)->u.fld[0]).rt_rtx))->u.fld[
2]).rt_rtx))->code) == CONST_INT)
5013 && CONST_INT_P (SET_SRC (x))(((enum rtx_code) ((((x)->u.fld[1]).rt_rtx))->code) == CONST_INT
)
5014 && ((INTVAL (XEXP (SET_DEST (x), 1))((((((((x)->u.fld[0]).rt_rtx))->u.fld[1]).rt_rtx))->
u.hwint[0])
5015 + INTVAL (XEXP (SET_DEST (x), 2))((((((((x)->u.fld[0]).rt_rtx))->u.fld[2]).rt_rtx))->
u.hwint[0])
)
5016 <= GET_MODE_PRECISION (inner_mode))
5017 && ! side_effects_p (XEXP (SET_DEST (x), 0)((((((x)->u.fld[0]).rt_rtx))->u.fld[0]).rt_rtx)))
5018 {
5019 HOST_WIDE_INTlong pos = INTVAL (XEXP (SET_DEST (x), 2))((((((((x)->u.fld[0]).rt_rtx))->u.fld[2]).rt_rtx))->
u.hwint[0])
;
5020 unsigned HOST_WIDE_INTlong len = INTVAL (XEXP (SET_DEST (x), 1))((((((((x)->u.fld[0]).rt_rtx))->u.fld[1]).rt_rtx))->
u.hwint[0])
;
5021 rtx dest = XEXP (SET_DEST (x), 0)((((((x)->u.fld[0]).rt_rtx))->u.fld[0]).rt_rtx);
5022 unsigned HOST_WIDE_INTlong mask = (HOST_WIDE_INT_1U1UL << len) - 1;
5023 unsigned HOST_WIDE_INTlong src = INTVAL (SET_SRC (x))(((((x)->u.fld[1]).rt_rtx))->u.hwint[0]) & mask;
5024 rtx or_mask;
5025
5026 if (BITS_BIG_ENDIAN0)
5027 pos = GET_MODE_PRECISION (inner_mode) - len - pos;
5028
5029 or_mask = gen_int_mode (src << pos, inner_mode);
5030 if (src == mask)
5031 SUBST (SET_SRC (x),do_SUBST (&((((x)->u.fld[1]).rt_rtx)), (simplify_gen_binary
(IOR, inner_mode, dest, or_mask)))
5032 simplify_gen_binary (IOR, inner_mode, dest, or_mask))do_SUBST (&((((x)->u.fld[1]).rt_rtx)), (simplify_gen_binary
(IOR, inner_mode, dest, or_mask)))
;
5033 else
5034 {
5035 rtx negmask = gen_int_mode (~(mask << pos), inner_mode);
5036 SUBST (SET_SRC (x),do_SUBST (&((((x)->u.fld[1]).rt_rtx)), (simplify_gen_binary
(IOR, inner_mode, simplify_gen_binary (AND, inner_mode, dest
, negmask), or_mask)))
5037 simplify_gen_binary (IOR, inner_mode,do_SUBST (&((((x)->u.fld[1]).rt_rtx)), (simplify_gen_binary
(IOR, inner_mode, simplify_gen_binary (AND, inner_mode, dest
, negmask), or_mask)))
5038 simplify_gen_binary (AND, inner_mode,do_SUBST (&((((x)->u.fld[1]).rt_rtx)), (simplify_gen_binary
(IOR, inner_mode, simplify_gen_binary (AND, inner_mode, dest
, negmask), or_mask)))
5039 dest, negmask),do_SUBST (&((((x)->u.fld[1]).rt_rtx)), (simplify_gen_binary
(IOR, inner_mode, simplify_gen_binary (AND, inner_mode, dest
, negmask), or_mask)))
5040 or_mask))do_SUBST (&((((x)->u.fld[1]).rt_rtx)), (simplify_gen_binary
(IOR, inner_mode, simplify_gen_binary (AND, inner_mode, dest
, negmask), or_mask)))
;
5041 }
5042
5043 SUBST (SET_DEST (x), dest)do_SUBST (&((((x)->u.fld[0]).rt_rtx)), (dest));
5044
5045 split = find_split_point (&SET_SRC (x)(((x)->u.fld[1]).rt_rtx), insn, true);
5046 if (split && split != &SET_SRC (x)(((x)->u.fld[1]).rt_rtx))
5047 return split;
5048 }
5049
5050 /* Otherwise, see if this is an operation that we can split into two.
5051 If so, try to split that. */
5052 code = GET_CODE (SET_SRC (x))((enum rtx_code) ((((x)->u.fld[1]).rt_rtx))->code);
5053
5054 switch (code)
5055 {
5056 case AND:
5057 /* If we are AND'ing with a large constant that is only a single
5058 bit and the result is only being used in a context where we
5059 need to know if it is zero or nonzero, replace it with a bit
5060 extraction. This will avoid the large constant, which might
5061 have taken more than one insn to make. If the constant were
5062 not a valid argument to the AND but took only one insn to make,
5063 this is no worse, but if it took more than one insn, it will
5064 be better. */
5065
5066 if (CONST_INT_P (XEXP (SET_SRC (x), 1))(((enum rtx_code) (((((((x)->u.fld[1]).rt_rtx))->u.fld[
1]).rt_rtx))->code) == CONST_INT)
5067 && REG_P (XEXP (SET_SRC (x), 0))(((enum rtx_code) (((((((x)->u.fld[1]).rt_rtx))->u.fld[
0]).rt_rtx))->code) == REG)
5068 && (pos = exact_log2 (UINTVAL (XEXP (SET_SRC (x), 1))((unsigned long) ((((((((x)->u.fld[1]).rt_rtx))->u.fld[
1]).rt_rtx))->u.hwint[0]))
)) >= 7
5069 && REG_P (SET_DEST (x))(((enum rtx_code) ((((x)->u.fld[0]).rt_rtx))->code) == REG
)
5070 && (split = find_single_use (SET_DEST (x)(((x)->u.fld[0]).rt_rtx), insn, NULLnullptr)) != 0
5071 && (GET_CODE (*split)((enum rtx_code) (*split)->code) == EQ || GET_CODE (*split)((enum rtx_code) (*split)->code) == NE)
5072 && XEXP (*split, 0)(((*split)->u.fld[0]).rt_rtx) == SET_DEST (x)(((x)->u.fld[0]).rt_rtx)
5073 && XEXP (*split, 1)(((*split)->u.fld[1]).rt_rtx) == const0_rtx(const_int_rtx[64]))
5074 {
5075 rtx extraction = make_extraction (GET_MODE (SET_DEST (x))((machine_mode) ((((x)->u.fld[0]).rt_rtx))->mode),
5076 XEXP (SET_SRC (x), 0)((((((x)->u.fld[1]).rt_rtx))->u.fld[0]).rt_rtx),
5077 pos, NULL_RTX(rtx) 0, 1, 1, 0, 0);
5078 if (extraction != 0)
5079 {
5080 SUBST (SET_SRC (x), extraction)do_SUBST (&((((x)->u.fld[1]).rt_rtx)), (extraction));
5081 return find_split_point (loc, insn, false);
5082 }
5083 }
5084 break;
5085
5086 case NE:
5087 /* If STORE_FLAG_VALUE is -1, this is (NE X 0) and only one bit of X
5088 is known to be on, this can be converted into a NEG of a shift. */
5089 if (STORE_FLAG_VALUE1 == -1 && XEXP (SET_SRC (x), 1)((((((x)->u.fld[1]).rt_rtx))->u.fld[1]).rt_rtx) == const0_rtx(const_int_rtx[64])
5090 && GET_MODE (SET_SRC (x))((machine_mode) ((((x)->u.fld[1]).rt_rtx))->mode) == GET_MODE (XEXP (SET_SRC (x), 0))((machine_mode) (((((((x)->u.fld[1]).rt_rtx))->u.fld[0]
).rt_rtx))->mode)
5091 && ((pos = exact_log2 (nonzero_bits (XEXP (SET_SRC (x), 0)((((((x)->u.fld[1]).rt_rtx))->u.fld[0]).rt_rtx),
5092 GET_MODE (XEXP (SET_SRC (x),((machine_mode) (((((((x)->u.fld[1]).rt_rtx))->u.fld[0]
).rt_rtx))->mode)
5093 0))((machine_mode) (((((((x)->u.fld[1]).rt_rtx))->u.fld[0]
).rt_rtx))->mode)
))) >= 1))
5094 {
5095 machine_mode mode = GET_MODE (XEXP (SET_SRC (x), 0))((machine_mode) (((((((x)->u.fld[1]).rt_rtx))->u.fld[0]
).rt_rtx))->mode)
;
5096 rtx pos_rtx = gen_int_shift_amount (mode, pos);
5097 SUBST (SET_SRC (x),do_SUBST (&((((x)->u.fld[1]).rt_rtx)), (gen_rtx_fmt_e_stat
((NEG), ((mode)), ((gen_rtx_fmt_ee_stat ((LSHIFTRT), ((mode)
), ((((((((x)->u.fld[1]).rt_rtx))->u.fld[0]).rt_rtx))),
((pos_rtx)) ))) )))
5098 gen_rtx_NEG (mode,do_SUBST (&((((x)->u.fld[1]).rt_rtx)), (gen_rtx_fmt_e_stat
((NEG), ((mode)), ((gen_rtx_fmt_ee_stat ((LSHIFTRT), ((mode)
), ((((((((x)->u.fld[1]).rt_rtx))->u.fld[0]).rt_rtx))),
((pos_rtx)) ))) )))
5099 gen_rtx_LSHIFTRT (mode,do_SUBST (&((((x)->u.fld[1]).rt_rtx)), (gen_rtx_fmt_e_stat
((NEG), ((mode)), ((gen_rtx_fmt_ee_stat ((LSHIFTRT), ((mode)
), ((((((((x)->u.fld[1]).rt_rtx))->u.fld[0]).rt_rtx))),
((pos_rtx)) ))) )))
5100 XEXP (SET_SRC (x), 0),do_SUBST (&((((x)->u.fld[1]).rt_rtx)), (gen_rtx_fmt_e_stat
((NEG), ((mode)), ((gen_rtx_fmt_ee_stat ((LSHIFTRT), ((mode)
), ((((((((x)->u.fld[1]).rt_rtx))->u.fld[0]).rt_rtx))),
((pos_rtx)) ))) )))
5101 pos_rtx)))do_SUBST (&((((x)->u.fld[1]).rt_rtx)), (gen_rtx_fmt_e_stat
((NEG), ((mode)), ((gen_rtx_fmt_ee_stat ((LSHIFTRT), ((mode)
), ((((((((x)->u.fld[1]).rt_rtx))->u.fld[0]).rt_rtx))),
((pos_rtx)) ))) )))
;
5102
5103 split = find_split_point (&SET_SRC (x)(((x)->u.fld[1]).rt_rtx), insn, true);
5104 if (split && split != &SET_SRC (x)(((x)->u.fld[1]).rt_rtx))
5105 return split;
5106 }
5107 break;
5108
5109 case SIGN_EXTEND:
5110 inner = XEXP (SET_SRC (x), 0)((((((x)->u.fld[1]).rt_rtx))->u.fld[0]).rt_rtx);
5111
5112 /* We can't optimize if either mode is a partial integer
5113 mode as we don't know how many bits are significant
5114 in those modes. */
5115 if (!is_int_mode (GET_MODE (inner)((machine_mode) (inner)->mode), &inner_mode)
5116 || GET_MODE_CLASS (GET_MODE (SET_SRC (x)))((enum mode_class) mode_class[((machine_mode) ((((x)->u.fld
[1]).rt_rtx))->mode)])
== MODE_PARTIAL_INT)
5117 break;
5118
5119 pos = 0;
5120 len = GET_MODE_PRECISION (inner_mode);
5121 unsignedp = 0;
5122 break;
5123
5124 case SIGN_EXTRACT:
5125 case ZERO_EXTRACT:
5126 if (is_a <scalar_int_mode> (GET_MODE (XEXP (SET_SRC (x), 0))((machine_mode) (((((((x)->u.fld[1]).rt_rtx))->u.fld[0]
).rt_rtx))->mode)
,
5127 &inner_mode)
5128 && CONST_INT_P (XEXP (SET_SRC (x), 1))(((enum rtx_code) (((((((x)->u.fld[1]).rt_rtx))->u.fld[
1]).rt_rtx))->code) == CONST_INT)
5129 && CONST_INT_P (XEXP (SET_SRC (x), 2))(((enum rtx_code) (((((((x)->u.fld[1]).rt_rtx))->u.fld[
2]).rt_rtx))->code) == CONST_INT)
)
5130 {
5131 inner = XEXP (SET_SRC (x), 0)((((((x)->u.fld[1]).rt_rtx))->u.fld[0]).rt_rtx);
5132 len = INTVAL (XEXP (SET_SRC (x), 1))((((((((x)->u.fld[1]).rt_rtx))->u.fld[1]).rt_rtx))->
u.hwint[0])
;
5133 pos = INTVAL (XEXP (SET_SRC (x), 2))((((((((x)->u.fld[1]).rt_rtx))->u.fld[2]).rt_rtx))->
u.hwint[0])
;
5134
5135 if (BITS_BIG_ENDIAN0)
5136 pos = GET_MODE_PRECISION (inner_mode) - len - pos;
5137 unsignedp = (code == ZERO_EXTRACT);
5138 }
5139 break;
5140
5141 default:
5142 break;
5143 }
5144
5145 if (len
5146 && known_subrange_p (pos, len,
5147 0, GET_MODE_PRECISION (GET_MODE (inner)((machine_mode) (inner)->mode)))
5148 && is_a <scalar_int_mode> (GET_MODE (SET_SRC (x))((machine_mode) ((((x)->u.fld[1]).rt_rtx))->mode), &mode))
5149 {
5150 /* For unsigned, we have a choice of a shift followed by an
5151 AND or two shifts. Use two shifts for field sizes where the
5152 constant might be too large. We assume here that we can
5153 always at least get 8-bit constants in an AND insn, which is
5154 true for every current RISC. */
5155
5156 if (unsignedp && len <= 8)
5157 {
5158 unsigned HOST_WIDE_INTlong mask
5159 = (HOST_WIDE_INT_1U1UL << len) - 1;
5160 rtx pos_rtx = gen_int_shift_amount (mode, pos);
5161 SUBST (SET_SRC (x),do_SUBST (&((((x)->u.fld[1]).rt_rtx)), (gen_rtx_fmt_ee_stat
((AND), ((mode)), ((gen_rtx_fmt_ee_stat ((LSHIFTRT), ((mode)
), ((rtl_hooks.gen_lowpart (mode, inner))), ((pos_rtx)) ))), (
(gen_int_mode (mask, mode))) )))
5162 gen_rtx_AND (mode,do_SUBST (&((((x)->u.fld[1]).rt_rtx)), (gen_rtx_fmt_ee_stat
((AND), ((mode)), ((gen_rtx_fmt_ee_stat ((LSHIFTRT), ((mode)
), ((rtl_hooks.gen_lowpart (mode, inner))), ((pos_rtx)) ))), (
(gen_int_mode (mask, mode))) )))
5163 gen_rtx_LSHIFTRTdo_SUBST (&((((x)->u.fld[1]).rt_rtx)), (gen_rtx_fmt_ee_stat
((AND), ((mode)), ((gen_rtx_fmt_ee_stat ((LSHIFTRT), ((mode)
), ((rtl_hooks.gen_lowpart (mode, inner))), ((pos_rtx)) ))), (
(gen_int_mode (mask, mode))) )))
5164 (mode, gen_lowpart (mode, inner), pos_rtx),do_SUBST (&((((x)->u.fld[1]).rt_rtx)), (gen_rtx_fmt_ee_stat
((AND), ((mode)), ((gen_rtx_fmt_ee_stat ((LSHIFTRT), ((mode)
), ((rtl_hooks.gen_lowpart (mode, inner))), ((pos_rtx)) ))), (
(gen_int_mode (mask, mode))) )))
5165 gen_int_mode (mask, mode)))do_SUBST (&((((x)->u.fld[1]).rt_rtx)), (gen_rtx_fmt_ee_stat
((AND), ((mode)), ((gen_rtx_fmt_ee_stat ((LSHIFTRT), ((mode)
), ((rtl_hooks.gen_lowpart (mode, inner))), ((pos_rtx)) ))), (
(gen_int_mode (mask, mode))) )))
;
5166
5167 split = find_split_point (&SET_SRC (x)(((x)->u.fld[1]).rt_rtx), insn, true);
5168 if (split && split != &SET_SRC (x)(((x)->u.fld[1]).rt_rtx))
5169 return split;
5170 }
5171 else
5172 {
5173 int left_bits = GET_MODE_PRECISION (mode) - len - pos;
5174 int right_bits = GET_MODE_PRECISION (mode) - len;
5175 SUBST (SET_SRC (x),do_SUBST (&((((x)->u.fld[1]).rt_rtx)), (gen_rtx_fmt_ee_stat
((unsignedp ? LSHIFTRT : ASHIFTRT), (mode), (gen_rtx_fmt_ee_stat
((ASHIFT), ((mode)), ((rtl_hooks.gen_lowpart (mode, inner)))
, ((gen_int_shift_amount (mode, left_bits))) )), (gen_int_shift_amount
(mode, right_bits)) )))
5176 gen_rtx_fmt_eedo_SUBST (&((((x)->u.fld[1]).rt_rtx)), (gen_rtx_fmt_ee_stat
((unsignedp ? LSHIFTRT : ASHIFTRT), (mode), (gen_rtx_fmt_ee_stat
((ASHIFT), ((mode)), ((rtl_hooks.gen_lowpart (mode, inner)))
, ((gen_int_shift_amount (mode, left_bits))) )), (gen_int_shift_amount
(mode, right_bits)) )))
5177 (unsignedp ? LSHIFTRT : ASHIFTRT, mode,do_SUBST (&((((x)->u.fld[1]).rt_rtx)), (gen_rtx_fmt_ee_stat
((unsignedp ? LSHIFTRT : ASHIFTRT), (mode), (gen_rtx_fmt_ee_stat
((ASHIFT), ((mode)), ((rtl_hooks.gen_lowpart (mode, inner)))
, ((gen_int_shift_amount (mode, left_bits))) )), (gen_int_shift_amount
(mode, right_bits)) )))
5178 gen_rtx_ASHIFT (mode,do_SUBST (&((((x)->u.fld[1]).rt_rtx)), (gen_rtx_fmt_ee_stat
((unsignedp ? LSHIFTRT : ASHIFTRT), (mode), (gen_rtx_fmt_ee_stat
((ASHIFT), ((mode)), ((rtl_hooks.gen_lowpart (mode, inner)))
, ((gen_int_shift_amount (mode, left_bits))) )), (gen_int_shift_amount
(mode, right_bits)) )))
5179 gen_lowpart (mode, inner),do_SUBST (&((((x)->u.fld[1]).rt_rtx)), (gen_rtx_fmt_ee_stat
((unsignedp ? LSHIFTRT : ASHIFTRT), (mode), (gen_rtx_fmt_ee_stat
((ASHIFT), ((mode)), ((rtl_hooks.gen_lowpart (mode, inner)))
, ((gen_int_shift_amount (mode, left_bits))) )), (gen_int_shift_amount
(mode, right_bits)) )))
5180 gen_int_shift_amount (mode, left_bits)),do_SUBST (&((((x)->u.fld[1]).rt_rtx)), (gen_rtx_fmt_ee_stat
((unsignedp ? LSHIFTRT : ASHIFTRT), (mode), (gen_rtx_fmt_ee_stat
((ASHIFT), ((mode)), ((rtl_hooks.gen_lowpart (mode, inner)))
, ((gen_int_shift_amount (mode, left_bits))) )), (gen_int_shift_amount
(mode, right_bits)) )))
5181 gen_int_shift_amount (mode, right_bits)))do_SUBST (&((((x)->u.fld[1]).rt_rtx)), (gen_rtx_fmt_ee_stat
((unsignedp ? LSHIFTRT : ASHIFTRT), (mode), (gen_rtx_fmt_ee_stat
((ASHIFT), ((mode)), ((rtl_hooks.gen_lowpart (mode, inner)))
, ((gen_int_shift_amount (mode, left_bits))) )), (gen_int_shift_amount
(mode, right_bits)) )))
;
5182
5183 split = find_split_point (&SET_SRC (x)(((x)->u.fld[1]).rt_rtx), insn, true);
5184 if (split && split != &SET_SRC (x)(((x)->u.fld[1]).rt_rtx))
5185 return split;
5186 }
5187 }
5188
5189 /* See if this is a simple operation with a constant as the second
5190 operand. It might be that this constant is out of range and hence
5191 could be used as a split point. */
5192 if (BINARY_P (SET_SRC (x))(((rtx_class[(int) (((enum rtx_code) ((((x)->u.fld[1]).rt_rtx
))->code))]) & (~3)) == (RTX_COMPARE & (~3)))
5193 && CONSTANT_P (XEXP (SET_SRC (x), 1))((rtx_class[(int) (((enum rtx_code) (((((((x)->u.fld[1]).rt_rtx
))->u.fld[1]).rt_rtx))->code))]) == RTX_CONST_OBJ)
5194 && (OBJECT_P (XEXP (SET_SRC (x), 0))(((rtx_class[(int) (((enum rtx_code) (((((((x)->u.fld[1]).
rt_rtx))->u.fld[0]).rt_rtx))->code))]) & (~1)) == (
RTX_OBJ & (~1)))
5195 || (GET_CODE (XEXP (SET_SRC (x), 0))((enum rtx_code) (((((((x)->u.fld[1]).rt_rtx))->u.fld[0
]).rt_rtx))->code)
== SUBREG
5196 && OBJECT_P (SUBREG_REG (XEXP (SET_SRC (x), 0)))(((rtx_class[(int) (((enum rtx_code) ((((((((((x)->u.fld[1
]).rt_rtx))->u.fld[0]).rt_rtx))->u.fld[0]).rt_rtx))->
code))]) & (~1)) == (RTX_OBJ & (~1)))
)))
5197 return &XEXP (SET_SRC (x), 1)((((((x)->u.fld[1]).rt_rtx))->u.fld[1]).rt_rtx);
5198
5199 /* Finally, see if this is a simple operation with its first operand
5200 not in a register. The operation might require this operand in a
5201 register, so return it as a split point. We can always do this
5202 because if the first operand were another operation, we would have
5203 already found it as a split point. */
5204 if ((BINARY_P (SET_SRC (x))(((rtx_class[(int) (((enum rtx_code) ((((x)->u.fld[1]).rt_rtx
))->code))]) & (~3)) == (RTX_COMPARE & (~3)))
|| UNARY_P (SET_SRC (x))((rtx_class[(int) (((enum rtx_code) ((((x)->u.fld[1]).rt_rtx
))->code))]) == RTX_UNARY)
)
5205 && ! register_operand (XEXP (SET_SRC (x), 0)((((((x)->u.fld[1]).rt_rtx))->u.fld[0]).rt_rtx), VOIDmode((void) 0, E_VOIDmode)))
5206 return &XEXP (SET_SRC (x), 0)((((((x)->u.fld[1]).rt_rtx))->u.fld[0]).rt_rtx);
5207
5208 return 0;
5209
5210 case AND:
5211 case IOR:
5212 /* We write NOR as (and (not A) (not B)), but if we don't have a NOR,
5213 it is better to write this as (not (ior A B)) so we can split it.
5214 Similarly for IOR. */
5215 if (GET_CODE (XEXP (x, 0))((enum rtx_code) ((((x)->u.fld[0]).rt_rtx))->code) == NOT && GET_CODE (XEXP (x, 1))((enum rtx_code) ((((x)->u.fld[1]).rt_rtx))->code) == NOT)
5216 {
5217 SUBST (*loc,do_SUBST (&(*loc), (gen_rtx_fmt_e_stat ((NOT), ((((machine_mode
) (x)->mode))), ((gen_rtx_fmt_ee_stat ((code == IOR ? AND :
IOR), (((machine_mode) (x)->mode)), (((((((x)->u.fld[0
]).rt_rtx))->u.fld[0]).rt_rtx)), (((((((x)->u.fld[1]).rt_rtx
))->u.fld[0]).rt_rtx)) ))) )))
5218 gen_rtx_NOT (GET_MODE (x),do_SUBST (&(*loc), (gen_rtx_fmt_e_stat ((NOT), ((((machine_mode
) (x)->mode))), ((gen_rtx_fmt_ee_stat ((code == IOR ? AND :
IOR), (((machine_mode) (x)->mode)), (((((((x)->u.fld[0
]).rt_rtx))->u.fld[0]).rt_rtx)), (((((((x)->u.fld[1]).rt_rtx
))->u.fld[0]).rt_rtx)) ))) )))
5219 gen_rtx_fmt_ee (code == IOR ? AND : IOR,do_SUBST (&(*loc), (gen_rtx_fmt_e_stat ((NOT), ((((machine_mode
) (x)->mode))), ((gen_rtx_fmt_ee_stat ((code == IOR ? AND :
IOR), (((machine_mode) (x)->mode)), (((((((x)->u.fld[0
]).rt_rtx))->u.fld[0]).rt_rtx)), (((((((x)->u.fld[1]).rt_rtx
))->u.fld[0]).rt_rtx)) ))) )))
5220 GET_MODE (x),do_SUBST (&(*loc), (gen_rtx_fmt_e_stat ((NOT), ((((machine_mode
) (x)->mode))), ((gen_rtx_fmt_ee_stat ((code == IOR ? AND :
IOR), (((machine_mode) (x)->mode)), (((((((x)->u.fld[0
]).rt_rtx))->u.fld[0]).rt_rtx)), (((((((x)->u.fld[1]).rt_rtx
))->u.fld[0]).rt_rtx)) ))) )))
5221 XEXP (XEXP (x, 0), 0),do_SUBST (&(*loc), (gen_rtx_fmt_e_stat ((NOT), ((((machine_mode
) (x)->mode))), ((gen_rtx_fmt_ee_stat ((code == IOR ? AND :
IOR), (((machine_mode) (x)->mode)), (((((((x)->u.fld[0
]).rt_rtx))->u.fld[0]).rt_rtx)), (((((((x)->u.fld[1]).rt_rtx
))->u.fld[0]).rt_rtx)) ))) )))
5222 XEXP (XEXP (x, 1), 0))))do_SUBST (&(*loc), (gen_rtx_fmt_e_stat ((NOT), ((((machine_mode
) (x)->mode))), ((gen_rtx_fmt_ee_stat ((code == IOR ? AND :
IOR), (((machine_mode) (x)->mode)), (((((((x)->u.fld[0
]).rt_rtx))->u.fld[0]).rt_rtx)), (((((((x)->u.fld[1]).rt_rtx
))->u.fld[0]).rt_rtx)) ))) )))
;
5223 return find_split_point (loc, insn, set_src);
5224 }
5225
5226 /* Many RISC machines have a large set of logical insns. If the
5227 second operand is a NOT, put it first so we will try to split the
5228 other operand first. */
5229 if (GET_CODE (XEXP (x, 1))((enum rtx_code) ((((x)->u.fld[1]).rt_rtx))->code) == NOT)
5230 {
5231 rtx tem = XEXP (x, 0)(((x)->u.fld[0]).rt_rtx);
5232 SUBST (XEXP (x, 0), XEXP (x, 1))do_SUBST (&((((x)->u.fld[0]).rt_rtx)), ((((x)->u.fld
[1]).rt_rtx)))
;
5233 SUBST (XEXP (x, 1), tem)do_SUBST (&((((x)->u.fld[1]).rt_rtx)), (tem));
5234 }
5235 break;
5236
5237 case PLUS:
5238 case MINUS:
5239 /* Canonicalization can produce (minus A (mult B C)), where C is a
5240 constant. It may be better to try splitting (plus (mult B -C) A)
5241 instead if this isn't a multiply by a power of two. */
5242 if (set_src && code == MINUS && GET_CODE (XEXP (x, 1))((enum rtx_code) ((((x)->u.fld[1]).rt_rtx))->code) == MULT
5243 && GET_CODE (XEXP (XEXP (x, 1), 1))((enum rtx_code) (((((((x)->u.fld[1]).rt_rtx))->u.fld[1
]).rt_rtx))->code)
== CONST_INT
5244 && !pow2p_hwi (INTVAL (XEXP (XEXP (x, 1), 1))((((((((x)->u.fld[1]).rt_rtx))->u.fld[1]).rt_rtx))->
u.hwint[0])
))
5245 {
5246 machine_mode mode = GET_MODE (x)((machine_mode) (x)->mode);
5247 unsigned HOST_WIDE_INTlong this_int = INTVAL (XEXP (XEXP (x, 1), 1))((((((((x)->u.fld[1]).rt_rtx))->u.fld[1]).rt_rtx))->
u.hwint[0])
;
5248 HOST_WIDE_INTlong other_int = trunc_int_for_mode (-this_int, mode);
5249 SUBST (*loc, gen_rtx_PLUS (mode,do_SUBST (&(*loc), (gen_rtx_fmt_ee_stat ((PLUS), ((mode))
, ((gen_rtx_fmt_ee_stat ((MULT), ((mode)), ((((((((x)->u.fld
[1]).rt_rtx))->u.fld[0]).rt_rtx))), ((gen_int_mode (other_int
, mode))) ))), (((((x)->u.fld[0]).rt_rtx))) )))
5250 gen_rtx_MULT (mode,do_SUBST (&(*loc), (gen_rtx_fmt_ee_stat ((PLUS), ((mode))
, ((gen_rtx_fmt_ee_stat ((MULT), ((mode)), ((((((((x)->u.fld
[1]).rt_rtx))->u.fld[0]).rt_rtx))), ((gen_int_mode (other_int
, mode))) ))), (((((x)->u.fld[0]).rt_rtx))) )))
5251 XEXP (XEXP (x, 1), 0),do_SUBST (&(*loc), (gen_rtx_fmt_ee_stat ((PLUS), ((mode))
, ((gen_rtx_fmt_ee_stat ((MULT), ((mode)), ((((((((x)->u.fld
[1]).rt_rtx))->u.fld[0]).rt_rtx))), ((gen_int_mode (other_int
, mode))) ))), (((((x)->u.fld[0]).rt_rtx))) )))
5252 gen_int_mode (other_int,do_SUBST (&(*loc), (gen_rtx_fmt_ee_stat ((PLUS), ((mode))
, ((gen_rtx_fmt_ee_stat ((MULT), ((mode)), ((((((((x)->u.fld
[1]).rt_rtx))->u.fld[0]).rt_rtx))), ((gen_int_mode (other_int
, mode))) ))), (((((x)->u.fld[0]).rt_rtx))) )))
5253 mode)),do_SUBST (&(*loc), (gen_rtx_fmt_ee_stat ((PLUS), ((mode))
, ((gen_rtx_fmt_ee_stat ((MULT), ((mode)), ((((((((x)->u.fld
[1]).rt_rtx))->u.fld[0]).rt_rtx))), ((gen_int_mode (other_int
, mode))) ))), (((((x)->u.fld[0]).rt_rtx))) )))
5254 XEXP (x, 0)))do_SUBST (&(*loc), (gen_rtx_fmt_ee_stat ((PLUS), ((mode))
, ((gen_rtx_fmt_ee_stat ((MULT), ((mode)), ((((((((x)->u.fld
[1]).rt_rtx))->u.fld[0]).rt_rtx))), ((gen_int_mode (other_int
, mode))) ))), (((((x)->u.fld[0]).rt_rtx))) )))
;
5255 return find_split_point (loc, insn, set_src);
5256 }
5257
5258 /* Split at a multiply-accumulate instruction. However if this is
5259 the SET_SRC, we likely do not have such an instruction and it's
5260 worthless to try this split. */
5261 if (!set_src
5262 && (GET_CODE (XEXP (x, 0))((enum rtx_code) ((((x)->u.fld[0]).rt_rtx))->code) == MULT
5263 || (GET_CODE (XEXP (x, 0))((enum rtx_code) ((((x)->u.fld[0]).rt_rtx))->code) == ASHIFT
5264 && GET_CODE (XEXP (XEXP (x, 0), 1))((enum rtx_code) (((((((x)->u.fld[0]).rt_rtx))->u.fld[1
]).rt_rtx))->code)
== CONST_INT)))
5265 return loc;
5266
5267 default:
5268 break;
5269 }
5270
5271 /* Otherwise, select our actions depending on our rtx class. */
5272 switch (GET_RTX_CLASS (code)(rtx_class[(int) (code)]))
5273 {
5274 case RTX_BITFIELD_OPS: /* This is ZERO_EXTRACT and SIGN_EXTRACT. */
5275 case RTX_TERNARY:
5276 split = find_split_point (&XEXP (x, 2)(((x)->u.fld[2]).rt_rtx), insn, false);
5277 if (split)
5278 return split;
5279 /* fall through */
5280 case RTX_BIN_ARITH:
5281 case RTX_COMM_ARITH:
5282 case RTX_COMPARE:
5283 case RTX_COMM_COMPARE:
5284 split = find_split_point (&XEXP (x, 1)(((x)->u.fld[1]).rt_rtx), insn, false);
5285 if (split)
5286 return split;
5287 /* fall through */
5288 case RTX_UNARY:
5289 /* Some machines have (and (shift ...) ...) insns. If X is not
5290 an AND, but XEXP (X, 0) is, use it as our split point. */
5291 if (GET_CODE (x)((enum rtx_code) (x)->code) != AND && GET_CODE (XEXP (x, 0))((enum rtx_code) ((((x)->u.fld[0]).rt_rtx))->code) == AND)
5292 return &XEXP (x, 0)(((x)->u.fld[0]).rt_rtx);
5293
5294 split = find_split_point (&XEXP (x, 0)(((x)->u.fld[0]).rt_rtx), insn, false);
5295 if (split)
5296 return split;
5297 return loc;
5298
5299 default:
5300 /* Otherwise, we don't have a split point. */
5301 return 0;
5302 }
5303}
5304
5305/* Throughout X, replace FROM with TO, and return the result.
5306 The result is TO if X is FROM;
5307 otherwise the result is X, but its contents may have been modified.
5308 If they were modified, a record was made in undobuf so that
5309 undo_all will (among other things) return X to its original state.
5310
5311 If the number of changes necessary is too much to record to undo,
5312 the excess changes are not made, so the result is invalid.
5313 The changes already made can still be undone.
5314 undobuf.num_undo is incremented for such changes, so by testing that
5315 the caller can tell whether the result is valid.
5316
5317 `n_occurrences' is incremented each time FROM is replaced.
5318
5319 IN_DEST is nonzero if we are processing the SET_DEST of a SET.
5320
5321 IN_COND is nonzero if we are at the top level of a condition.
5322
5323 UNIQUE_COPY is nonzero if each substitution must be unique. We do this
5324 by copying if `n_occurrences' is nonzero. */
5325
5326static rtx
5327subst (rtx x, rtx from, rtx to, int in_dest, int in_cond, int unique_copy)
5328{
5329 enum rtx_code code = GET_CODE (x)((enum rtx_code) (x)->code);
5330 machine_mode op0_mode = VOIDmode((void) 0, E_VOIDmode);
5331 const char *fmt;
5332 int len, i;
5333 rtx new_rtx;
5334
5335/* Two expressions are equal if they are identical copies of a shared
5336 RTX or if they are both registers with the same register number
5337 and mode. */
5338
5339#define COMBINE_RTX_EQUAL_P(X,Y)((X) == (Y) || ((((enum rtx_code) (X)->code) == REG) &&
(((enum rtx_code) (Y)->code) == REG) && (rhs_regno
(X)) == (rhs_regno(Y)) && ((machine_mode) (X)->mode
) == ((machine_mode) (Y)->mode)))
\
5340 ((X) == (Y) \
5341 || (REG_P (X)(((enum rtx_code) (X)->code) == REG) && REG_P (Y)(((enum rtx_code) (Y)->code) == REG) \
5342 && REGNO (X)(rhs_regno(X)) == REGNO (Y)(rhs_regno(Y)) && GET_MODE (X)((machine_mode) (X)->mode) == GET_MODE (Y)((machine_mode) (Y)->mode)))
5343
5344 /* Do not substitute into clobbers of regs -- this will never result in
5345 valid RTL. */
5346 if (GET_CODE (x)((enum rtx_code) (x)->code) == CLOBBER && REG_P (XEXP (x, 0))(((enum rtx_code) ((((x)->u.fld[0]).rt_rtx))->code) == REG
)
)
5347 return x;
5348
5349 if (! in_dest && COMBINE_RTX_EQUAL_P (x, from)((x) == (from) || ((((enum rtx_code) (x)->code) == REG) &&
(((enum rtx_code) (from)->code) == REG) && (rhs_regno
(x)) == (rhs_regno(from)) && ((machine_mode) (x)->
mode) == ((machine_mode) (from)->mode)))
)
5350 {
5351 n_occurrences++;
5352 return (unique_copy && n_occurrences > 1 ? copy_rtx (to) : to);
5353 }
5354
5355 /* If X and FROM are the same register but different modes, they
5356 will not have been seen as equal above. However, the log links code
5357 will make a LOG_LINKS entry for that case. If we do nothing, we
5358 will try to rerecognize our original insn and, when it succeeds,
5359 we will delete the feeding insn, which is incorrect.
5360
5361 So force this insn not to match in this (rare) case. */
5362 if (! in_dest && code == REG && REG_P (from)(((enum rtx_code) (from)->code) == REG)
5363 && reg_overlap_mentioned_p (x, from))
5364 return gen_rtx_CLOBBER (GET_MODE (x), const0_rtx)gen_rtx_fmt_e_stat ((CLOBBER), ((((machine_mode) (x)->mode
))), (((const_int_rtx[64]))) )
;
5365
5366 /* If this is an object, we are done unless it is a MEM or LO_SUM, both
5367 of which may contain things that can be combined. */
5368 if (code != MEM && code != LO_SUM && OBJECT_P (x)(((rtx_class[(int) (((enum rtx_code) (x)->code))]) & (
~1)) == (RTX_OBJ & (~1)))
)
5369 return x;
5370
5371 /* It is possible to have a subexpression appear twice in the insn.
5372 Suppose that FROM is a register that appears within TO.
5373 Then, after that subexpression has been scanned once by `subst',
5374 the second time it is scanned, TO may be found. If we were
5375 to scan TO here, we would find FROM within it and create a
5376 self-referent rtl structure which is completely wrong. */
5377 if (COMBINE_RTX_EQUAL_P (x, to)((x) == (to) || ((((enum rtx_code) (x)->code) == REG) &&
(((enum rtx_code) (to)->code) == REG) && (rhs_regno
(x)) == (rhs_regno(to)) && ((machine_mode) (x)->mode
) == ((machine_mode) (to)->mode)))
)
5378 return to;
5379
5380 /* Parallel asm_operands need special attention because all of the
5381 inputs are shared across the arms. Furthermore, unsharing the
5382 rtl results in recognition failures. Failure to handle this case
5383 specially can result in circular rtl.
5384
5385 Solve this by doing a normal pass across the first entry of the
5386 parallel, and only processing the SET_DESTs of the subsequent
5387 entries. Ug. */
5388
5389 if (code == PARALLEL
5390 && GET_CODE (XVECEXP (x, 0, 0))((enum rtx_code) ((((((x)->u.fld[0]).rt_rtvec))->elem[0
]))->code)
== SET
5391 && GET_CODE (SET_SRC (XVECEXP (x, 0, 0)))((enum rtx_code) (((((((((x)->u.fld[0]).rt_rtvec))->elem
[0]))->u.fld[1]).rt_rtx))->code)
== ASM_OPERANDS)
5392 {
5393 new_rtx = subst (XVECEXP (x, 0, 0)(((((x)->u.fld[0]).rt_rtvec))->elem[0]), from, to, 0, 0, unique_copy);
5394
5395 /* If this substitution failed, this whole thing fails. */
5396 if (GET_CODE (new_rtx)((enum rtx_code) (new_rtx)->code) == CLOBBER
5397 && XEXP (new_rtx, 0)(((new_rtx)->u.fld[0]).rt_rtx) == const0_rtx(const_int_rtx[64]))
5398 return new_rtx;
5399
5400 SUBST (XVECEXP (x, 0, 0), new_rtx)do_SUBST (&((((((x)->u.fld[0]).rt_rtvec))->elem[0])
), (new_rtx))
;
5401
5402 for (i = XVECLEN (x, 0)(((((x)->u.fld[0]).rt_rtvec))->num_elem) - 1; i >= 1; i--)
5403 {
5404 rtx dest = SET_DEST (XVECEXP (x, 0, i))((((((((x)->u.fld[0]).rt_rtvec))->elem[i]))->u.fld[0
]).rt_rtx)
;
5405
5406 if (!REG_P (dest)(((enum rtx_code) (dest)->code) == REG) && GET_CODE (dest)((enum rtx_code) (dest)->code) != PC)
5407 {
5408 new_rtx = subst (dest, from, to, 0, 0, unique_copy);
5409
5410 /* If this substitution failed, this whole thing fails. */
5411 if (GET_CODE (new_rtx)((enum rtx_code) (new_rtx)->code) == CLOBBER
5412 && XEXP (new_rtx, 0)(((new_rtx)->u.fld[0]).rt_rtx) == const0_rtx(const_int_rtx[64]))
5413 return new_rtx;
5414
5415 SUBST (SET_DEST (XVECEXP (x, 0, i)), new_rtx)do_SUBST (&(((((((((x)->u.fld[0]).rt_rtvec))->elem[
i]))->u.fld[0]).rt_rtx)), (new_rtx))
;
5416 }
5417 }
5418 }
5419 else
5420 {
5421 len = GET_RTX_LENGTH (code)(rtx_length[(int) (code)]);
5422 fmt = GET_RTX_FORMAT (code)(rtx_format[(int) (code)]);
5423
5424 /* We don't need to process a SET_DEST that is a register or PC, so
5425 set up to skip this common case. All other cases where we want
5426 to suppress replacing something inside a SET_SRC are handled via
5427 the IN_DEST operand. */
5428 if (code == SET
5429 && (REG_P (SET_DEST (x))(((enum rtx_code) ((((x)->u.fld[0]).rt_rtx))->code) == REG
)
5430 || GET_CODE (SET_DEST (x))((enum rtx_code) ((((x)->u.fld[0]).rt_rtx))->code) == PC))
5431 fmt = "ie";
5432
5433 /* Trying to simplify the operands of a widening MULT is not likely
5434 to create RTL matching a machine insn. */
5435 if (code == MULT
5436 && (GET_CODE (XEXP (x, 0))((enum rtx_code) ((((x)->u.fld[0]).rt_rtx))->code) == ZERO_EXTEND
5437 || GET_CODE (XEXP (x, 0))((enum rtx_code) ((((x)->u.fld[0]).rt_rtx))->code) == SIGN_EXTEND)
5438 && (GET_CODE (XEXP (x, 1))((enum rtx_code) ((((x)->u.fld[1]).rt_rtx))->code) == ZERO_EXTEND
5439 || GET_CODE (XEXP (x, 1))((enum rtx_code) ((((x)->u.fld[1]).rt_rtx))->code) == SIGN_EXTEND)
5440 && REG_P (XEXP (XEXP (x, 0), 0))(((enum rtx_code) (((((((x)->u.fld[0]).rt_rtx))->u.fld[
0]).rt_rtx))->code) == REG)
5441 && REG_P (XEXP (XEXP (x, 1), 0))(((enum rtx_code) (((((((x)->u.fld[1]).rt_rtx))->u.fld[
0]).rt_rtx))->code) == REG)
5442 && from == to)
5443 return x;
5444
5445
5446 /* Get the mode of operand 0 in case X is now a SIGN_EXTEND of a
5447 constant. */
5448 if (fmt[0] == 'e')
5449 op0_mode = GET_MODE (XEXP (x, 0))((machine_mode) ((((x)->u.fld[0]).rt_rtx))->mode);
5450
5451 for (i = 0; i < len; i++)
5452 {
5453 if (fmt[i] == 'E')
5454 {
5455 int j;
5456 for (j = XVECLEN (x, i)(((((x)->u.fld[i]).rt_rtvec))->num_elem) - 1; j >= 0; j--)
5457 {
5458 if (COMBINE_RTX_EQUAL_P (XVECEXP (x, i, j), from)(((((((x)->u.fld[i]).rt_rtvec))->elem[j])) == (from) ||
((((enum rtx_code) ((((((x)->u.fld[i]).rt_rtvec))->elem
[j]))->code) == REG) && (((enum rtx_code) (from)->
code) == REG) && (rhs_regno((((((x)->u.fld[i]).rt_rtvec
))->elem[j]))) == (rhs_regno(from)) && ((machine_mode
) ((((((x)->u.fld[i]).rt_rtvec))->elem[j]))->mode) ==
((machine_mode) (from)->mode)))
)
5459 {
5460 new_rtx = (unique_copy && n_occurrences
5461 ? copy_rtx (to) : to);
5462 n_occurrences++;
5463 }
5464 else
5465 {
5466 new_rtx = subst (XVECEXP (x, i, j)(((((x)->u.fld[i]).rt_rtvec))->elem[j]), from, to, 0, 0,
5467 unique_copy);
5468
5469 /* If this substitution failed, this whole thing
5470 fails. */
5471 if (GET_CODE (new_rtx)((enum rtx_code) (new_rtx)->code) == CLOBBER
5472 && XEXP (new_rtx, 0)(((new_rtx)->u.fld[0]).rt_rtx) == const0_rtx(const_int_rtx[64]))
5473 return new_rtx;
5474 }
5475
5476 SUBST (XVECEXP (x, i, j), new_rtx)do_SUBST (&((((((x)->u.fld[i]).rt_rtvec))->elem[j])
), (new_rtx))
;
5477 }
5478 }
5479 else if (fmt[i] == 'e')
5480 {
5481 /* If this is a register being set, ignore it. */
5482 new_rtx = XEXP (x, i)(((x)->u.fld[i]).rt_rtx);
5483 if (in_dest
5484 && i == 0
5485 && (((code == SUBREG || code == ZERO_EXTRACT)
5486 && REG_P (new_rtx)(((enum rtx_code) (new_rtx)->code) == REG))
5487 || code == STRICT_LOW_PART))
5488 ;
5489
5490 else if (COMBINE_RTX_EQUAL_P (XEXP (x, i), from)(((((x)->u.fld[i]).rt_rtx)) == (from) || ((((enum rtx_code
) ((((x)->u.fld[i]).rt_rtx))->code) == REG) && (
((enum rtx_code) (from)->code) == REG) && (rhs_regno
((((x)->u.fld[i]).rt_rtx))) == (rhs_regno(from)) &&
((machine_mode) ((((x)->u.fld[i]).rt_rtx))->mode) == (
(machine_mode) (from)->mode)))
)
5491 {
5492 /* In general, don't install a subreg involving two
5493 modes not tieable. It can worsen register
5494 allocation, and can even make invalid reload
5495 insns, since the reg inside may need to be copied
5496 from in the outside mode, and that may be invalid
5497 if it is an fp reg copied in integer mode.
5498
5499 We allow an exception to this: It is valid if
5500 it is inside another SUBREG and the mode of that
5501 SUBREG and the mode of the inside of TO is
5502 tieable. */
5503
5504 if (GET_CODE (to)((enum rtx_code) (to)->code) == SUBREG
5505 && !targetm.modes_tieable_p (GET_MODE (to)((machine_mode) (to)->mode),
5506 GET_MODE (SUBREG_REG (to))((machine_mode) ((((to)->u.fld[0]).rt_rtx))->mode))
5507 && ! (code == SUBREG
5508 && (targetm.modes_tieable_p
5509 (GET_MODE (x)((machine_mode) (x)->mode), GET_MODE (SUBREG_REG (to))((machine_mode) ((((to)->u.fld[0]).rt_rtx))->mode)))))
5510 return gen_rtx_CLOBBER (VOIDmode, const0_rtx)gen_rtx_fmt_e_stat ((CLOBBER), ((((void) 0, E_VOIDmode))), ((
(const_int_rtx[64]))) )
;
5511
5512 if (code == SUBREG
5513 && REG_P (to)(((enum rtx_code) (to)->code) == REG)
5514 && REGNO (to)(rhs_regno(to)) < FIRST_PSEUDO_REGISTER76
5515</